
Commit fa6b706

Auto merge of #62560 - pietroalbini:tools-builders-on-prs, r=alexcrichton
2 parents d32a725 + 2d2dcb0

ci: add a pr builder to test tools when submodules are updated

This PR adds the x86_64-gnu-tools builder to PRs that update submodules. Since it's not possible to *start* the builder only when submodule changes are detected, I opted to add a "decider" task at the start of the job which sets the `SKIP_JOB` environment variable when no submodules were updated, and I gated the most time-consuming tasks (the actual build and the artifacts upload) on that variable not being set.

All of this is conditionally included in `steps/run.yml` only when a template parameter is set, so it should only affect that builder on PRs. The cost should be a dummy builder running for 2-3 minutes on each PR, which we should be able to handle.

Fixes #61837

r? @alexcrichton
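
To make the mechanism easier to follow before reading the diff, here is a minimal sketch of the gating pattern in Azure Pipelines syntax. Only the SKIP_JOB variable and the condition expression come from this commit; the step bodies and the build script name are hypothetical placeholders.

steps:
# Decider: mark the job as skippable when the commit touches no submodule.
- bash: |
    if git diff HEAD^ | grep -q "^index .* 160000"; then
      echo "Submodules changed, running the real work"
    else
      # Logging command understood by the Azure Pipelines agent: it defines
      # the SKIP_JOB variable for every later step in this job.
      echo "##vso[task.setvariable variable=SKIP_JOB;]1"
    fi
  displayName: Decide whether to run this job

# Expensive step, gated on the variable. When SKIP_JOB was never set,
# variables.SKIP_JOB expands to an empty string, which converts to false,
# so not(...) is true and the step runs; when SKIP_JOB is "1" the condition
# is false and the step is skipped.
- bash: ./expensive-build.sh     # hypothetical placeholder for the real build
  displayName: Run build
  condition: and(succeeded(), not(variables.SKIP_JOB))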

2 files changed (+38, -18 lines)

.azure-pipelines/pr.yml (+9, -11)
@@ -20,14 +20,12 @@ jobs:
       mingw-check:
         IMAGE: mingw-check
 
-# TODO: enable this job if the commit message matches this regex, need tools
-# figure out how to get the current commit message on azure and stick it in a
-# condition somewhere
-# if: commit_message =~ /(?i:^update.*\b(rls|rustfmt|clippy|miri|cargo)\b)/
-# - job: Linux-x86_64-gnu-tools
-#   pool:
-#     vmImage: ubuntu-16.04
-#   steps:
-#     - template: steps/run.yml
-#       variables:
-#         IMAGE: x86_64-gnu-tools
+- job: LinuxTools
+  pool:
+    vmImage: ubuntu-16.04
+  steps:
+  - template: steps/run.yml
+    parameters:
+      only_on_updated_submodules: 'yes'
+    variables:
+      IMAGE: x86_64-gnu-tools
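
For contrast, a hypothetical job that keeps calling the same template without opting in would look like the sketch below (the job name and IMAGE value are made up). Because only_on_updated_submodules defaults to an empty string in steps/run.yml, the ${{ if }} block shown in the next file is dropped at template-expansion time for such jobs, so the decider step does not exist in them at all rather than existing and being skipped.

- job: SomeOtherBuilder            # illustrative job name
  pool:
    vmImage: ubuntu-16.04
  steps:
  - template: steps/run.yml        # no parameters: block, so the template's
    variables:                     # default '' applies
      IMAGE: some-other-image      # illustrative image name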

.azure-pipelines/steps/run.yml (+29, -7)
@@ -6,6 +6,11 @@
 #
 # Check travis config for `gdb --batch` command to print all crash logs
 
+parameters:
+  # When this parameter is set to anything other than an empty string the tests
+  # will only be executed when the commit updates submodules
+  only_on_updated_submodules: ''
+
 steps:
 
 # Disable automatic line ending conversion, which is enabled by default on
@@ -21,6 +26,22 @@ steps:
 - checkout: self
   fetchDepth: 2
 
+# Set the SKIP_JOB environment variable if this job is supposed to only run
+# when submodules are updated and they were not. The following time consuming
+# tasks will be skipped when the environment variable is present.
+- ${{ if parameters.only_on_updated_submodules }}:
+  - bash: |
+      set -e
+      # Submodules pseudo-files inside git have the 160000 permissions, so when
+      # those files are present in the diff a submodule was updated.
+      if git diff HEAD^ | grep "^index .* 160000" >/dev/null 2>&1; then
+        echo "Executing the job since submodules are updated"
+      else
+        echo "Not executing this job since no submodules were updated"
+        echo "##vso[task.setvariable variable=SKIP_JOB;]1"
+      fi
+    displayName: Decide whether to run this job
+
 # Spawn a background process to collect CPU usage statistics which we'll upload
 # at the end of the build. See the comments in the script here for more
 # information.
@@ -71,7 +92,7 @@ steps:
     echo '{"ipv6":true,"fixed-cidr-v6":"fd9a:8454:6789:13f7::/64"}' | sudo tee /etc/docker/daemon.json
     sudo service docker restart
   displayName: Enable IPv6
-  condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
+  condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Linux'))
 
 # Disable automatic line ending conversion (again). On Windows, when we're
 # installing dependencies, something switches the git configuration directory or
@@ -87,12 +108,12 @@ steps:
     set -e
     mkdir -p $HOME/rustsrc
     $BUILD_SOURCESDIRECTORY/src/ci/init_repo.sh . $HOME/rustsrc
-  condition: and(succeeded(), ne(variables['Agent.OS'], 'Windows_NT'))
+  condition: and(succeeded(), not(variables.SKIP_JOB), ne(variables['Agent.OS'], 'Windows_NT'))
   displayName: Check out submodules (Unix)
 - script: |
     if not exist D:\cache\rustsrc\NUL mkdir D:\cache\rustsrc
     sh src/ci/init_repo.sh . /d/cache/rustsrc
-  condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'))
+  condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Windows_NT'))
   displayName: Check out submodules (Windows)
 
 # See also the disable for autocrlf above, this just checks that it worked
@@ -124,10 +145,10 @@ steps:
     retry pip3 install awscli --upgrade --user
     echo "##vso[task.prependpath]$HOME/.local/bin"
   displayName: Install awscli (Linux)
-  condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
+  condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Linux'))
 - script: pip install awscli
   displayName: Install awscli (non-Linux)
-  condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
+  condition: and(succeeded(), not(variables.SKIP_JOB), ne(variables['Agent.OS'], 'Linux'))
 
 # Configure our CI_JOB_NAME variable which log analyzers can use for the main
 # step to see what's going on.
@@ -145,7 +166,7 @@ steps:
     python2.7 "$BUILD_SOURCESDIRECTORY/src/tools/publish_toolstate.py" "$(git rev-parse HEAD)" "$(git log --format=%s -n1 HEAD)" "" ""
     cd ..
     rm -rf rust-toolstate
-  condition: and(succeeded(), eq(variables['IMAGE'], 'mingw-check'))
+  condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['IMAGE'], 'mingw-check'))
   displayName: Verify the publish_toolstate script works
 
 - bash: |
@@ -166,6 +187,7 @@ steps:
     SRC: .
     AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
     TOOLSTATE_REPO_ACCESS_TOKEN: $(TOOLSTATE_REPO_ACCESS_TOKEN)
+  condition: and(succeeded(), not(variables.SKIP_JOB))
   displayName: Run build
 
 # If we're a deploy builder, use the `aws` command to publish everything to our
@@ -188,7 +210,7 @@ steps:
     retry aws s3 cp --no-progress --recursive --acl public-read ./$upload_dir s3://$DEPLOY_BUCKET/$deploy_dir/$BUILD_SOURCEVERSION
   env:
     AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
-  condition: and(succeeded(), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
+  condition: and(succeeded(), not(variables.SKIP_JOB), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
   displayName: Upload artifacts
 
 # Upload CPU usage statistics that we've been gathering this whole time. Always
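
The decider step relies on how git records submodules: a submodule is stored as a gitlink entry with file mode 160000, so a commit that bumps a submodule produces an "index ... 160000" header in its diff, which is exactly what the grep matches. A hypothetical example of `git diff HEAD^` output for such a bump (the path and hashes are placeholders):

diff --git a/src/tools/cargo b/src/tools/cargo
index 1111111..2222222 160000
--- a/src/tools/cargo
+++ b/src/tools/cargo
@@ -1 +1 @@
-Subproject commit 1111111111111111111111111111111111111111
+Subproject commit 2222222222222222222222222222222222222222

The shallow checkout near the top of this file uses fetchDepth: 2, which fetches HEAD along with its parent(s), so HEAD^ is available for this comparison even on a fresh agent.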
