
Commit 3797f99

Auto merge of #134095 - Kobzol:datadog-lockfile, r=<try>

[CI] Use a lockfile for installing the `datadog` package

Without a lockfile, installing the package could fail to compile when its dependencies changed. Reported [here](https://rust-lang.zulipchat.com/#narrow/channel/242791-t-infra/topic/CI.20failure.20in.20DataDog.20upload).

r? `@jdno`

try-job: x86_64-msvc-ext2

2 parents: eedc229 + 0db3a9a
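
The fix pins the DataDog CLI through an npm lockfile committed to the repository, instead of resolving the newest 2.x release on every CI run. A minimal sketch of that workflow is below; the manifest and lockfile names under src/ci (presumably package.json and package-lock.json, which would also account for most of the +5043 added lines) and the exact version spec are assumptions, since those files are not shown in this section.

    # Run locally when adding or upgrading the dependency, then commit the results
    # (assumed files: src/ci/package.json and src/ci/package-lock.json).
    cd src/ci
    npm install @datadog/datadog-ci@^2.x.x   # writes package.json and package-lock.json

    # Run in CI: a clean install of exactly the versions recorded in the lockfile.
    # npm ci fails if package.json and package-lock.json disagree, so a new upstream
    # release cannot change what gets installed until the lockfile itself is updated.
    npm ci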

File tree: 4 files changed (+5043 -35 lines)

.github/workflows/ci.yml (+28 -27)
@@ -192,12 +192,12 @@ jobs:
       - name: ensure the stable version number is correct
         run: src/ci/scripts/verify-stable-version-number.sh

-      - name: run the build
+      # - name: run the build
         # Redirect stderr to stdout to avoid reordering the two streams in the GHA logs.
-        run: src/ci/scripts/run-build-from-ci.sh 2>&1
-        env:
-          AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}
+      #   run: src/ci/scripts/run-build-from-ci.sh 2>&1
+      #   env:
+      #     AWS_ACCESS_KEY_ID: ${{ env.CACHES_AWS_ACCESS_KEY_ID }}
+      #     AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.CACHES_AWS_ACCESS_KEY_ID)] }}

       - name: create github artifacts
         run: src/ci/scripts/create-doc-artifacts.sh
@@ -207,26 +207,26 @@ jobs:
           echo "disk usage:"
           df -h

-      - name: upload artifacts to github
-        uses: actions/upload-artifact@v4
-        with:
-          # name is set in previous step
-          name: ${{ env.DOC_ARTIFACT_NAME }}
-          path: obj/artifacts/doc
-          if-no-files-found: ignore
-          retention-days: 5
-
-      - name: upload artifacts to S3
-        run: src/ci/scripts/upload-artifacts.sh
-        env:
-          AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
-        # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
-        # builders *should* have the AWS credentials available. Still, explicitly
-        # adding the condition is helpful as this way CI will not silently skip
-        # deploying artifacts from a dist builder if the variables are misconfigured,
-        # erroring about invalid credentials instead.
-        if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
+      # - name: upload artifacts to github
+      #   uses: actions/upload-artifact@v4
+      #   with:
+      #     # name is set in previous step
+      #     name: ${{ env.DOC_ARTIFACT_NAME }}
+      #     path: obj/artifacts/doc
+      #     if-no-files-found: ignore
+      #     retention-days: 5
+      #
+      # - name: upload artifacts to S3
+      #   run: src/ci/scripts/upload-artifacts.sh
+      #   env:
+      #     AWS_ACCESS_KEY_ID: ${{ env.ARTIFACTS_AWS_ACCESS_KEY_ID }}
+      #     AWS_SECRET_ACCESS_KEY: ${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}
+      #   # Adding a condition on DEPLOY=1 or DEPLOY_ALT=1 is not needed as all deploy
+      #   # builders *should* have the AWS credentials available. Still, explicitly
+      #   # adding the condition is helpful as this way CI will not silently skip
+      #   # deploying artifacts from a dist builder if the variables are misconfigured,
+      #   # erroring about invalid credentials instead.
+      #   if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'

       - name: upload job metrics to DataDog
         if: needs.calculate_matrix.outputs.run_type != 'pr'
@@ -235,8 +235,9 @@ jobs:
           DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
           DD_GITHUB_JOB_NAME: ${{ matrix.name }}
         run: |
-          npm install -g @datadog/datadog-ci@^2.x.x
-          python3 src/ci/scripts/upload-build-metrics.py build/cpu-usage.csv
+          cd src/ci
+          npm ci
+          python3 scripts/upload-build-metrics.py ../../build/cpu-usage.csv

   # This job isused to tell bors the final status of the build, as there is no practical way to detect
   # when a workflow is successful listening to webhooks only in our current bors implementation (homu).
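
Two notes on the diff above. First, the build and artifact-upload steps are commented out rather than deleted; given the r=<try> marker and the try-job line in the commit message, this is presumably a temporary measure so the try builder only exercises the DataDog upload, not a permanent removal. Second, the substantive change is the last hunk: instead of installing whatever @datadog/datadog-ci@^2.x.x resolves to at run time, the step now changes into src/ci and runs npm ci against the committed lockfile, which is also why the CSV path gains a ../../ prefix. The before/after of that run block, pulled out of the hunk for readability:

    # Before: global install, dependency versions resolved at run time
    npm install -g @datadog/datadog-ci@^2.x.x
    python3 src/ci/scripts/upload-build-metrics.py build/cpu-usage.csv

    # After: deterministic install from the lockfile committed under src/ci
    cd src/ci
    npm ci
    python3 scripts/upload-build-metrics.py ../../build/cpu-usage.csv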
