about summary refs log tree commit diff
path: root/ops/pipelines/static-pipeline.yaml
diff options
context:
space:
mode:
Diffstat (limited to 'ops/pipelines/static-pipeline.yaml')
-rw-r--r-- ops/pipelines/static-pipeline.yaml | 35
1 file changed, 29 insertions, 6 deletions
diff --git a/ops/pipelines/static-pipeline.yaml b/ops/pipelines/static-pipeline.yaml
index 23a1fba4f2..af4f9d784e 100644
--- a/ops/pipelines/static-pipeline.yaml
+++ b/ops/pipelines/static-pipeline.yaml
@@ -4,6 +4,8 @@
 # If something fails during the creation of the pipeline, the fallback
 # is executed instead which will simply report an error to Gerrit.
 ---
+env:
+  BUILDKITE_TOKEN_PATH: /run/agenix/buildkite-graphql-token
 steps:
   # Run pipeline for tvl-kit when new commits arrive on canon. Since
   # it is not part of the depot build tree, this is a useful
@@ -15,6 +17,16 @@ steps:
     build:
       message: "Verification triggered by ${BUILDKITE_COMMIT}"
 
+  # Run pipeline for tvix when new commits arrive on canon. Since
+  # it is not part of the depot build tree, this is a useful
+  # verification to ensure we don't break external things (too much).
+  - trigger: "tvix"
+    async: true
+    label: ":fork:"
+    branches: "refs/heads/canon"
+    build:
+      message: "Verification triggered by ${BUILDKITE_COMMIT}"
+
   # Create a revision number for the current commit for builds on
   # canon.
   #
@@ -23,6 +35,11 @@ steps:
   #
   # Revision numbers are defined as the number of commits in the
   # lineage of HEAD, following only the first parent of merges.
+  #
+  # Note that git does not fetch these refs by default, instead
+  # you'll have to modify your git config using
+  # `git config --add remote.origin.fetch '+refs/r/*:refs/r/*'`.
+  # The refs are available after the next `git fetch`.
   - label: ":git:"
     branches: "refs/heads/canon"
     command: |
@@ -32,12 +49,14 @@ steps:
   # Generate & upload dynamic build steps
   - label: ":llama:"
     key: "pipeline-gen"
+    concurrency_group: 'depot-nix-eval'
+    concurrency: 5 # much more than this and whitby will OOM
     command: |
       set -ue
 
       if test -n "$${GERRIT_CHANGE_URL-}"; then
         echo "This is a build of [cl/$$GERRIT_CHANGE_ID]($$GERRIT_CHANGE_URL) (at patchset #$$GERRIT_PATCHSET)" | \
-          buildkite-agent annotate
+          buildkite-agent annotate --context cl-annotation
       fi
 
       # Attempt to fetch a target map from a parent commit on canon,
@@ -50,7 +69,11 @@ steps:
         PIPELINE_ARGS="--arg parentTargetMap tmp/parent-target-map.json"
       fi
 
-      nix-build -A ops.pipelines.depot -o pipeline --show-trace $$PIPELINE_ARGS
+      nix-build --option restrict-eval true --include "depot=$${PWD}" \
+        --include "store=/nix/store" \
+        --allowed-uris 'https://' \
+        -A ops.pipelines.depot \
+        -o pipeline --show-trace $$PIPELINE_ARGS
 
       # Steps need to be uploaded in reverse order because pipeline
       # upload prepends instead of appending.
@@ -85,7 +108,7 @@ steps:
 
       readonly FAILED_JOBS=$(curl 'https://graphql.buildkite.com/v1' \
         --silent \
-        -H "Authorization: Bearer $(cat /run/agenix/buildkite-graphql-token)" \
+        -H "Authorization: Bearer $(cat ${BUILDKITE_TOKEN_PATH})" \
         -d "{\"query\": \"query BuildStatusQuery { build(uuid: \\\"$BUILDKITE_BUILD_ID\\\") { jobs(passed: false) { count } } }\"}" | \
         jq -r '.data.build.jobs.count')
 
@@ -95,8 +118,8 @@ steps:
         exit 1
       fi
 
-  # After duck, on success, upload and run any post-build steps that
-  # were output by the dynamic pipeline.
+  # After duck, on success, upload and run any release steps that were
+  # output by the dynamic pipeline.
   - label: ":arrow_heading_down:"
     depends_on:
       - step: ":duck:"
@@ -106,6 +129,6 @@ steps:
 
       buildkite-agent artifact download "pipeline/*" .
 
-      find ./pipeline -name 'post-chunk-*.json' | tac | while read chunk; do
+      find ./pipeline -name 'release-chunk-*.json' | tac | while read chunk; do
         buildkite-agent pipeline upload $$chunk
       done