diff options
author | Vincent Ambo <mail@tazj.in> | 2021-12-28T10·20+0300 |
---|---|---|
committer | tazjin <mail@tazj.in> | 2021-12-28T15·34+0000 |
commit | b7ef2a579b513a8d1a8deb9e864f8f664ee44d2a (patch) | |
tree | d52b0b010db650389ed95731f21e7a8492c38288 /ops | |
parent | 88d7075b30b65deec1c7cf94b46630dc3cf36be0 (diff) |
refactor: Generalise pipeline generation in //nix/buildkite r/3491
Extracts the logic for generating our Buildkite pipeline (which has been copy&pasted and slightly modified in some places outside of depot) into a generic //nix/buildkite library. This should cause no change in functionality. Change-Id: Iad3201713945de41279b39e4f1b847f697c179f7 Reviewed-on: https://cl.tvl.fyi/c/depot/+/4726 Autosubmit: tazjin <mail@tazj.in> Tested-by: BuildkiteCI Reviewed-by: sterni <sternenseemann@systemli.org>
Diffstat (limited to 'ops')
-rw-r--r-- | ops/pipelines/depot.nix | 122 |
1 file changed, 7 insertions, 115 deletions
diff --git a/ops/pipelines/depot.nix b/ops/pipelines/depot.nix index 5843423d05fa..63b1bc067a91 100644 --- a/ops/pipelines/depot.nix +++ b/ops/pipelines/depot.nix @@ -1,80 +1,8 @@ # This file configures the primary build pipeline used for the # top-level list of depot targets. -# -# It outputs a "YAML" (actually JSON) file which is evaluated and -# submitted to Buildkite at the start of each build. This means we can -# dynamically configure the pipeline execution here. -{ depot, lib, pkgs, ... }: +{ depot, ... }: let - inherit (builtins) - attrValues - concatStringsSep - foldl' - length - map - mapAttrs - toJSON; - - inherit (pkgs) runCommandNoCC symlinkJoin writeText; - - # Create an expression that builds the target at the specified - # location. - mkBuildExpr = target: - let - descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})"; - targetExpr = foldl' descend "import ./. {}" target.__readTree; - subtargetExpr = descend targetExpr target.__subtarget; - in if target ? __subtarget then subtargetExpr else targetExpr; - - # Create a pipeline label from the targets tree location. - mkLabel = target: - let label = concatStringsSep "/" target.__readTree; - in if target ? __subtarget - then "${label}:${target.__subtarget}" - else label; - - # Create a pipeline step from a single target. - mkStep = target: { - command = let - drvPath = builtins.unsafeDiscardStringContext target.drvPath; - in lib.concatStringsSep " " [ - # First try to realise the drvPath of the target so we don't evaluate twice. - # Nix has no concept of depending on a derivation file without depending on - # at least one of its `outPath`s, so we need to discard the string context - # if we don't want to build everything during pipeline construction. - "nix-store --realise '${drvPath}'" - # Since we don't gcroot the derivation files, they may be deleted by the - # garbage collector. In that case we can reevaluate and build the attribute - # using nix-build. - "|| (test ! 
-f '${drvPath}' && nix-build -E '${mkBuildExpr target}' --show-trace)" - ]; - label = ":nix: ${mkLabel target}"; - - # Skip build steps if their out path has already been built. - skip = let - shouldSkip = with builtins; - # Only skip in real Buildkite builds - (getEnv "BUILDKITE_BUILD_ID" != "") && - # Always build everything for the canon branch. - (getEnv "BUILDKITE_BRANCH" != "refs/heads/canon") && - # Discard string context to avoid realising the store path during - # pipeline construction. - (pathExists (unsafeDiscardStringContext target.outPath)); - in if shouldSkip then "Target was already built." else false; - - # Add a "fake" dependency on the initial static pipeline step. When - # uploading a pipeline dynamically, an implicit dependency on the uploading - # step is added to all newly created build steps. Since we are uploading in - # batches this stops the jobs in the first batch from running before all - # batches have been uploaded. - # - # By setting an explicit dependency on a step that has always completed at - # this point, we override that behaviour and allow the steps to start - # running already. - depends_on = ":init:"; - }; - # Protobuf check step which validates that changes to .proto files # between revisions don't cause backwards-incompatible or otherwise # flawed changes. @@ -82,45 +10,9 @@ let command = "${depot.nix.bufCheck}/bin/ci-buf-check"; label = ":water_buffalo:"; }; - - # All pipeline steps before batching them into smaller chunks. - allSteps = - # Create build steps for each CI target - (map mkStep depot.ci.targets) - - ++ [ - # Simultaneously run protobuf checks - protoCheck - ]; - - # Helper function to inelegantly divide a list into chunks of at - # most n elements. - # - # This works by assigning each element a chunk ID based on its - # index, and then grouping all elements by their chunk ID. 
- chunksOf = n: list: let - chunkId = idx: toString (idx / n + 1); - assigned = lib.imap1 (idx: value: { inherit value ; chunk = chunkId idx; }) list; - unchunk = mapAttrs (_: elements: map (e: e.value) elements); - in unchunk (lib.groupBy (e: e.chunk) assigned); - - # Define a build pipeline chunk as a JSON file, using the pipeline - # format documented on - # https://buildkite.com/docs/pipelines/defining-steps. - makePipelineChunk = chunkId: chunk: rec { - filename = "chunk-${chunkId}.json"; - path = writeText filename (toJSON { - steps = chunk; - }); - }; - - pipelineChunks = attrValues (mapAttrs makePipelineChunk (chunksOf 256 allSteps)); - -in runCommandNoCC "depot-pipeline" {} '' - mkdir $out - echo "Generated ${toString (length pipelineChunks)} pipeline chunks" - ${ - lib.concatMapStringsSep "\n" - (chunk: "cp ${chunk.path} $out/${chunk.filename}") pipelineChunks - } -'' +in depot.nix.buildkite.mkPipeline { + headBranch = "refs/heads/canon"; + drvTargets = depot.ci.targets; + skipIfBuilt = true; + additionalSteps = [ protoCheck ]; +} |