# Logic for generating Buildkite pipelines from Nix build targets read
# by //nix/readTree.
#
# It outputs a "YAML" (actually JSON) file which is evaluated and
# submitted to Buildkite at the start of each build.
#
# The structure of the file that is being created is documented here:
#   https://buildkite.com/docs/pipelines/defining-steps
{ pkgs, ... }:

let
  inherit (builtins)
    attrValues
    concatMap
    concatStringsSep
    filter
    foldl'
    length
    mapAttrs
    toJSON;

  inherit (pkgs) lib runCommandNoCC writeText;
in rec {
  # Creates a Nix expression that yields the target at the specified
  # location in the repository.
  #
  # This makes a distinction between normal targets (which physically
  # exist in the repository) and subtargets (which are "virtual"
  # targets exposed by a physical one) to make it clear in the build
  # output which is which.
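  #
  # For example, for a hypothetical physical target at //foo/bar with
  # subtarget "baz", the generated expression is roughly:
  #
  #   builtins.getAttr "baz"
  #     (builtins.getAttr "bar" (builtins.getAttr "foo" (import ./. {})))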
  mkBuildExpr = target:
    let
      descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
      targetExpr = foldl' descend "import ./. {}" target.__readTree;
      subtargetExpr = descend targetExpr target.__subtarget;
    in if target ? __subtarget then subtargetExpr else targetExpr;

  # Create a pipeline label from the target's tree location.
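  #
  # For example (hypothetical targets):
  #
  #   mkLabel { __readTree = [ "foo" "bar" ]; }
  #     => "foo/bar"
  #   mkLabel { __readTree = [ "foo" "bar" ]; __subtarget = "baz"; }
  #     => "foo/bar:baz"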
  mkLabel = target:
    let label = concatStringsSep "/" target.__readTree;
    in if target ? __subtarget
      then "${label}:${target.__subtarget}"
      else label;

  # Skip build steps if their out path has already been built.
  #
  # Returns either a skip reason (a string) or false, matching the
  # format of Buildkite's `skip` step attribute.
  skip = headBranch: target: let
    shouldSkip = with builtins;
      # Only skip in real Buildkite builds
      (getEnv "BUILDKITE_BUILD_ID" != "") &&
      # Always build everything for the canon branch.
      (getEnv "BUILDKITE_BRANCH" != headBranch) &&
      # Discard string context to avoid realising the store path during
      # pipeline construction.
      (pathExists (unsafeDiscardStringContext target.outPath));
    in if shouldSkip then "Target was already built." else false;

  # Create a pipeline step from a single target.
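  #
  # The resulting step looks roughly like this (values illustrative):
  #
  #   {
  #     label = ":nix: foo/bar:baz";
  #     skip = false;
  #     command = "nix-store --realise '<drv path>' "
  #       + "|| (test ! -f '<drv path>' && nix-build -E '<build expr>' --show-trace)";
  #     depends_on = ":init:";
  #   }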
  mkStep = headBranch: skipIfBuilt: target: {
    label = ":nix: ${mkLabel target}";
    skip = if skipIfBuilt then skip headBranch target else false;

    command = let
      drvPath = builtins.unsafeDiscardStringContext target.drvPath;
    in concatStringsSep " " [
      # First try to realise the drvPath of the target so we don't evaluate twice.
      # Nix has no concept of depending on a derivation file without depending on
      # at least one of its `outPath`s, so we need to discard the string context
      # if we don't want to build everything during pipeline construction.
      "nix-store --realise '${drvPath}'"
      # Since we don't gcroot the derivation files, they may be deleted by the
      # garbage collector. In that case we can reevaluate and build the attribute
      # using nix-build.
      "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr target}' --show-trace)"
    ];

    # Add a dependency on the initial static pipeline step which
    # always runs. This allows build steps uploaded in batches to
    # start running before all batches have been uploaded.
    depends_on = ":init:";
  };

  # Helper function to inelegantly divide a list into chunks of at
  # most n elements.
  #
  # This works by assigning each element a chunk ID based on its
  # index, and then grouping all elements by their chunk ID.
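  #
  # For example (note that with this scheme the first chunk receives at
  # most n - 1 elements):
  #
  #   chunksOf 3 [ "a" "b" "c" "d" "e" ]
  #     => { "1" = [ "a" "b" ]; "2" = [ "c" "d" "e" ]; }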
  chunksOf = n: list: let
    chunkId = idx: toString (idx / n + 1);
    assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
    unchunk = mapAttrs (_: elements: map (e: e.value) elements);
  in unchunk (lib.groupBy (e: e.chunk) assigned);

  # Define a build pipeline chunk as a JSON file, using the pipeline
  # format documented on
  # https://buildkite.com/docs/pipelines/defining-steps.
  makePipelineChunk = chunkId: chunk: rec {
    filename = "chunk-${chunkId}.json";
    path = writeText filename (toJSON {
      steps = chunk;
    });
  };

  # Split the pipeline into chunks of at most 256 steps each, which
  # are uploaded sequentially. This works around a limitation in the
  # Buildkite backend, which struggles to process more than a certain
  # number of steps in a single upload.
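  #
  # For example, a pipeline with 300 steps yields two chunk files
  # (sizes follow the chunksOf scheme above; paths illustrative):
  #
  #   [ { filename = "chunk-1.json"; path = <store path>; }
  #     { filename = "chunk-2.json"; path = <store path>; } ]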
  pipelineChunks = steps:
    attrValues (mapAttrs makePipelineChunk (chunksOf 256 steps));

  # Create a pipeline structure for the given targets.
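  #
  # Example invocation (illustrative only; the attribute used for
  # drvTargets and the additional step are assumptions):
  #
  #   mkPipeline {
  #     headBranch = "canon";
  #     drvTargets = [ depot.foo.bar ];
  #     skipIfBuilt = true;
  #     additionalSteps = [ { command = "my-lint-check"; label = "lint"; } ];
  #   }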
  mkPipeline = {
    # HEAD branch of the repository on which release steps, GC
    # anchoring and other "mainline only" steps should run.
    headBranch,

    # List of derivations as read by readTree (in most cases just the
    # output of readTree.gather) that should be built in Buildkite.
    #
    # These are scheduled as the first build steps and run as fast as
    # possible, in order, without any concurrency restrictions.
    drvTargets,

    # Should build steps be skipped (on non-HEAD builds) if the output
    # path has already been built?
    skipIfBuilt ? false,

    # A list of plain Buildkite step structures to run alongside the
    # build for all drvTargets, but before proceeding with any
    # post-build actions such as status reporting.
    #
    # Can be used for things like code formatting checks.
    additionalSteps ? [],

    # A list of plain Buildkite step structures to run after all
    # previous steps succeeded.
    #
    # Can be used for status reporting steps and the like.
    postBuildSteps ? []
  }: let
    mkStep' = mkStep headBranch skipIfBuilt;
    steps =
      # Add build steps for each derivation target.
      (map mkStep' drvTargets)

      # Add additional steps (if set).
      ++ additionalSteps

      # Wait for all previous checks to complete.
      ++ [({
        wait = null;
        continue_on_failure = true;
      })]

      # Run post-build steps for status reporting and co.
      ++ postBuildSteps;
    chunks = pipelineChunks steps;
  in runCommandNoCC "buildkite-pipeline" {} ''
    mkdir $out
    echo "Generated ${toString (length chunks)} pipeline chunks"
    ${
      lib.concatMapStringsSep "\n"
        (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
    }
  '';
}