# This file configures the primary build pipeline used for the
# top-level list of depot targets.
#
# It outputs a "YAML" (actually JSON) file which is evaluated and
# submitted to Buildkite at the start of each build. This means we can
# dynamically configure the pipeline execution here.
{ depot, lib, pkgs, ... }:

let
  inherit (builtins)
    attrValues
    concatStringsSep
    foldl'
    length
    map
    mapAttrs
    toJSON;

  inherit (pkgs) runCommandNoCC symlinkJoin writeText;

  # Create an expression that builds the target at the specified
  # location.
  mkBuildExpr = target:
    let
      descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
      targetExpr = foldl' descend "import ./. {}" target.__readTree;
      subtargetExpr = descend targetExpr target.__subtarget;
    in if target ? __subtarget then subtargetExpr else targetExpr;
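
  # For illustration, a hypothetical target read from ./third_party/git
  # (i.e. __readTree = [ "third_party" "git" ]) with no subtarget would
  # yield the build expression:
  #
  #   builtins.getAttr "git" (builtins.getAttr "third_party" (import ./. {}))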

  # Create a pipeline label from the target's tree location.
  mkLabel = target:
    let label = concatStringsSep "/" target.__readTree;
    in if target ? __subtarget
       then "${label}:${target.__subtarget}"
       else label;
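
  # For example, the hypothetical target above would be labelled
  # "third_party/git", or "third_party/git:some-subtarget" if it had a
  # subtarget named "some-subtarget".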

  # Create a pipeline step from a single target.
  mkStep = target: {
    command = let
      drvPath = builtins.unsafeDiscardStringContext target.drvPath;
    in lib.concatStringsSep " " [
      # First try to realise the drvPath of the target so we don't evaluate twice.
      # Nix has no concept of depending on a derivation file without depending on
      # at least one of its `outPath`s, so we need to discard the string context
      # if we don't want to build everything during pipeline construction.
      "nix-store --realise '${drvPath}'"

      # Since we don't gcroot the derivation files, they may be deleted by the
      # garbage collector. In that case we can re-evaluate and build the attribute
      # using nix-build.
      "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr target}' --show-trace)"
    ];
    label = ":nix: ${mkLabel target}";

    # Skip build steps if their out path has already been built.
    skip = let
      shouldSkip = with builtins;
        # Only skip in real Buildkite builds.
        (getEnv "BUILDKITE_BUILD_ID" != "") &&
        # Always build everything for the canon branch.
        (getEnv "BUILDKITE_BRANCH" != "refs/heads/canon") &&
        # Discard the string context to avoid realising the store path
        # during pipeline construction.
        (pathExists (unsafeDiscardStringContext target.outPath));
    in if shouldSkip then "Target was already built." else false;

    # Add a "fake" dependency on the initial static pipeline step. When
    # uploading a pipeline dynamically, an implicit dependency on the
    # uploading step is added to all newly created build steps. Since we
    # upload in batches, this would stop the jobs in the first batch from
    # running before all batches have been uploaded.
    #
    # By setting an explicit dependency on a step that is guaranteed to
    # have completed by this point, we override that behaviour and allow
    # the steps to start running immediately.
    depends_on = ":init:";
  };
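
  # The resulting step is roughly of the shape below (store path and
  # label are hypothetical):
  #
  #   {
  #     command = "nix-store --realise '/nix/store/...-foo.drv' || (test ! -f ... && nix-build -E '...' --show-trace)";
  #     label = ":nix: third_party/git";
  #     skip = false;
  #     depends_on = ":init:";
  #   }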

  # Protobuf check step which validates that changes to .proto files
  # between revisions don't cause backwards-incompatible or otherwise
  # flawed changes.
  protoCheck = {
    command = "${depot.nix.bufCheck}/bin/ci-buf-check";
    label = ":water_buffalo:";
  };

  # All pipeline steps before batching them into smaller chunks.
  allSteps =
    # Create build steps for each CI target.
    (map mkStep depot.ci.targets)
    ++ [
      # Simultaneously run protobuf checks.
      protoCheck
    ];

  # Helper function to inelegantly divide a list into chunks of at
  # most n elements.
  #
  # This works by assigning each element a chunk ID based on its
  # index, and then grouping all elements by their chunk ID.
  chunksOf = n: list: let
    chunkId = idx: toString (idx / n + 1);
    assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
    unchunk = mapAttrs (_: elements: map (e: e.value) elements);
  in unchunk (lib.groupBy (e: e.chunk) assigned);
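
  # For example (with hypothetical list elements):
  #
  #   chunksOf 2 [ "a" "b" "c" ] => { "1" = [ "a" ]; "2" = [ "b" "c" ]; }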

  # Define a build pipeline chunk as a JSON file, using the pipeline
  # format documented on
  # https://buildkite.com/docs/pipelines/defining-steps.
  makePipelineChunk = chunkId: chunk: rec {
    filename = "chunk-${chunkId}.json";
    path = writeText filename (toJSON {
      steps = chunk;
    });
  };
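
  # Each chunk's `path` is the store path of a file like chunk-1.json
  # whose contents are of the form { "steps": [ ... ] }.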

  pipelineChunks = attrValues (mapAttrs makePipelineChunk (chunksOf 256 allSteps));

in runCommandNoCC "depot-pipeline" {} ''
  mkdir $out
  echo "Generated ${toString (length pipelineChunks)} pipeline chunks"
  ${
    lib.concatMapStringsSep "\n"
      (chunk: "cp ${chunk.path} $out/${chunk.filename}") pipelineChunks
  }
''