From aa122cbae78ce97d60c0c98ba14df753d97e40b1 Mon Sep 17 00:00:00 2001 From: Vincent Ambo Date: Sun, 30 Jan 2022 19:06:58 +0300 Subject: style: format entire depot with nixpkgs-fmt This CL can be used to compare the style of nixpkgs-fmt against other formatters (nixpkgs, alejandra). Change-Id: I87c6abff6bcb546b02ead15ad0405f81e01b6d9e Reviewed-on: https://cl.tvl.fyi/c/depot/+/4397 Tested-by: BuildkiteCI Reviewed-by: sterni Reviewed-by: lukegb Reviewed-by: wpcarro Reviewed-by: Profpatsch Reviewed-by: kanepyork Reviewed-by: tazjin Reviewed-by: cynthia Reviewed-by: edef Reviewed-by: eta Reviewed-by: grfn --- nix/binify/default.nix | 2 +- nix/buildGo/default.nix | 91 +++--- nix/buildGo/example/default.nix | 5 +- nix/buildGo/external/default.nix | 50 ++-- nix/buildGo/proto.nix | 3 +- nix/buildLisp/default.nix | 277 ++++++++++-------- nix/buildLisp/example/default.nix | 19 +- nix/buildManPages/default.nix | 77 +++-- nix/buildkite/default.nix | 312 +++++++++++---------- nix/drvSeqL/default.nix | 26 +- nix/emptyDerivation/default.nix | 3 +- nix/emptyDerivation/emptyDerivation.nix | 12 +- nix/emptyDerivation/tests.nix | 18 +- nix/escapeExecline/default.nix | 19 +- nix/getBins/default.nix | 21 +- nix/getBins/tests.nix | 20 +- nix/mergePatch/default.nix | 140 ++++----- nix/netstring/attrsToKeyValList.nix | 4 +- nix/nint/default.nix | 14 +- nix/readTree/default.nix | 139 +++++---- nix/readTree/tests/default.nix | 18 +- .../tests/test-marker/directory-marked/default.nix | 2 +- .../directory-marked/nested/default.nix | 2 +- .../tests/test-marker/file-children/one.nix | 2 +- .../tests/test-marker/file-children/two.nix | 2 +- nix/renderMarkdown/default.nix | 2 +- nix/runExecline/default.nix | 5 +- nix/runExecline/runExecline.nix | 64 ++--- nix/runExecline/tests.nix | 121 +++++--- nix/runTestsuite/default.nix | 76 +++-- nix/sparseTree/default.nix | 20 +- nix/tag/default.nix | 84 +++--- nix/tag/tests.nix | 44 +-- nix/tailscale/default.nix | 3 +- nix/utils/default.nix | 39 +-- nix/utils/tests/default.nix | 54 ++-- nix/writeElispBin/default.nix | 8 +- nix/writeExecline/default.nix | 17 +- nix/writeScript/default.nix | 20 +- nix/writers/default.nix | 126 +++++---- nix/writers/tests/rust.nix | 40 +-- nix/yants/default.nix | 301 +++++++++++--------- nix/yants/tests/default.nix | 34 +-- 43 files changed, 1326 insertions(+), 1010 deletions(-) (limited to 'nix') diff --git a/nix/binify/default.nix b/nix/binify/default.nix index d40930fd33..a9900caf43 100644 --- a/nix/binify/default.nix +++ b/nix/binify/default.nix @@ -10,7 +10,7 @@ # with `binify { exe = …; name = "hello" }`. { exe, name }: -pkgs.runCommandLocal "${name}-bin" {} '' +pkgs.runCommandLocal "${name}-bin" { } '' mkdir -p $out/bin ln -sT ${lib.escapeShellArg exe} $out/bin/${lib.escapeShellArg name} '' diff --git a/nix/buildGo/default.nix b/nix/buildGo/default.nix index a2396dc3f7..0126a93d16 100644 --- a/nix/buildGo/default.nix +++ b/nix/buildGo/default.nix @@ -4,8 +4,9 @@ # buildGo provides Nix functions to build Go packages in the style of Bazel's # rules_go. -{ pkgs ? import {} -, ... }: +{ pkgs ? import { } +, ... 
+}: let inherit (builtins) @@ -40,7 +41,7 @@ let xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs)); - pathToName = p: replaceStrings ["/"] ["_"] (toString p); + pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p); # Add an `overrideGo` attribute to a function result that works # similar to `overrideAttrs`, but is used specifically for the @@ -52,49 +53,50 @@ let # High-level build functions # Build a Go program out of the specified files and dependencies. - program = { name, srcs, deps ? [], x_defs ? {} }: - let uniqueDeps = allDeps (map (d: d.gopkg) deps); - in runCommand name {} '' - ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs} - mkdir -p $out/bin - export GOROOT_FINAL=go - ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a - ''; + program = { name, srcs, deps ? [ ], x_defs ? { } }: + let uniqueDeps = allDeps (map (d: d.gopkg) deps); + in runCommand name { } '' + ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs} + mkdir -p $out/bin + export GOROOT_FINAL=go + ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a + ''; # Build a Go library assembled out of the specified files. # # This outputs both the sources and compiled binary, as both are # needed when downstream packages depend on it. - package = { name, srcs, deps ? [], path ? name, sfiles ? [] }: - let - uniqueDeps = allDeps (map (d: d.gopkg) deps); - - # The build steps below need to be executed conditionally for Go - # assembly if the analyser detected any *.s files. - # - # This is required for several popular packages (e.g. x/sys). - ifAsm = do: lib.optionalString (sfiles != []) do; - asmBuild = ifAsm '' - ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles} - ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles} - ''; - asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h"; - asmPack = ifAsm '' - ${go}/bin/go tool pack r $out/${path}.a ./asm.o - ''; - - gopkg = (runCommand "golib-${name}" {} '' - mkdir -p $out/${path} - ${srcList path (map (s: "${s}") srcs)} - ${asmBuild} - ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs} - ${asmPack} - '') // { - inherit gopkg; - goDeps = uniqueDeps; - goImportPath = path; - }; - in gopkg; + package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }: + let + uniqueDeps = allDeps (map (d: d.gopkg) deps); + + # The build steps below need to be executed conditionally for Go + # assembly if the analyser detected any *.s files. + # + # This is required for several popular packages (e.g. x/sys). 
+ ifAsm = do: lib.optionalString (sfiles != [ ]) do; + asmBuild = ifAsm '' + ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles} + ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles} + ''; + asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h"; + asmPack = ifAsm '' + ${go}/bin/go tool pack r $out/${path}.a ./asm.o + ''; + + gopkg = (runCommand "golib-${name}" { } '' + mkdir -p $out/${path} + ${srcList path (map (s: "${s}") srcs)} + ${asmBuild} + ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs} + ${asmPack} + '') // { + inherit gopkg; + goDeps = uniqueDeps; + goImportPath = path; + }; + in + gopkg; # Build a tree of Go libraries out of an external Go source # directory that follows the standard Go layout and was not built @@ -110,10 +112,10 @@ let }; # Build a Go library out of the specified protobuf definition. - proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [] }: (makeOverridable package) { + proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [ ] }: (makeOverridable package) { inherit name path; deps = [ protoLibs.goProto.proto.gopkg ] ++ extraDeps; - srcs = lib.singleton (runCommand "goproto-${name}.pb.go" {} '' + srcs = lib.singleton (runCommand "goproto-${name}.pb.go" { } '' cp ${proto} ${baseNameOf proto} ${protobuf}/bin/protoc --plugin=${protoLibs.goProto.protoc-gen-go.gopkg}/bin/protoc-gen-go \ --go_out=plugins=grpc,import_path=${baseNameOf path}:. ${baseNameOf proto} @@ -124,7 +126,8 @@ let # Build a Go library out of the specified gRPC definition. grpc = args: proto (args // { extraDeps = [ protoLibs.goGrpc.gopkg ]; }); -in { +in +{ # Only the high-level builder functions are exposed, but made # overrideable. program = makeOverridable program; diff --git a/nix/buildGo/example/default.nix b/nix/buildGo/example/default.nix index 99c0a7d79b..08da075e18 100644 --- a/nix/buildGo/example/default.nix +++ b/nix/buildGo/example/default.nix @@ -8,7 +8,7 @@ # users a quick introduction to how to use buildGo. let - buildGo = import ../default.nix {}; + buildGo = import ../default.nix { }; # Example use of buildGo.package, which creates an importable Go # package from the specified source files. @@ -29,7 +29,8 @@ let # Example use of buildGo.program, which builds an executable using # the specified name and dependencies (which in turn must have been # created via buildGo.package etc.) -in buildGo.program { +in +buildGo.program { name = "example"; srcs = [ diff --git a/nix/buildGo/external/default.nix b/nix/buildGo/external/default.nix index 6540faf04c..f713783a58 100644 --- a/nix/buildGo/external/default.nix +++ b/nix/buildGo/external/default.nix @@ -17,12 +17,12 @@ let inherit (pkgs) lib runCommand go jq ripgrep; - pathToName = p: replaceStrings ["/"] ["_"] (toString p); + pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p); # Collect all non-vendored dependencies from the Go standard library # into a file that can be used to filter them out when processing # dependencies. 
- stdlibPackages = runCommand "stdlib-pkgs.json" {} '' + stdlibPackages = runCommand "stdlib-pkgs.json" { } '' export HOME=$PWD export GOPATH=/dev/null ${go}/bin/go list std | \ @@ -45,20 +45,28 @@ let }; mkset = path: value: - if path == [] then { gopkg = value; } + if path == [ ] then { gopkg = value; } else { "${head path}" = mkset (tail path) value; }; last = l: elemAt l ((length l) - 1); toPackage = self: src: path: depMap: entry: let - localDeps = map (d: lib.attrByPath (d ++ [ "gopkg" ]) ( - throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'" - ) self) entry.localDeps; - - foreignDeps = map (d: lib.attrByPath [ d.path ] ( - throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'" - ) depMap) entry.foreignDeps; + localDeps = map + (d: lib.attrByPath (d ++ [ "gopkg" ]) + ( + throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'" + ) + self) + entry.localDeps; + + foreignDeps = map + (d: lib.attrByPath [ d.path ] + ( + throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'" + ) + depMap) + entry.foreignDeps; args = { srcs = map (f: src + ("/" + f)) entry.files; @@ -74,22 +82,28 @@ let binArgs = args // { name = (last ((lib.splitString "/" path) ++ entry.locator)); }; - in if entry.isCommand then (program binArgs) else (package libArgs); + in + if entry.isCommand then (program binArgs) else (package libArgs); -in { src, path, deps ? [] }: let +in +{ src, path, deps ? [ ] }: +let # Build a map of dependencies (from their import paths to their # derivation) so that they can be conditionally imported only in # sub-packages that require them. - depMap = listToAttrs (map (d: { - name = d.goImportPath; - value = d; - }) (map (d: d.gopkg) deps)); + depMap = listToAttrs (map + (d: { + name = d.goImportPath; + value = d; + }) + (map (d: d.gopkg) deps)); name = pathToName path; - analysisOutput = runCommand "${name}-structure.json" {} '' + analysisOutput = runCommand "${name}-structure.json" { } '' ${analyser}/bin/analyser -path ${path} -source ${src} > $out ''; analysis = fromJSON (readFile analysisOutput); -in lib.fix(self: foldl' lib.recursiveUpdate {} ( +in +lib.fix (self: foldl' lib.recursiveUpdate { } ( map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis )) diff --git a/nix/buildGo/proto.nix b/nix/buildGo/proto.nix index 4bd3a57276..6c37f758ce 100644 --- a/nix/buildGo/proto.nix +++ b/nix/buildGo/proto.nix @@ -8,7 +8,8 @@ let inherit (builtins) fetchGit map; -in rec { +in +rec { goProto = external { path = "github.com/golang/protobuf"; src = fetchGit { diff --git a/nix/buildLisp/default.nix b/nix/buildLisp/default.nix index ab23b30210..30b90d9049 100644 --- a/nix/buildLisp/default.nix +++ b/nix/buildLisp/default.nix @@ -4,7 +4,7 @@ # buildLisp is designed to enforce conventions and do away with the # free-for-all of existing Lisp build systems. -{ pkgs ? import {}, ... }: +{ pkgs ? import { }, ... }: let inherit (builtins) map elemAt match filter; @@ -70,11 +70,16 @@ let implFilter = impl: xs: let isFilterSet = x: builtins.isAttrs x && !(lib.isDerivation x); - in builtins.map ( - x: if isFilterSet x then x.${impl.name} or x.default else x - ) (builtins.filter ( - x: !(isFilterSet x) || x ? ${impl.name} || x ? default - ) xs); + in + builtins.map + ( + x: if isFilterSet x then x.${impl.name} or x.default else x + ) + (builtins.filter + ( + x: !(isFilterSet x) || x ? ${impl.name} || x ? 
default + ) + xs); # Generates lisp code which instructs the given lisp implementation to load # all the given dependencies. @@ -103,17 +108,21 @@ let # 'allDeps' flattens the list of dependencies (and their # dependencies) into one ordered list of unique deps which # all use the given implementation. - allDeps = impl: deps: let - # The override _should_ propagate itself recursively, as every derivation - # would only expose its actually used dependencies. Use implementation - # attribute created by withExtras if present, override in all other cases - # (mainly bundled). - deps' = builtins.map (dep: dep."${impl.name}" or (dep.overrideLisp (_: { - implementation = impl; - }))) deps; - in (lib.toposort dependsOn (lib.unique ( - lib.flatten (deps' ++ (map (d: d.lispDeps) deps')) - ))).result; + allDeps = impl: deps: + let + # The override _should_ propagate itself recursively, as every derivation + # would only expose its actually used dependencies. Use implementation + # attribute created by withExtras if present, override in all other cases + # (mainly bundled). + deps' = builtins.map + (dep: dep."${impl.name}" or (dep.overrideLisp (_: { + implementation = impl; + }))) + deps; + in + (lib.toposort dependsOn (lib.unique ( + lib.flatten (deps' ++ (map (d: d.lispDeps) deps')) + ))).result; # 'allNative' extracts all native dependencies of a dependency list # to ensure that library load paths are set correctly during all @@ -138,42 +147,49 @@ let withExtras = f: args: let drv = (makeOverridable f) args; - in lib.fix (self: - drv.overrideLisp (old: - let - implementation = old.implementation or defaultImplementation; - brokenOn = old.brokenOn or []; - targets = lib.subtractLists (brokenOn ++ [ implementation.name ]) - (builtins.attrNames impls); - in { - passthru = (old.passthru or {}) // { - repl = implementation.lispWith [ self ]; - - # meta is done via passthru to minimize rebuilds caused by overriding - meta = (old.passthru.meta or {}) // { - inherit targets; - }; - } // builtins.listToAttrs (builtins.map (impl: { - inherit (impl) name; - value = self.overrideLisp (_: { - implementation = impl; - }); - }) (builtins.attrValues impls)); - }) // { - overrideLisp = new: withExtras f (args // new args); - }); + in + lib.fix (self: + drv.overrideLisp + (old: + let + implementation = old.implementation or defaultImplementation; + brokenOn = old.brokenOn or [ ]; + targets = lib.subtractLists (brokenOn ++ [ implementation.name ]) + (builtins.attrNames impls); + in + { + passthru = (old.passthru or { }) // { + repl = implementation.lispWith [ self ]; + + # meta is done via passthru to minimize rebuilds caused by overriding + meta = (old.passthru.meta or { }) // { + inherit targets; + }; + } // builtins.listToAttrs (builtins.map + (impl: { + inherit (impl) name; + value = self.overrideLisp (_: { + implementation = impl; + }); + }) + (builtins.attrValues impls)); + }) // { + overrideLisp = new: withExtras f (args // new args); + }); # 'testSuite' builds a Common Lisp test suite that loads all of srcs and deps, # and then executes expression to check its result - testSuite = { name, expression, srcs, deps ? [], native ? [], implementation }: + testSuite = { name, expression, srcs, deps ? [ ], native ? 
[ ], implementation }: let lispDeps = allDeps implementation (implFilter implementation deps); lispNativeDeps = allNative native lispDeps; filteredSrcs = implFilter implementation srcs; - in runCommandNoCC name { - LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps; - LANG = "C.UTF-8"; - } '' + in + runCommandNoCC name + { + LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps; + LANG = "C.UTF-8"; + } '' echo "Running test suite ${name}" ${implementation.runScript} ${ @@ -452,15 +468,16 @@ let } $@ ''; - bundled = name: runCommandNoCC "${name}-cllib" { - passthru = { - lispName = name; - lispNativeDeps = []; - lispDeps = []; - lispBinary = false; - repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ]; - }; - } '' + bundled = name: runCommandNoCC "${name}-cllib" + { + passthru = { + lispName = name; + lispNativeDeps = [ ]; + lispDeps = [ ]; + lispBinary = false; + repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ]; + }; + } '' mkdir -p "$out" ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/${name}.${impls.ecl.faslExt}" -t "$out" ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/lib${name}.a" "$out/${name}.a" @@ -489,7 +506,8 @@ let # See https://ccl.clozure.com/docs/ccl.html#building-definitions faslExt = - /**/ if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl" + /**/ + if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl" else if targetPlatform.isPowerPC && targetPlatform.is64bit then "p64fsl" else if targetPlatform.isx86_64 && targetPlatform.isLinux then "lx64fsl" else if targetPlatform.isx86_32 && targetPlatform.isLinux then "lx32fsl" @@ -572,7 +590,7 @@ let lib.optionalString (deps != []) "--load ${writeText "load.lisp" (impls.ccl.genLoadLisp lispDeps)}" } "$@" - ''; + ''; }; }; @@ -586,37 +604,42 @@ let library = { name , implementation ? defaultImplementation - , brokenOn ? [] # TODO(sterni): make this a warning + , brokenOn ? [ ] # TODO(sterni): make this a warning , srcs - , deps ? [] - , native ? [] + , deps ? [ ] + , native ? [ ] , tests ? null - , passthru ? {} + , passthru ? { } }: let filteredDeps = implFilter implementation deps; filteredSrcs = implFilter implementation srcs; lispNativeDeps = (allNative native filteredDeps); lispDeps = allDeps implementation filteredDeps; - testDrv = if ! isNull tests - then testSuite { - name = tests.name or "${name}-test"; - srcs = filteredSrcs ++ (tests.srcs or []); - deps = filteredDeps ++ (tests.deps or []); - expression = tests.expression; - inherit implementation; - } + testDrv = + if ! isNull tests + then + testSuite + { + name = tests.name or "${name}-test"; + srcs = filteredSrcs ++ (tests.srcs or [ ]); + deps = filteredDeps ++ (tests.deps or [ ]); + expression = tests.expression; + inherit implementation; + } else null; - in lib.fix (self: runCommandNoCC "${name}-cllib" { - LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps; - LANG = "C.UTF-8"; - passthru = passthru // { - inherit lispNativeDeps lispDeps; - lispName = name; - lispBinary = false; - tests = testDrv; - }; - } '' + in + lib.fix (self: runCommandNoCC "${name}-cllib" + { + LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps; + LANG = "C.UTF-8"; + passthru = passthru // { + inherit lispNativeDeps lispDeps; + lispName = name; + lispBinary = false; + tests = testDrv; + }; + } '' ${if ! isNull testDrv then "echo 'Test ${testDrv} succeeded'" else "echo 'No tests run'"} @@ -637,13 +660,13 @@ let program = { name , implementation ? defaultImplementation - , brokenOn ? [] # TODO(sterni): make this a warning + , brokenOn ? 
[ ] # TODO(sterni): make this a warning , main ? "${name}:main" , srcs - , deps ? [] - , native ? [] + , deps ? [ ] + , native ? [ ] , tests ? null - , passthru ? {} + , passthru ? { } }: let filteredSrcs = implFilter implementation srcs; @@ -656,45 +679,53 @@ let deps = lispDeps; srcs = filteredSrcs; }; - testDrv = if ! isNull tests - then testSuite { - name = tests.name or "${name}-test"; - srcs = - ( # testSuite does run implFilter as well - filteredSrcs ++ (tests.srcs or [])); - deps = filteredDeps ++ (tests.deps or []); - expression = tests.expression; - inherit implementation; - } + testDrv = + if ! isNull tests + then + testSuite + { + name = tests.name or "${name}-test"; + srcs = + ( + # testSuite does run implFilter as well + filteredSrcs ++ (tests.srcs or [ ]) + ); + deps = filteredDeps ++ (tests.deps or [ ]); + expression = tests.expression; + inherit implementation; + } else null; - in lib.fix (self: runCommandNoCC "${name}" { - nativeBuildInputs = [ makeWrapper ]; - LD_LIBRARY_PATH = libPath; - LANG = "C.UTF-8"; - passthru = passthru // { - lispName = name; - lispDeps = [ selfLib ]; - lispNativeDeps = native; - lispBinary = true; - tests = testDrv; - }; - } ('' - ${if ! isNull testDrv - then "echo 'Test ${testDrv} succeeded'" - else ""} - mkdir -p $out/bin - - ${implementation.runScript} ${ - implementation.genDumpLisp { - inherit name main; - deps = ([ selfLib ] ++ lispDeps); - } + in + lib.fix (self: runCommandNoCC "${name}" + { + nativeBuildInputs = [ makeWrapper ]; + LD_LIBRARY_PATH = libPath; + LANG = "C.UTF-8"; + passthru = passthru // { + lispName = name; + lispDeps = [ selfLib ]; + lispNativeDeps = native; + lispBinary = true; + tests = testDrv; + }; } - '' + lib.optionalString implementation.wrapProgram '' - wrapProgram $out/bin/${name} \ - --prefix LD_LIBRARY_PATH : "${libPath}" \ - --add-flags "\$NIX_BUILDLISP_LISP_ARGS --" - '')); + ('' + ${if ! isNull testDrv + then "echo 'Test ${testDrv} succeeded'" + else ""} + mkdir -p $out/bin + + ${implementation.runScript} ${ + implementation.genDumpLisp { + inherit name main; + deps = ([ selfLib ] ++ lispDeps); + } + } + '' + lib.optionalString implementation.wrapProgram '' + wrapProgram $out/bin/${name} \ + --prefix LD_LIBRARY_PATH : "${libPath}" \ + --add-flags "\$NIX_BUILDLISP_LISP_ARGS --" + '')); # 'bundled' creates a "library" which makes a built-in package available, # such as any of SBCL's sb-* packages or ASDF. By default this is done @@ -714,11 +745,13 @@ let }: implementation.bundled or (defaultBundled implementation) name; - in (makeOverridable bundled') { + in + (makeOverridable bundled') { inherit name; }; -in { +in +{ library = withExtras library; program = withExtras program; inherit bundled; diff --git a/nix/buildLisp/example/default.nix b/nix/buildLisp/example/default.nix index 6a518e4964..6add2676f1 100644 --- a/nix/buildLisp/example/default.nix +++ b/nix/buildLisp/example/default.nix @@ -14,15 +14,16 @@ let ]; }; -# Example Lisp program. -# -# This builds & writes an executable for a program using the library -# above to disk. -# -# By default, buildLisp.program expects the entry point to be -# `$name:main`. This can be overridden by configuring the `main` -# attribute. -in buildLisp.program { + # Example Lisp program. + # + # This builds & writes an executable for a program using the library + # above to disk. + # + # By default, buildLisp.program expects the entry point to be + # `$name:main`. This can be overridden by configuring the `main` + # attribute. 
+in +buildLisp.program { name = "example"; deps = [ libExample ]; diff --git a/nix/buildManPages/default.nix b/nix/buildManPages/default.nix index fe6d145f6c..746ed25182 100644 --- a/nix/buildManPages/default.nix +++ b/nix/buildManPages/default.nix @@ -13,9 +13,9 @@ let ; bins = getBins mandoc [ "mandoc" ] - // getBins gzip [ "gzip" ] - // getBins coreutils [ "mkdir" "ln" "cp" ] - ; + // getBins gzip [ "gzip" ] + // getBins coreutils [ "mkdir" "ln" "cp" ] + ; defaultGzip = true; @@ -35,41 +35,68 @@ let }: { content , ... - }@page: let + }@page: + let source = builtins.toFile (basename false page) content; - in runExecline (basename gzip page) {} ([ - (if requireLint then "if" else "foreground") [ - bins.mandoc "-mdoc" "-T" "lint" source + in + runExecline (basename gzip page) { } ([ + (if requireLint then "if" else "foreground") + [ + bins.mandoc + "-mdoc" + "-T" + "lint" + source ] - "importas" "out" "out" + "importas" + "out" + "out" ] ++ (if gzip then [ - "redirfd" "-w" "1" "$out" - bins.gzip "-c" source + "redirfd" + "-w" + "1" + "$out" + bins.gzip + "-c" + source ] else [ - bins.cp "--reflink=auto" source "$out" + bins.cp + "--reflink=auto" + source + "$out" ])); buildManPages = name: - { derivationArgs ? {} + { derivationArgs ? { } , gzip ? defaultGzip , ... }@args: pages: - runExecline "${name}-man-pages" { - inherit derivationArgs; - } ([ - "importas" "out" "out" - ] ++ lib.concatMap ({ name, section, content }@page: [ - "if" [ bins.mkdir "-p" (manDir page) ] - "if" [ - bins.ln "-s" - (buildManPage args page) - (target gzip page) - ] - ]) pages); + runExecline "${name}-man-pages" + { + inherit derivationArgs; + } + ([ + "importas" + "out" + "out" + ] ++ lib.concatMap + ({ name, section, content }@page: [ + "if" + [ bins.mkdir "-p" (manDir page) ] + "if" + [ + bins.ln + "-s" + (buildManPage args page) + (target gzip page) + ] + ]) + pages); -in { +in +{ __functor = _: buildManPages; single = buildManPage; diff --git a/nix/buildkite/default.nix b/nix/buildkite/default.nix index e0c947deae..d17b5c86c4 100644 --- a/nix/buildkite/default.nix +++ b/nix/buildkite/default.nix @@ -29,7 +29,8 @@ let unsafeDiscardStringContext; inherit (pkgs) lib runCommandNoCC writeText; -in rec { +in +rec { # Creates a Nix expression that yields the target at the specified # location in the repository. # @@ -42,14 +43,15 @@ in rec { descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})"; targetExpr = foldl' descend "import ./. {}" target.__readTree; subtargetExpr = descend targetExpr target.__subtarget; - in if target ? __subtarget then subtargetExpr else targetExpr; + in + if target ? __subtarget then subtargetExpr else targetExpr; # Create a pipeline label from the target's tree location. mkLabel = target: let label = concatStringsSep "/" target.__readTree; in if target ? __subtarget - then "${label}:${target.__subtarget}" - else label; + then "${label}:${target.__subtarget}" + else label; # Determine whether to skip a target if it has not diverged from the # HEAD branch. @@ -74,33 +76,36 @@ in rec { # Create a pipeline step from a single target. mkStep = headBranch: parentTargetMap: target: - let - label = mkLabel target; - drvPath = unsafeDiscardStringContext target.drvPath; - shouldSkip' = shouldSkip parentTargetMap; - in { - label = ":nix: " + label; - key = hashString "sha1" label; - skip = shouldSkip' label drvPath; - command = mkBuildCommand target drvPath; - env.READTREE_TARGET = label; - - # Add a dependency on the initial static pipeline step which - # always runs. 
This allows build steps uploaded in batches to - # start running before all batches have been uploaded. - depends_on = ":init:"; - }; + let + label = mkLabel target; + drvPath = unsafeDiscardStringContext target.drvPath; + shouldSkip' = shouldSkip parentTargetMap; + in + { + label = ":nix: " + label; + key = hashString "sha1" label; + skip = shouldSkip' label drvPath; + command = mkBuildCommand target drvPath; + env.READTREE_TARGET = label; + + # Add a dependency on the initial static pipeline step which + # always runs. This allows build steps uploaded in batches to + # start running before all batches have been uploaded. + depends_on = ":init:"; + }; # Helper function to inelegantly divide a list into chunks of at # most n elements. # # This works by assigning each element a chunk ID based on its # index, and then grouping all elements by their chunk ID. - chunksOf = n: list: let - chunkId = idx: toString (idx / n + 1); - assigned = lib.imap1 (idx: value: { inherit value ; chunk = chunkId idx; }) list; - unchunk = mapAttrs (_: elements: map (e: e.value) elements); - in unchunk (lib.groupBy (e: e.chunk) assigned); + chunksOf = n: list: + let + chunkId = idx: toString (idx / n + 1); + assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list; + unchunk = mapAttrs (_: elements: map (e: e.value) elements); + in + unchunk (lib.groupBy (e: e.chunk) assigned); # Define a build pipeline chunk as a JSON file, using the pipeline # format documented on @@ -120,104 +125,112 @@ in rec { attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps)); # Create a pipeline structure for the given targets. - mkPipeline = { - # HEAD branch of the repository on which release steps, GC - # anchoring and other "mainline only" steps should run. - headBranch, - - # List of derivations as read by readTree (in most cases just the - # output of readTree.gather) that should be built in Buildkite. - # - # These are scheduled as the first build steps and run as fast as - # possible, in order, without any concurrency restrictions. - drvTargets, - - # Derivation map of a parent commit. Only targets which no longer - # correspond to the content of this map will be built. Passing an - # empty map will always build all targets. - parentTargetMap ? {}, - - # A list of plain Buildkite step structures to run alongside the - # build for all drvTargets, but before proceeding with any - # post-build actions such as status reporting. - # - # Can be used for things like code formatting checks. - additionalSteps ? [], - - # A list of plain Buildkite step structures to run after all - # previous steps succeeded. - # - # Can be used for status reporting steps and the like. - postBuildSteps ? [] - }: let - # Convert a target into all of its build and post-build steps, - # treated separately as they need to be in different chunks. - targetToSteps = target: let - step = mkStep headBranch parentTargetMap target; - - # Split build/post-build steps - splitExtraSteps = partition ({ postStep, ... }: postStep) - (attrValues (mapAttrs (name: value: { - inherit name value; - postStep = (value ? prompt) || (value.postBuild or false); - }) (target.meta.ci.extraSteps or {}))); - - mkExtraStep' = { name, value, ... 
}: mkExtraStep step name value; - extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt - extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt - in { - buildSteps = [ step ] ++ extraBuildSteps; - postSteps = extraPostSteps; - }; - - # Combine all target steps into separate build and post-build step lists. - steps = foldl' (acc: t: { - buildSteps = acc.buildSteps ++ t.buildSteps; - postSteps = acc.postSteps ++ t.postSteps; - }) { buildSteps = []; postSteps = []; } (map targetToSteps drvTargets); - - buildSteps = - # Add build steps for each derivation target and their extra - # steps. - steps.buildSteps - - # Add additional steps (if set). - ++ additionalSteps; - - postSteps = - # Add post-build steps for each derivation target. - steps.postSteps - - # Add any globally defined post-build steps. - ++ postBuildSteps; - - buildChunks = pipelineChunks "build" buildSteps; - postBuildChunks = pipelineChunks "post" postSteps; - chunks = buildChunks ++ postBuildChunks; - in runCommandNoCC "buildkite-pipeline" {} '' - mkdir $out - echo "Generated ${toString (length chunks)} pipeline chunks" - ${ - lib.concatMapStringsSep "\n" - (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks - } - ''; + mkPipeline = + { + # HEAD branch of the repository on which release steps, GC + # anchoring and other "mainline only" steps should run. + headBranch + , # List of derivations as read by readTree (in most cases just the + # output of readTree.gather) that should be built in Buildkite. + # + # These are scheduled as the first build steps and run as fast as + # possible, in order, without any concurrency restrictions. + drvTargets + , # Derivation map of a parent commit. Only targets which no longer + # correspond to the content of this map will be built. Passing an + # empty map will always build all targets. + parentTargetMap ? { } + , # A list of plain Buildkite step structures to run alongside the + # build for all drvTargets, but before proceeding with any + # post-build actions such as status reporting. + # + # Can be used for things like code formatting checks. + additionalSteps ? [ ] + , # A list of plain Buildkite step structures to run after all + # previous steps succeeded. + # + # Can be used for status reporting steps and the like. + postBuildSteps ? [ ] + }: + let + # Convert a target into all of its build and post-build steps, + # treated separately as they need to be in different chunks. + targetToSteps = target: + let + step = mkStep headBranch parentTargetMap target; + + # Split build/post-build steps + splitExtraSteps = partition ({ postStep, ... }: postStep) + (attrValues (mapAttrs + (name: value: { + inherit name value; + postStep = (value ? prompt) || (value.postBuild or false); + }) + (target.meta.ci.extraSteps or { }))); + + mkExtraStep' = { name, value, ... }: mkExtraStep step name value; + extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt + extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt + in + { + buildSteps = [ step ] ++ extraBuildSteps; + postSteps = extraPostSteps; + }; + + # Combine all target steps into separate build and post-build step lists. + steps = foldl' + (acc: t: { + buildSteps = acc.buildSteps ++ t.buildSteps; + postSteps = acc.postSteps ++ t.postSteps; + }) + { buildSteps = [ ]; postSteps = [ ]; } + (map targetToSteps drvTargets); + + buildSteps = + # Add build steps for each derivation target and their extra + # steps. 
+ steps.buildSteps + + # Add additional steps (if set). + ++ additionalSteps; + + postSteps = + # Add post-build steps for each derivation target. + steps.postSteps + + # Add any globally defined post-build steps. + ++ postBuildSteps; + + buildChunks = pipelineChunks "build" buildSteps; + postBuildChunks = pipelineChunks "post" postSteps; + chunks = buildChunks ++ postBuildChunks; + in + runCommandNoCC "buildkite-pipeline" { } '' + mkdir $out + echo "Generated ${toString (length chunks)} pipeline chunks" + ${ + lib.concatMapStringsSep "\n" + (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks + } + ''; # Create a drvmap structure for the given targets, containing the # mapping of all target paths to their derivations. The mapping can # be persisted for future use. - mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map (target: { - name = mkLabel target; - value = { - drvPath = unsafeDiscardStringContext target.drvPath; - - # Include the attrPath in the output to reconstruct the drv - # without parsing the human-readable label. - attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [ - target.__subtarget - ]; - }; - }) drvTargets))); + mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map + (target: { + name = mkLabel target; + value = { + drvPath = unsafeDiscardStringContext target.drvPath; + + # Include the attrPath in the output to reconstruct the drv + # without parsing the human-readable label. + attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [ + target.__subtarget + ]; + }; + }) + drvTargets))); # Implementation of extra step logic. # @@ -278,34 +291,37 @@ in rec { # Create the Buildkite configuration for an extra step, optionally # wrapping it in a gate group. - mkExtraStep = parent: key: { - command, - label ? key, - prompt ? false, - needsOutput ? false, - branches ? null, - alwaysRun ? false, - postBuild ? false - }@cfg: let - parentLabel = parent.env.READTREE_TARGET; - - step = { - label = ":gear: ${label} (from ${parentLabel})"; - skip = if alwaysRun then false else parent.skip or false; - depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key; - branches = if branches != null then lib.concatStringsSep " " branches else null; - - command = pkgs.writeShellScript "${key}-script" '' - set -ueo pipefail - ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"} - ${lib.optionalString needsOutput parent.command} - echo '+++ Running extra step command' - exec ${command} - ''; - }; - in if (isString prompt) - then mkGatedStep { - inherit step label parent prompt; - } + mkExtraStep = parent: key: { command + , label ? key + , prompt ? false + , needsOutput ? false + , branches ? null + , alwaysRun ? false + , postBuild ? 
false + }@cfg: + let + parentLabel = parent.env.READTREE_TARGET; + + step = { + label = ":gear: ${label} (from ${parentLabel})"; + skip = if alwaysRun then false else parent.skip or false; + depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key; + branches = if branches != null then lib.concatStringsSep " " branches else null; + + command = pkgs.writeShellScript "${key}-script" '' + set -ueo pipefail + ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"} + ${lib.optionalString needsOutput parent.command} + echo '+++ Running extra step command' + exec ${command} + ''; + }; + in + if (isString prompt) + then + mkGatedStep + { + inherit step label parent prompt; + } else step; } diff --git a/nix/drvSeqL/default.nix b/nix/drvSeqL/default.nix index 3339289b3b..6437e1a043 100644 --- a/nix/drvSeqL/default.nix +++ b/nix/drvSeqL/default.nix @@ -17,9 +17,10 @@ let drvSeqL = defun [ (list drv) drv drv ] (drvDeps: drvOut: let - drvOutOutputs = drvOut.outputs or ["out"]; + drvOutOutputs = drvOut.outputs or [ "out" ]; in - pkgs.runCommandLocal drvOut.name { + pkgs.runCommandLocal drvOut.name + { # we inherit all attributes in order to replicate # the original derivation as much as possible outputs = drvOutOutputs; @@ -29,15 +30,18 @@ let } # the outputs of the original derivation are replicated # by creating a symlink to the old output path - (lib.concatMapStrings (output: '' - target=${lib.escapeShellArg drvOut.${output}} - # if the target is already a symlink, follow it until it’s not; - # this is done to prevent too many dereferences - target=$(readlink -e "$target") - # link to the output - ln -s "$target" "${"$"}${output}" - '') drvOutOutputs)); + (lib.concatMapStrings + (output: '' + target=${lib.escapeShellArg drvOut.${output}} + # if the target is already a symlink, follow it until it’s not; + # this is done to prevent too many dereferences + target=$(readlink -e "$target") + # link to the output + ln -s "$target" "${"$"}${output}" + '') + drvOutOutputs)); -in { +in +{ __functor = _: drvSeqL; } diff --git a/nix/emptyDerivation/default.nix b/nix/emptyDerivation/default.nix index 4165d4fd9a..8433984012 100644 --- a/nix/emptyDerivation/default.nix +++ b/nix/emptyDerivation/default.nix @@ -14,7 +14,8 @@ let inherit (depot.nix.runTestsuite) runTestsuite it assertEq; }; -in { +in +{ __functor = _: emptyDerivation; inherit tests; } diff --git a/nix/emptyDerivation/emptyDerivation.nix b/nix/emptyDerivation/emptyDerivation.nix index 5e84abe2d5..772df96352 100644 --- a/nix/emptyDerivation/emptyDerivation.nix +++ b/nix/emptyDerivation/emptyDerivation.nix @@ -11,7 +11,7 @@ let bins = getBins pkgs.s6-portable-utils [ "s6-touch" ] - // getBins pkgs.execline [ "importas" "exec" ]; + // getBins pkgs.execline [ "importas" "exec" ]; emptiness = { name = "empty-derivation"; @@ -21,12 +21,16 @@ let builder = bins.exec; args = [ - bins.importas "out" "out" - bins.s6-touch "$out" + bins.importas + "out" + "out" + bins.s6-touch + "$out" ]; }; -in (derivation emptiness) // { +in +(derivation emptiness) // { # This allows us to call the empty derivation # like a function and override fields/add new fields. 
__functor = _: overrides: diff --git a/nix/emptyDerivation/tests.nix b/nix/emptyDerivation/tests.nix index 053603b027..a738428824 100644 --- a/nix/emptyDerivation/tests.nix +++ b/nix/emptyDerivation/tests.nix @@ -10,10 +10,17 @@ let ]; fooOut = emptyDerivation { - builder = writeExecline "foo-builder" {} [ - "importas" "out" "out" - "redirfd" "-w" "1" "$out" - bins.s6-echo "-n" "foo" + builder = writeExecline "foo-builder" { } [ + "importas" + "out" + "out" + "redirfd" + "-w" + "1" + "$out" + bins.s6-echo + "-n" + "foo" ]; }; @@ -26,7 +33,8 @@ let "bar") ]; -in runTestsuite "emptyDerivation" [ +in +runTestsuite "emptyDerivation" [ empty overrideBuilder ] diff --git a/nix/escapeExecline/default.nix b/nix/escapeExecline/default.nix index deef5c2c4e..d2c39dd398 100644 --- a/nix/escapeExecline/default.nix +++ b/nix/escapeExecline/default.nix @@ -16,14 +16,17 @@ let # escapeExecline [ "if" [ "somecommand" ] "true" ] # == ''"if" { "somecommand" } "true"'' escapeExecline = execlineList: lib.concatStringsSep " " - (let - go = arg: - if builtins.isString arg then [(escapeExeclineArg arg)] - else if builtins.isPath arg then [(escapeExeclineArg "${arg}")] - else if lib.isDerivation arg then [(escapeExeclineArg arg)] - else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ] - else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}"; - in builtins.concatMap go execlineList); + ( + let + go = arg: + if builtins.isString arg then [ (escapeExeclineArg arg) ] + else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ] + else if lib.isDerivation arg then [ (escapeExeclineArg arg) ] + else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ] + else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}"; + in + builtins.concatMap go execlineList + ); in escapeExecline diff --git a/nix/getBins/default.nix b/nix/getBins/default.nix index 5ba7584ed8..e354b176c8 100644 --- a/nix/getBins/default.nix +++ b/nix/getBins/default.nix @@ -26,14 +26,16 @@ let getBins = drv: xs: - let f = x: - # TODO(Profpatsch): typecheck - let x' = if builtins.isString x then { use = x; as = x; } else x; - in { - name = x'.as; - value = "${lib.getBin drv}/bin/${x'.use}"; - }; - in builtins.listToAttrs (builtins.map f xs); + let + f = x: + # TODO(Profpatsch): typecheck + let x' = if builtins.isString x then { use = x; as = x; } else x; + in { + name = x'.as; + value = "${lib.getBin drv}/bin/${x'.use}"; + }; + in + builtins.listToAttrs (builtins.map f xs); tests = import ./tests.nix { @@ -42,7 +44,8 @@ let inherit (depot.nix.runTestsuite) assertEq it runTestsuite; }; -in { +in +{ __functor = _: getBins; inherit tests; } diff --git a/nix/getBins/tests.nix b/nix/getBins/tests.nix index ff81deb5f1..e0f5ab4263 100644 --- a/nix/getBins/tests.nix +++ b/nix/getBins/tests.nix @@ -5,11 +5,11 @@ let drv2 = writeScriptBin "goodbye" "tschau"; bins = getBins drv [ - "hello" - { use = "hello"; as = "also-hello"; } - ] - // getBins drv2 [ "goodbye" ] - ; + "hello" + { use = "hello"; as = "also-hello"; } + ] + // getBins drv2 [ "goodbye" ] + ; simple = it "path is equal to the executable name" [ (assertEq "path" @@ -33,8 +33,8 @@ let ]; in - runTestsuite "getBins" [ - simple - useAs - secondDrv - ] +runTestsuite "getBins" [ + simple + useAs + secondDrv +] diff --git a/nix/mergePatch/default.nix b/nix/mergePatch/default.nix index 0f80b93d4c..d56106925a 100644 --- a/nix/mergePatch/default.nix +++ 
b/nix/mergePatch/default.nix @@ -8,31 +8,31 @@ For example, given the following original document: { - a = "b"; - c = { + a = "b"; + c = { d = "e"; f = "g"; - } + } } Changing the value of `a` and removing `f` can be achieved by merging the patch { - a = "z"; - c.f = null; + a = "z"; + c.f = null; } which results in { - a = "z"; - c = { + a = "z"; + c = { d = "e"; - }; + }; } Pseudo-code: - define MergePatch(Target, Patch): + define MergePatch(Target, Patch): if Patch is an Object: if Target is not an Object: Target = {} # Ignore the contents and set it to an empty Object @@ -55,19 +55,19 @@ let mergePatch = target: patch: if lib.isAttrs patch then - let target' = if lib.isAttrs target then target else {}; + let target' = if lib.isAttrs target then target else { }; in foldlAttrs - (acc: patchEl: - if patchEl.value == null - then removeAttrs acc [ patchEl.name ] - else acc // { - ${patchEl.name} = - mergePatch - (acc.${patchEl.name} or "unnused") - patchEl.value; - }) - target' - patch + (acc: patchEl: + if patchEl.value == null + then removeAttrs acc [ patchEl.name ] + else acc // { + ${patchEl.name} = + mergePatch + (acc.${patchEl.name} or "unnused") + patchEl.value; + }) + target' + patch else patch; inherit (depot.nix.runTestsuite) @@ -93,46 +93,49 @@ let }; emptyPatch = it "the empty patch returns the original target" [ (assertEq "id" - (mergePatch testTarget {}) + (mergePatch testTarget { }) testTarget) ]; nonAttrs = it "one side is a non-attrset value" [ (assertEq "target is a value means the value is replaced by the patch" (mergePatch 42 testPatch) - (mergePatch {} testPatch)) + (mergePatch { } testPatch)) (assertEq "patch is a value means it replaces target alltogether" (mergePatch testTarget 42) 42) ]; rfcExamples = it "the examples from the RFC" [ (assertEq "a subset is deleted and overwritten" - (mergePatch testTarget testPatch) { + (mergePatch testTarget testPatch) + { a = "z"; c = { d = "e"; }; }) (assertEq "a more complicated example from the example section" - (mergePatch { - title = "Goodbye!"; + (mergePatch + { + title = "Goodbye!"; author = { givenName = "John"; familyName = "Doe"; }; - tags = [ "example" "sample" ]; - content = "This will be unchanged"; - } { - title = "Hello!"; - phoneNumber = "+01-123-456-7890"; - author.familyName = null; - tags = [ "example" ]; - }) + tags = [ "example" "sample" ]; + content = "This will be unchanged"; + } + { + title = "Hello!"; + phoneNumber = "+01-123-456-7890"; + author.familyName = null; + tags = [ "example" ]; + }) { title = "Hello!"; phoneNumber = "+01-123-456-7890"; - author = { - givenName = "John"; - }; + author = { + givenName = "John"; + }; tags = [ "example" ]; content = "This will be unchanged"; }) @@ -144,42 +147,45 @@ let (assertEq "test number ${toString index}" (mergePatch target patch) res); - in it "the test suite from the RFC" [ - (r 1 {"a" = "b";} {"a" = "c";} {"a" = "c";}) - (r 2 {"a" = "b";} {"b" = "c";} {"a" = "b"; "b" = "c";}) - (r 3 {"a" = "b";} {"a" = null;} {}) - (r 4 {"a" = "b"; "b" = "c";} - {"a" = null;} - {"b" = "c";}) - (r 5 {"a" = ["b"];} {"a" = "c";} {"a" = "c";}) - (r 6 {"a" = "c";} {"a" = ["b"];} {"a" = ["b"];}) - (r 7 {"a" = {"b" = "c";}; } - {"a" = {"b" = "d"; "c" = null;};} - {"a" = {"b" = "d";};}) - (r 8 {"a" = [{"b" = "c";}];} - {"a" = [1];} - {"a" = [1];}) - (r 9 ["a" "b"] ["c" "d"] ["c" "d"]) - (r 10 {"a" = "b";} ["c"] ["c"]) - (r 11 {"a" = "foo";} null null) - (r 12 {"a" = "foo";} "bar" "bar") - (r 13 {"e" = null;} {"a" = 1;} {"e" = null; "a" = 1;}) - (r 14 [1 2] - {"a" = "b"; "c" = 
null;} - {"a" = "b";}) - (r 15 {} - {"a" = {"bb" = {"ccc" = null;};};} - {"a" = {"bb" = {};};}) - ]; - - in runTestsuite "mergePatch" [ + in + it "the test suite from the RFC" [ + (r 1 { "a" = "b"; } { "a" = "c"; } { "a" = "c"; }) + (r 2 { "a" = "b"; } { "b" = "c"; } { "a" = "b"; "b" = "c"; }) + (r 3 { "a" = "b"; } { "a" = null; } { }) + (r 4 { "a" = "b"; "b" = "c"; } + { "a" = null; } + { "b" = "c"; }) + (r 5 { "a" = [ "b" ]; } { "a" = "c"; } { "a" = "c"; }) + (r 6 { "a" = "c"; } { "a" = [ "b" ]; } { "a" = [ "b" ]; }) + (r 7 { "a" = { "b" = "c"; }; } + { "a" = { "b" = "d"; "c" = null; }; } + { "a" = { "b" = "d"; }; }) + (r 8 { "a" = [{ "b" = "c"; }]; } + { "a" = [ 1 ]; } + { "a" = [ 1 ]; }) + (r 9 [ "a" "b" ] [ "c" "d" ] [ "c" "d" ]) + (r 10 { "a" = "b"; } [ "c" ] [ "c" ]) + (r 11 { "a" = "foo"; } null null) + (r 12 { "a" = "foo"; } "bar" "bar") + (r 13 { "e" = null; } { "a" = 1; } { "e" = null; "a" = 1; }) + (r 14 [ 1 2 ] + { "a" = "b"; "c" = null; } + { "a" = "b"; }) + (r 15 { } + { "a" = { "bb" = { "ccc" = null; }; }; } + { "a" = { "bb" = { }; }; }) + ]; + + in + runTestsuite "mergePatch" [ emptyPatch nonAttrs rfcExamples rfcTests ]; -in { +in +{ __functor = _: mergePatch; inherit tests; diff --git a/nix/netstring/attrsToKeyValList.nix b/nix/netstring/attrsToKeyValList.nix index 2805d0fbce..c854b56955 100644 --- a/nix/netstring/attrsToKeyValList.nix +++ b/nix/netstring/attrsToKeyValList.nix @@ -28,6 +28,6 @@ attrs: lib.concatStrings (lib.mapAttrsToList (k: v: depot.nix.netstring.fromString - ( depot.nix.netstring.fromString k - + depot.nix.netstring.fromString v)) + (depot.nix.netstring.fromString k + + depot.nix.netstring.fromString v)) attrs) diff --git a/nix/nint/default.nix b/nix/nint/default.nix index 5cf83d15d6..0087fc0416 100644 --- a/nix/nint/default.nix +++ b/nix/nint/default.nix @@ -6,9 +6,11 @@ let ; in - rustSimpleBin { - name = "nint"; - dependencies = [ - depot.third_party.rust-crates.serde_json - ]; - } (builtins.readFile ./nint.rs) +rustSimpleBin +{ + name = "nint"; + dependencies = [ + depot.third_party.rust-crates.serde_json + ]; +} + (builtins.readFile ./nint.rs) diff --git a/nix/readTree/default.nix b/nix/readTree/default.nix index 259f2f2fbf..0c59c890d4 100644 --- a/nix/readTree/default.nix +++ b/nix/readTree/default.nix @@ -43,10 +43,13 @@ let children = readDir path; isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != "."; names = filter isVisible (attrNames children); - in listToAttrs (map (name: { - inherit name; - value = children.${name}; - }) names); + in + listToAttrs (map + (name: { + inherit name; + value = children.${name}; + }) + names); # Create a mark containing the location of this attribute and # a list of all child attribute names added by readTree. @@ -57,12 +60,13 @@ let # Import a file and enforce our calling convention importFile = args: scopedArgs: path: parts: filter: - let - importedFile = if scopedArgs != {} - then builtins.scopedImport scopedArgs path - else import path; + let + importedFile = + if scopedArgs != { } + then builtins.scopedImport scopedArgs path + else import path; pathType = builtins.typeOf importedFile; - in + in if pathType != "lambda" then builtins.throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... 
}" else importedFile (filter parts (argsWithPath args parts)); @@ -76,8 +80,9 @@ let dir = readDirVisible initPath; joinChild = c: initPath + ("/" + c); - self = if rootDir - then { __readTree = []; } + self = + if rootDir + then { __readTree = [ ]; } else importFile args scopedArgs initPath parts argsFilter; # Import subdirectories of the current one, unless the special @@ -88,33 +93,41 @@ let # should be ignored, but its content is not inspected by # readTree filterDir = f: dir."${f}" == "directory"; - children = if hasAttr ".skip-subtree" dir then [] else map (c: { - name = c; - value = readTree { - inherit argsFilter scopedArgs; - args = args; - initPath = (joinChild c); - rootDir = false; - parts = (parts ++ [ c ]); - }; - }) (filter filterDir (attrNames dir)); + children = if hasAttr ".skip-subtree" dir then [ ] else + map + (c: { + name = c; + value = readTree { + inherit argsFilter scopedArgs; + args = args; + initPath = (joinChild c); + rootDir = false; + parts = (parts ++ [ c ]); + }; + }) + (filter filterDir (attrNames dir)); # Import Nix files - nixFiles = if hasAttr ".skip-subtree" dir then [] + nixFiles = + if hasAttr ".skip-subtree" dir then [ ] else filter (f: f != null) (map nixFileName (attrNames dir)); - nixChildren = map (c: let - p = joinChild (c + ".nix"); - childParts = parts ++ [ c ]; - imported = importFile args scopedArgs p childParts argsFilter; - in { - name = c; - value = - if isAttrs imported - then imported // marker childParts {} - else imported; - }) nixFiles; - - nodeValue = if dir ? "default.nix" then self else {}; + nixChildren = map + (c: + let + p = joinChild (c + ".nix"); + childParts = parts ++ [ c ]; + imported = importFile args scopedArgs p childParts argsFilter; + in + { + name = c; + value = + if isAttrs imported + then imported // marker childParts { } + else imported; + }) + nixFiles; + + nodeValue = if dir ? "default.nix" then self else { }; allChildren = listToAttrs ( if dir ? "default.nix" @@ -123,9 +136,9 @@ let ); in - if isAttrs nodeValue - then nodeValue // allChildren // (marker parts allChildren) - else nodeValue; + if isAttrs nodeValue + then nodeValue // allChildren // (marker parts allChildren) + else nodeValue; # Function which can be used to find all readTree targets within an # attribute set. @@ -143,40 +156,42 @@ let # should be included in the build. gather = eligible: node: if node ? __readTree then - # Include the node itself if it is eligible. - (if eligible node then [ node ] else []) + # Include the node itself if it is eligible. + (if eligible node then [ node ] else [ ]) # Include eligible children of the node ++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren) # Include specified sub-targets of the node ++ filter eligible (map - (k: (node."${k}" or {}) // { - # Keep the same tree location, but explicitly mark this - # node as a subtarget. - __readTree = node.__readTree; - __readTreeChildren = []; - __subtarget = k; - }) - (node.meta.targets or [])) - else []; + (k: (node."${k}" or { }) // { + # Keep the same tree location, but explicitly mark this + # node as a subtarget. + __readTree = node.__readTree; + __readTreeChildren = [ ]; + __subtarget = k; + }) + (node.meta.targets or [ ])) + else [ ]; # Determine whether a given value is a derivation. # Copied from nixpkgs/lib for cases where lib is not available yet. isDerivation = x: isAttrs x && x ? type && x.type == "derivation"; -in { +in +{ inherit gather; __functor = _: { path , args , filter ? (_parts: x: x) - , scopedArgs ? 
{} }: - readTree { - inherit args scopedArgs; - argsFilter = filter; - initPath = path; - rootDir = true; - parts = []; - }; + , scopedArgs ? { } + }: + readTree { + inherit args scopedArgs; + argsFilter = filter; + initPath = path; + rootDir = true; + parts = [ ]; + }; # In addition to readTree itself, some functionality is exposed that # is useful for users of readTree. @@ -193,7 +208,7 @@ in { # which should be able to access the restricted folder. # # reason: Textual explanation for the restriction (included in errors) - restrictFolder = { folder, exceptions ? [], reason }: parts: args: + restrictFolder = { folder, exceptions ? [ ], reason }: parts: args: if (elemAt parts 0) == folder || elem parts exceptions then args else args // { @@ -224,8 +239,8 @@ in { drvTargets = attrs: attrs // { meta = { targets = builtins.filter - (x: isDerivation attrs."${x}") - (builtins.attrNames attrs); - } // (attrs.meta or {}); + (x: isDerivation attrs."${x}") + (builtins.attrNames attrs); + } // (attrs.meta or { }); }; } diff --git a/nix/readTree/tests/default.nix b/nix/readTree/tests/default.nix index 3354a4fe5e..fcca141714 100644 --- a/nix/readTree/tests/default.nix +++ b/nix/readTree/tests/default.nix @@ -10,13 +10,13 @@ let tree-ex = depot.nix.readTree { path = ./test-example; - args = {}; + args = { }; }; example = it "corresponds to the README example" [ (assertEq "third_party attrset" (lib.isAttrs tree-ex.third_party - && (! lib.isDerivation tree-ex.third_party)) + && (! lib.isDerivation tree-ex.third_party)) true) (assertEq "third_party attrset other attribute" tree-ex.third_party.favouriteColour @@ -37,7 +37,7 @@ let tree-tl = depot.nix.readTree { path = ./test-tree-traversal; - args = {}; + args = { }; }; traversal-logic = it "corresponds to the traversal logic in the README" [ @@ -82,7 +82,7 @@ let "Picked up through the drv") (assertEq "default.nix drv is not changed by readTree" tree-tl.default-nix.can-be-drv - (import ./test-tree-traversal/default-nix/can-be-drv/default.nix {})) + (import ./test-tree-traversal/default-nix/can-be-drv/default.nix { })) ]; # these each call readTree themselves because the throws have to happen inside assertThrows @@ -90,7 +90,7 @@ let (assertThrows "this file is not a function" (depot.nix.readTree { path = ./test-wrong-not-a-function; - args = {}; + args = { }; }).not-a-function) # can’t test for that, assertThrows can’t catch this error # (assertThrows "this file is a function but doesn’t have dots" @@ -99,12 +99,13 @@ let read-markers = depot.nix.readTree { path = ./test-marker; - args = {}; + args = { }; }; assertMarkerByPath = path: assertEq "${lib.concatStringsSep "." path} is marked correctly" - (lib.getAttrFromPath path read-markers).__readTree path; + (lib.getAttrFromPath path read-markers).__readTree + path; markers = it "marks nodes correctly" [ (assertMarkerByPath [ "directory-marked" ]) @@ -119,7 +120,8 @@ let read-markers.directory-marked.nested.__readTreeChildren [ ]) ]; -in runTestsuite "readTree" [ +in +runTestsuite "readTree" [ example traversal-logic wrong diff --git a/nix/readTree/tests/test-marker/directory-marked/default.nix b/nix/readTree/tests/test-marker/directory-marked/default.nix index a3f961128e..5bd3e36b53 100644 --- a/nix/readTree/tests/test-marker/directory-marked/default.nix +++ b/nix/readTree/tests/test-marker/directory-marked/default.nix @@ -1,3 +1,3 @@ { ... 
}: -{} +{ } diff --git a/nix/readTree/tests/test-marker/directory-marked/nested/default.nix b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix index a3f961128e..5bd3e36b53 100644 --- a/nix/readTree/tests/test-marker/directory-marked/nested/default.nix +++ b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix @@ -1,3 +1,3 @@ { ... }: -{} +{ } diff --git a/nix/readTree/tests/test-marker/file-children/one.nix b/nix/readTree/tests/test-marker/file-children/one.nix index a3f961128e..5bd3e36b53 100644 --- a/nix/readTree/tests/test-marker/file-children/one.nix +++ b/nix/readTree/tests/test-marker/file-children/one.nix @@ -1,3 +1,3 @@ { ... }: -{} +{ } diff --git a/nix/readTree/tests/test-marker/file-children/two.nix b/nix/readTree/tests/test-marker/file-children/two.nix index a3f961128e..5bd3e36b53 100644 --- a/nix/readTree/tests/test-marker/file-children/two.nix +++ b/nix/readTree/tests/test-marker/file-children/two.nix @@ -1,3 +1,3 @@ { ... }: -{} +{ } diff --git a/nix/renderMarkdown/default.nix b/nix/renderMarkdown/default.nix index 3e5a59954b..8d6b31cfcc 100644 --- a/nix/renderMarkdown/default.nix +++ b/nix/renderMarkdown/default.nix @@ -3,6 +3,6 @@ with depot.nix.yants; -defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" {} '' +defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" { } '' cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out '') diff --git a/nix/runExecline/default.nix b/nix/runExecline/default.nix index fd92203d01..76fffdce7b 100644 --- a/nix/runExecline/default.nix +++ b/nix/runExecline/default.nix @@ -9,7 +9,7 @@ let runExeclineLocal = name: args: execline: runExecline name (args // { - derivationArgs = args.derivationArgs or {} // { + derivationArgs = args.derivationArgs or { } // { preferLocalBuild = true; allowSubstitutes = false; }; @@ -23,7 +23,8 @@ let inherit pkgs; }; -in { +in +{ __functor = _: runExecline; local = runExeclineLocal; inherit tests; diff --git a/nix/runExecline/runExecline.nix b/nix/runExecline/runExecline.nix index 0e45080735..23b9a63303 100644 --- a/nix/runExecline/runExecline.nix +++ b/nix/runExecline/runExecline.nix @@ -35,32 +35,32 @@ let bins = getBins pkgs.execline [ - "execlineb" - { use = "if"; as = "execlineIf"; } - "redirfd" - "importas" - "exec" - ] - // getBins pkgs.s6-portable-utils [ - "s6-cat" - "s6-grep" - "s6-touch" - "s6-test" - "s6-chmod" - ]; + "execlineb" + { use = "if"; as = "execlineIf"; } + "redirfd" + "importas" + "exec" + ] + // getBins pkgs.s6-portable-utils [ + "s6-cat" + "s6-grep" + "s6-touch" + "s6-test" + "s6-chmod" + ]; in # TODO: move name into the attrset name: { -# a string to pass as stdin to the execline script -stdin ? "" -# a program wrapping the acutal execline invocation; -# should be in Bernstein-chaining style + # a string to pass as stdin to the execline script + stdin ? "" + # a program wrapping the acutal execline invocation; + # should be in Bernstein-chaining style , builderWrapper ? bins.exec -# additional arguments to pass to the derivation -, derivationArgs ? {} + # additional arguments to pass to the derivation +, derivationArgs ? 
{ } }: # the execline script as a nested list of string, # representing the blocks; @@ -90,33 +90,33 @@ derivation (derivationArgs // { passAsFile = [ "_runExeclineScript" "_runExeclineStdin" - ] ++ derivationArgs.passAsFile or []; + ] ++ derivationArgs.passAsFile or [ ]; # the default, exec acts as identity executable builder = builderWrapper; args = [ - bins.importas # import script file as $script - "-ui" # drop the envvar afterwards - "script" # substitution name + bins.importas # import script file as $script + "-ui" # drop the envvar afterwards + "script" # substitution name "_runExeclineScriptPath" # passed script file - bins.importas # do the same for $stdin + bins.importas # do the same for $stdin "-ui" "stdin" "_runExeclineStdinPath" - bins.redirfd # now we - "-r" # read the file - "0" # into the stdin of execlineb - "$stdin" # that was given via stdin + bins.redirfd # now we + "-r" # read the file + "0" # into the stdin of execlineb + "$stdin" # that was given via stdin - bins.execlineb # the actual invocation + bins.execlineb # the actual invocation # TODO(Profpatsch): depending on the use-case, -S0 might not be enough # in all use-cases, then a wrapper for execlineb arguments # should be added (-P, -S, -s). - "-S0" # set $@ inside the execline script - "-W" # die on syntax error - "$script" # substituted by importas + "-S0" # set $@ inside the execline script + "-W" # die on syntax error + "$script" # substituted by importas ]; }) diff --git a/nix/runExecline/tests.nix b/nix/runExecline/tests.nix index d2f5a1780c..f82b544224 100644 --- a/nix/runExecline/tests.nix +++ b/nix/runExecline/tests.nix @@ -1,23 +1,29 @@ -{ stdenv, pkgs, runExecline, runExeclineLocal, getBins, writeScript -# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html -, coreutils }: +{ stdenv +, pkgs +, runExecline +, runExeclineLocal +, getBins +, writeScript + # https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html +, coreutils +}: let bins = getBins coreutils [ "mv" ] - // getBins pkgs.execline [ - "execlineb" - { use = "if"; as = "execlineIf"; } - "redirfd" - "importas" - ] - // getBins pkgs.s6-portable-utils [ - "s6-chmod" - "s6-grep" - "s6-touch" - "s6-cat" - "s6-test" - ]; + // getBins pkgs.execline [ + "execlineb" + { use = "if"; as = "execlineIf"; } + "redirfd" + "importas" + ] + // getBins pkgs.s6-portable-utils [ + "s6-chmod" + "s6-grep" + "s6-touch" + "s6-cat" + "s6-test" + ]; # execline block of depth 1 block = args: builtins.map (arg: " ${arg}") args ++ [ "" ]; @@ -31,49 +37,80 @@ let builder = bins.execlineIf; args = (block [ - bins.redirfd "-r" "0" file # read file to stdin - bins.s6-grep "-F" "-q" line # and grep for the line + bins.redirfd + "-r" + "0" + file # read file to stdin + bins.s6-grep + "-F" + "-q" + line # and grep for the line ]) ++ [ # if the block succeeded, touch $out - bins.importas "-ui" "out" "out" - bins.s6-touch "$out" + bins.importas + "-ui" + "out" + "out" + bins.s6-touch + "$out" ]; preferLocalBuild = true; allowSubstitutes = false; }; # basic test that touches out - basic = runExeclineLocal "run-execline-test-basic" { - } [ - "importas" "-ui" "out" "out" - "${bins.s6-touch}" "$out" + basic = runExeclineLocal "run-execline-test-basic" + { } [ + "importas" + "-ui" + "out" + "out" + "${bins.s6-touch}" + "$out" ]; # whether the stdin argument works as intended - stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin" { - stdin = "foo\nbar\nfoo"; - } [ - "importas" "-ui" "out" "out" - # this pipes stdout of s6-cat to $out - # and 
s6-cat redirects from stdin to stdout - "redirfd" "-w" "1" "$out" bins.s6-cat + stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin" + { + stdin = "foo\nbar\nfoo"; + } [ + "importas" + "-ui" + "out" + "out" + # this pipes stdout of s6-cat to $out + # and s6-cat redirects from stdin to stdout + "redirfd" + "-w" + "1" + "$out" + bins.s6-cat ]); - wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var" { - builderWrapper = writeScript "var-wrapper" '' - #!${bins.execlineb} -S0 - export myvar myvalue $@ - ''; - } [ - "importas" "-ui" "v" "myvar" - "if" [ bins.s6-test "myvalue" "=" "$v" ] - "importas" "out" "out" - bins.s6-touch "$out" + wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var" + { + builderWrapper = writeScript "var-wrapper" '' + #!${bins.execlineb} -S0 + export myvar myvalue $@ + ''; + } [ + "importas" + "-ui" + "v" + "myvar" + "if" + [ bins.s6-test "myvalue" "=" "$v" ] + "importas" + "out" + "out" + bins.s6-touch + "$out" ]; -in [ +in +[ basic stdin wrapWithVar diff --git a/nix/runTestsuite/default.nix b/nix/runTestsuite/default.nix index 9eb5070996..8b02ed86d8 100644 --- a/nix/runTestsuite/default.nix +++ b/nix/runTestsuite/default.nix @@ -38,11 +38,11 @@ let ; bins = depot.nix.getBins pkgs.coreutils [ "printf" ] - // depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ]; + // depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ]; # Returns true if the given expression throws when `deepSeq`-ed throws = expr: - !(builtins.tryEval (builtins.deepSeq expr {})).success; + !(builtins.tryEval (builtins.deepSeq expr { })).success; # rewrite the builtins.partition result # to use `ok` and `err` instead of `right` and `wrong`. @@ -99,11 +99,12 @@ let (context: desc: res: if res then { yep = { test = desc; }; } - else { nope = { - test = desc; - inherit context; - }; - }); + else { + nope = { + test = desc; + inherit context; + }; + }); # assert that left and right values are equal assertEq = defun [ string any any AssertResult ] @@ -111,7 +112,7 @@ let let context = { not-equal = { inherit left right; }; }; in - assertBoolContext context desc (left == right)); + assertBoolContext context desc (left == right)); # assert that the expression throws when `deepSeq`-ed assertThrows = defun [ string any AssertResult ] @@ -119,7 +120,7 @@ let let context = { should-throw = { inherit expr; }; }; in - assertBoolContext context desc (throws expr)); + assertBoolContext context desc (throws expr)); # assert that the expression does not throw when `deepSeq`-ed assertDoesNotThrow = defun [ string any AssertResult ] @@ -144,31 +145,50 @@ let yep = _: true; nope = _: false; }; - res = partitionTests (it: - (partitionTests goodAss it.asserts).err == [] - ) itResults; - prettyRes = lib.generators.toPretty {} res; + res = partitionTests + (it: + (partitionTests goodAss it.asserts).err == [ ] + ) + itResults; + prettyRes = lib.generators.toPretty { } res; in - if res.err == [] - then depot.nix.runExecline.local "testsuite-${name}-successful" {} [ - "importas" "out" "out" + if res.err == [ ] + then + depot.nix.runExecline.local "testsuite-${name}-successful" { } [ + "importas" + "out" + "out" # force derivation to rebuild if test case list changes - "ifelse" [ bins.s6-false ] [ - bins.printf "" (builtins.hashString "sha512" prettyRes) + "ifelse" + [ bins.s6-false ] + [ + bins.printf + "" + (builtins.hashString "sha512" prettyRes) ] - "if" [ bins.printf "%s\n" "testsuite ${name} successful!" 
] - bins.s6-touch "$out" + "if" + [ bins.printf "%s\n" "testsuite ${name} successful!" ] + bins.s6-touch + "$out" ] - else depot.nix.runExecline.local "testsuite-${name}-failed" { - stdin = prettyRes + "\n"; - } [ - "importas" "out" "out" - "if" [ bins.printf "%s\n" "testsuite ${name} failed!" ] - "if" [ bins.s6-cat ] - "exit" "1" + else + depot.nix.runExecline.local "testsuite-${name}-failed" + { + stdin = prettyRes + "\n"; + } [ + "importas" + "out" + "out" + "if" + [ bins.printf "%s\n" "testsuite ${name} failed!" ] + "if" + [ bins.s6-cat ] + "exit" + "1" ]); -in { +in +{ inherit assertEq assertThrows diff --git a/nix/sparseTree/default.nix b/nix/sparseTree/default.nix index 5184f33d5c..16fc9b6103 100644 --- a/nix/sparseTree/default.nix +++ b/nix/sparseTree/default.nix @@ -45,14 +45,16 @@ let let withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p; fullPath = - /**/ if builtins.isPath path then path + /**/ + if builtins.isPath path then path else if builtins.isString path then (root + withLeading path) else builtins.throw "Unsupported path type ${builtins.typeOf path}"; strPath = toString fullPath; contextPath = "${fullPath}"; belowRoot = builtins.substring rootLength (-1) strPath; prefix = builtins.substring 0 rootLength strPath; - in assert toString root == prefix; { + in + assert toString root == prefix; { src = contextPath; dst = belowRoot; }; @@ -61,10 +63,12 @@ let in # TODO(sterni): teach readTree to also read symlinked directories, -# so we ln -sT instead of cp -aT. -pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" {} ( - lib.concatMapStrings ({ src, dst }: '' - mkdir -p "$(dirname "$out${dst}")" - cp -aT --reflink=auto "${src}" "$out${dst}" - '') symlinks + # so we ln -sT instead of cp -aT. +pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" { } ( + lib.concatMapStrings + ({ src, dst }: '' + mkdir -p "$(dirname "$out${dst}")" + cp -aT --reflink=auto "${src}" "$out${dst}" + '') + symlinks ) diff --git a/nix/tag/default.nix b/nix/tag/default.nix index 9c55e6263b..0038404460 100644 --- a/nix/tag/default.nix +++ b/nix/tag/default.nix @@ -4,22 +4,24 @@ let # if so sets `isTag` to `true` and sets the name and value. # If not, sets `isTag` to `false` and sets `errmsg`. verifyTag = tag: - let cases = builtins.attrNames tag; - len = builtins.length cases; + let + cases = builtins.attrNames tag; + len = builtins.length cases; in if builtins.length cases == 1 - then let name = builtins.head cases; in { - isTag = true; - name = name; - val = tag.${name}; - errmsg = null; - } + then + let name = builtins.head cases; in { + isTag = true; + name = name; + val = tag.${name}; + errmsg = null; + } else { isTag = false; errmsg = - ( "match: an instance of a sum is an attrset " - + "with exactly one element, yours had ${toString len}" - + ", namely: ${lib.generators.toPretty {} cases}" ); + ("match: an instance of a sum is an attrset " + + "with exactly one element, yours had ${toString len}" + + ", namely: ${lib.generators.toPretty {} cases}"); name = null; val = null; }; @@ -63,21 +65,22 @@ let # ] 1 # => { smol = 1; } discrDef = defTag: fs: v: - let res = lib.findFirst - (t: t.val v) - null - (map assertIsTag fs); + let + res = lib.findFirst + (t: t.val v) + null + (map assertIsTag fs); in - if res == null - then { ${defTag} = v; } - else { ${res.name} = v; }; + if res == null + then { ${defTag} = v; } + else { ${res.name} = v; }; # Like `discrDef`, but fail if there is no match. 
discr = fs: v: let res = discrDef null fs v; in - assert lib.assertMsg (res != null) - "tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}"; - res; + assert lib.assertMsg (res != null) + "tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}"; + res; # The canonical pattern matching primitive. # A sum value is an attribute set with one element, @@ -104,17 +107,17 @@ let match = sum: matcher: let cases = builtins.attrNames sum; in assert - let len = builtins.length cases; in - lib.assertMsg (len == 1) - ( "match: an instance of a sum is an attrset " - + "with exactly one element, yours had ${toString len}" - + ", namely: ${lib.generators.toPretty {} cases}" ); + let len = builtins.length cases; in + lib.assertMsg (len == 1) + ("match: an instance of a sum is an attrset " + + "with exactly one element, yours had ${toString len}" + + ", namely: ${lib.generators.toPretty {} cases}"); let case = builtins.head cases; in assert - lib.assertMsg (matcher ? ${case}) - ( "match: \"${case}\" is not a valid case of this sum, " + lib.assertMsg (matcher ? ${case}) + ("match: \"${case}\" is not a valid case of this sum, " + "the matcher accepts: ${lib.generators.toPretty {} - (builtins.attrNames matcher)}" ); + (builtins.attrNames matcher)}"); matcher.${case} sum.${case}; # A `match` with the arguments flipped. @@ -148,15 +151,16 @@ let ; }; -in { - inherit - verifyTag - tagName - tagValue - discr - discrDef - match - matchLam - tests - ; +in +{ + inherit + verifyTag + tagName + tagValue + discr + discrDef + match + matchLam + tests + ; } diff --git a/nix/tag/tests.nix b/nix/tag/tests.nix index 8c9c738074..bcc42c758a 100644 --- a/nix/tag/tests.nix +++ b/nix/tag/tests.nix @@ -17,7 +17,7 @@ let errmsg = null; }) (assertEq "is not Tag" - (removeAttrs (verifyTag { foo = "bar"; baz = 42; }) ["errmsg"]) + (removeAttrs (verifyTag { foo = "bar"; baz = 42; }) [ "errmsg" ]) { isTag = false; name = null; @@ -41,7 +41,8 @@ let (discr [ { bool = lib.isBool; } { int = lib.isInt; } - ] true) + ] + true) { bool = true; }) (assertEq "fallback to default" (discrDef "def" [ @@ -53,19 +54,24 @@ let match-test = it "can match things" [ (assertEq "match example" - (let - success = { res = 42; }; - failure = { err = "no answer"; }; - matcher = { - res = i: i + 1; - err = _: 0; - }; - in { - one = match success matcher; - two = match failure matcher; + ( + let + success = { res = 42; }; + failure = { err = "no answer"; }; + matcher = { + res = i: i + 1; + err = _: 0; + }; + in + { + one = match success matcher; + two = match failure matcher; + } + ) + { + one = 43; + two = 0; }) - { one = 43; - two = 0; }) (assertEq "matchLam & pipe" (lib.pipe { foo = 42; } [ (matchLam { @@ -81,8 +87,8 @@ let ]; in - runTestsuite "tag" [ - isTag-test - discr-test - match-test - ] +runTestsuite "tag" [ + isTag-test + discr-test + match-test +] diff --git a/nix/tailscale/default.nix b/nix/tailscale/default.nix index 8d6a0f661b..363f717db6 100644 --- a/nix/tailscale/default.nix +++ b/nix/tailscale/default.nix @@ -27,4 +27,5 @@ let # Actual ACL entries ACLs = list acl; }; -in config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config)) +in +config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config)) diff --git a/nix/utils/default.nix b/nix/utils/default.nix index 258e372a2a..cabea5bbee 100644 --- a/nix/utils/default.nix +++ b/nix/utils/default.nix @@ -34,14 +34,14 @@ let basename = builtins.unsafeDiscardStringContext (builtins.baseNameOf strPath); in - # If p is a direct child of 
storeDir, we need to remove + # If p is a direct child of storeDir, we need to remove # the leading hash as well to make sure that: # `storePathName drv == storePathName (toString drv)`. - if noStoreDir == basename - then builtins.substring 33 (-1) basename - else basename + if noStoreDir == basename + then builtins.substring 33 (-1) basename + else basename else builtins.throw "Don't know how to get (base)name of " - + lib.generators.toPretty {} p; + + lib.generators.toPretty { } p; /* Query the type of a path exposing the same information as would be by `builtins.readDir`, but for a single, specific target path. @@ -106,7 +106,7 @@ let # We need to call toString to prevent unsafeDiscardStringContext # from importing a path into store which messes with base- and # dirname of course. - path'= builtins.unsafeDiscardStringContext (toString path); + path' = builtins.unsafeDiscardStringContext (toString path); # To read the containing directory we absolutely need # to keep the string context, otherwise a derivation # would not be realized before our check (at eval time) @@ -120,20 +120,22 @@ let # directory. If not, either the target doesn't exist or is a regular file. # TODO(sterni): is there a way to check reliably if the symlink target exists? isSymlinkDir = builtins.pathExists (path' + "/."); - in { + in + { ${thisPathType} = - /**/ if thisPathType != "symlink" then true - else if isSymlinkDir then "directory" - else "regular-or-missing"; + /**/ + if thisPathType != "symlink" then true + else if isSymlinkDir then "directory" + else "regular-or-missing"; }; pathType' = path: let p = pathType path; in - if p ? missing - then builtins.throw "${lib.generators.toPretty {} path} does not exist" - else p; + if p ? missing + then builtins.throw "${lib.generators.toPretty {} path} does not exist" + else p; /* Check whether the given path is a directory. Throws if the path in question doesn't exist. @@ -151,9 +153,11 @@ let Type: path(-like) -> bool */ - realPathIsDirectory = path: let - pt = pathType' path; - in pt ? directory || pt.symlink or null == "directory"; + realPathIsDirectory = path: + let + pt = pathType' path; + in + pt ? directory || pt.symlink or null == "directory"; /* Check whether the given path is a regular file. Throws if the path in question doesn't exist. @@ -169,7 +173,8 @@ let */ isSymlink = path: pathType' path ? 
symlink; -in { +in +{ inherit storePathName pathType diff --git a/nix/utils/tests/default.nix b/nix/utils/tests/default.nix index 8a078684f3..52b7ca41d2 100644 --- a/nix/utils/tests/default.nix +++ b/nix/utils/tests/default.nix @@ -26,38 +26,53 @@ let pathPredicates = it "judges paths correctly" (lib.flatten [ # isDirectory (assertUtilsPred "directory isDirectory" - (isDirectory ./directory) true) + (isDirectory ./directory) + true) (assertUtilsPred "symlink not isDirectory" - (isDirectory ./symlink-directory) false) + (isDirectory ./symlink-directory) + false) (assertUtilsPred "file not isDirectory" - (isDirectory ./directory/file) false) + (isDirectory ./directory/file) + false) # realPathIsDirectory (assertUtilsPred "directory realPathIsDirectory" - (realPathIsDirectory ./directory) true) + (realPathIsDirectory ./directory) + true) (assertUtilsPred "symlink to directory realPathIsDirectory" - (realPathIsDirectory ./symlink-directory) true) + (realPathIsDirectory ./symlink-directory) + true) (assertUtilsPred "realPathIsDirectory resolves chained symlinks" - (realPathIsDirectory ./symlink-symlink-directory) true) + (realPathIsDirectory ./symlink-symlink-directory) + true) # isRegularFile (assertUtilsPred "file isRegularFile" - (isRegularFile ./directory/file) true) + (isRegularFile ./directory/file) + true) (assertUtilsPred "symlink not isRegularFile" - (isRegularFile ./symlink-file) false) + (isRegularFile ./symlink-file) + false) (assertUtilsPred "directory not isRegularFile" - (isRegularFile ./directory) false) + (isRegularFile ./directory) + false) # isSymlink (assertUtilsPred "symlink to file isSymlink" - (isSymlink ./symlink-file) true) + (isSymlink ./symlink-file) + true) (assertUtilsPred "symlink to directory isSymlink" - (isSymlink ./symlink-directory) true) + (isSymlink ./symlink-directory) + true) (assertUtilsPred "symlink to symlink isSymlink" - (isSymlink ./symlink-symlink-file) true) + (isSymlink ./symlink-symlink-file) + true) (assertUtilsPred "symlink to missing file isSymlink" - (isSymlink ./missing) true) + (isSymlink ./missing) + true) (assertUtilsPred "directory not isSymlink" - (isSymlink ./directory) false) + (isSymlink ./directory) + false) (assertUtilsPred "file not isSymlink" - (isSymlink ./directory/file) false) + (isSymlink ./directory/file) + false) # missing files throw (assertThrows "isDirectory throws on missing file" (isDirectory ./does-not-exist)) @@ -89,15 +104,18 @@ let storePathNameTests = it "correctly gets the basename of a store path" [ (assertEq "base name of a derivation" - (storePathName depot.tools.cheddar) depot.tools.cheddar.name) + (storePathName depot.tools.cheddar) + depot.tools.cheddar.name) (assertEq "base name of a store path string" - (storePathName cheddarStorePath) depot.tools.cheddar.name) + (storePathName cheddarStorePath) + depot.tools.cheddar.name) (assertEq "base name of a path within a store path" (storePathName "${cheddarStorePath}/bin/cheddar") "cheddar") (assertEq "base name of a path" (storePathName ../default.nix) "default.nix") (assertEq "base name of a cleanSourced path" - (storePathName cleanedSource) cleanedSource.name) + (storePathName cleanedSource) + cleanedSource.name) ]; in diff --git a/nix/writeElispBin/default.nix b/nix/writeElispBin/default.nix index c116607e0a..3ea2da58aa 100644 --- a/nix/writeElispBin/default.nix +++ b/nix/writeElispBin/default.nix @@ -1,6 +1,6 @@ { depot, pkgs, ... }: -{ name, src, deps ? (_: []), emacs ? pkgs.emacs27-nox }: +{ name, src, deps ? (_: [ ]), emacs ? 
pkgs.emacs27-nox }: let inherit (pkgs) emacsPackages emacsPackagesGen; @@ -8,11 +8,13 @@ let finalEmacs = (emacsPackagesGen emacs).emacsWithPackages deps; - srcFile = if isString src + srcFile = + if isString src then toFile "${name}.el" src else src; -in depot.nix.writeScriptBin name '' +in +depot.nix.writeScriptBin name '' #!/bin/sh ${finalEmacs}/bin/emacs --batch --no-site-file --script ${srcFile} $@ '' diff --git a/nix/writeExecline/default.nix b/nix/writeExecline/default.nix index 8626aa4608..5169b01386 100644 --- a/nix/writeExecline/default.nix +++ b/nix/writeExecline/default.nix @@ -14,9 +14,10 @@ name: # "env": don’t substitute, set # and 0…n environment vaariables, where n=$# # "none": don’t substitute or set any positional arguments # "env-no-push": like "env", but bypass the push-phase. Not recommended. - argMode ? "var", - # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S" - readNArgs ? 0, + argMode ? "var" +, # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S" + readNArgs ? 0 +, }: # Nested list of lists of commands. # Inner lists are translated to execline blocks. @@ -24,7 +25,7 @@ argList: let env = - if argMode == "var" then "s${toString readNArgs}" + if argMode == "var" then "s${toString readNArgs}" else if argMode == "var-full" then "S${toString readNArgs}" else if argMode == "env" then "" else if argMode == "none" then "P" @@ -32,7 +33,7 @@ let else abort ''"${toString argMode}" is not a valid argMode, use one of "var", "var-full", "env", "none", "env-no-push".''; in - depot.nix.writeScript name '' - #!${pkgs.execline}/bin/execlineb -W${env} - ${depot.nix.escapeExecline argList} - '' +depot.nix.writeScript name '' + #!${pkgs.execline}/bin/execlineb -W${env} + ${depot.nix.escapeExecline argList} +'' diff --git a/nix/writeScript/default.nix b/nix/writeScript/default.nix index e8e6e0fa10..1f53b4e4ff 100644 --- a/nix/writeScript/default.nix +++ b/nix/writeScript/default.nix @@ -5,25 +5,31 @@ let bins = depot.nix.getBins pkgs.s6-portable-utils [ - "s6-cat" - "s6-chmod" - ]; + "s6-cat" + "s6-chmod" + ]; in name: # string of the executable script that is put in $out script: -depot.nix.runExecline name { +depot.nix.runExecline name +{ stdin = script; derivationArgs = { preferLocalBuild = true; allowSubstitutes = false; }; } [ - "importas" "out" "out" + "importas" + "out" + "out" # this pipes stdout of s6-cat to $out # and s6-cat redirects from stdin to stdout - "if" [ "redirfd" "-w" "1" "$out" bins.s6-cat ] - bins.s6-chmod "0755" "$out" + "if" + [ "redirfd" "-w" "1" "$out" bins.s6-cat ] + bins.s6-chmod + "0755" + "$out" ] diff --git a/nix/writers/default.nix b/nix/writers/default.nix index b0795ab2c8..55355913a9 100644 --- a/nix/writers/default.nix +++ b/nix/writers/default.nix @@ -2,62 +2,71 @@ let bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-ls" "s6-touch" ] - ; + ; - linkTo = name: path: depot.nix.runExecline.local name {} [ - "importas" "out" "out" - bins.s6-ln "-s" path "$out" + linkTo = name: path: depot.nix.runExecline.local name { } [ + "importas" + "out" + "out" + bins.s6-ln + "-s" + path + "$out" ]; # Build a rust executable, $out is the executable. - rustSimple = args@{name, ...}: src: + rustSimple = args@{ name, ... }: src: linkTo name "${rustSimpleBin args src}/bin/${name}"; # Like `rustSimple`, but put the binary in `$out/bin/`. - rustSimpleBin = { - name, - dependencies ? [], - doCheck ? true, - }: src: + rustSimpleBin = + { name + , dependencies ? 
[ ] + , doCheck ? true + , + }: src: (if doCheck then testRustSimple else pkgs.lib.id) - (pkgs.buildRustCrate ({ - pname = name; - version = "1.0.0"; - crateName = name; - crateBin = [ name ]; - dependencies = dependencies; - src = pkgs.runCommandLocal "write-main.rs" { - src = src; - passAsFile = [ "src" ]; - } '' - mkdir -p $out/src/bin - cp "$srcPath" $out/src/bin/${name}.rs - find $out - ''; - })); + (pkgs.buildRustCrate ({ + pname = name; + version = "1.0.0"; + crateName = name; + crateBin = [ name ]; + dependencies = dependencies; + src = pkgs.runCommandLocal "write-main.rs" + { + src = src; + passAsFile = [ "src" ]; + } '' + mkdir -p $out/src/bin + cp "$srcPath" $out/src/bin/${name}.rs + find $out + ''; + })); # Build a rust library, that can be used as dependency to `rustSimple`. # Wrapper around `pkgs.buildRustCrate`, takes all its arguments. - rustSimpleLib = { - name, - dependencies ? [], - doCheck ? true, - }: src: + rustSimpleLib = + { name + , dependencies ? [ ] + , doCheck ? true + , + }: src: (if doCheck then testRustSimple else pkgs.lib.id) - (pkgs.buildRustCrate ({ - pname = name; - version = "1.0.0"; - crateName = name; - dependencies = dependencies; - src = pkgs.runCommandLocal "write-lib.rs" { - src = src; - passAsFile = [ "src" ]; - } '' - mkdir -p $out/src - cp "$srcPath" $out/src/lib.rs - find $out - ''; - })); + (pkgs.buildRustCrate ({ + pname = name; + version = "1.0.0"; + crateName = name; + dependencies = dependencies; + src = pkgs.runCommandLocal "write-lib.rs" + { + src = src; + passAsFile = [ "src" ]; + } '' + mkdir -p $out/src + cp "$srcPath" $out/src/lib.rs + find $out + ''; + })); /* Takes a `buildRustCrate` derivation as an input, * builds it with `{ buildTests = true; }` and runs @@ -72,19 +81,30 @@ let testRustSimple = rustDrv: let crate = buildTests: rustDrv.override { inherit buildTests; }; - tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" {} [ - "importas" "out" "out" - "if" [ - "pipeline" [ bins.s6-ls "${crate true}/tests" ] - "forstdin" "-o0" "test" - "importas" "test" "test" + tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" { } [ + "importas" + "out" + "out" + "if" + [ + "pipeline" + [ bins.s6-ls "${crate true}/tests" ] + "forstdin" + "-o0" + "test" + "importas" + "test" + "test" "${crate true}/tests/$test" ] - bins.s6-touch "$out" + bins.s6-touch + "$out" ]; - in depot.nix.drvSeqL [ tests ] (crate false); + in + depot.nix.drvSeqL [ tests ] (crate false); -in { +in +{ inherit rustSimple rustSimpleBin diff --git a/nix/writers/tests/rust.nix b/nix/writers/tests/rust.nix index 8a12c95ec7..232a2dc608 100644 --- a/nix/writers/tests/rust.nix +++ b/nix/writers/tests/rust.nix @@ -11,15 +11,20 @@ let coreutils ; - run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [ - "if" [ drv ] - "importas" "out" "out" - "${coreutils}/bin/touch" "$out" + run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [ + "if" + [ drv ] + "importas" + "out" + "out" + "${coreutils}/bin/touch" + "$out" ]; - rustTransitiveLib = rustSimpleLib { - name = "transitive"; - } '' + rustTransitiveLib = rustSimpleLib + { + name = "transitive"; + } '' pub fn transitive(s: &str) -> String { let mut new = s.to_string(); new.push_str(" 1 2 3"); @@ -37,10 +42,11 @@ let } ''; - rustTestLib = rustSimpleLib { - name = "test_lib"; - dependencies = [ rustTransitiveLib ]; - } '' + rustTestLib = rustSimpleLib + { + name = "test_lib"; + dependencies = [ rustTransitiveLib ]; + } '' extern crate transitive; use transitive::{transitive}; pub fn test() -> String 
{ @@ -48,10 +54,11 @@ let } ''; - rustWithLib = run (rustSimple { - name = "rust-with-lib"; - dependencies = [ rustTestLib ]; - } '' + rustWithLib = run (rustSimple + { + name = "rust-with-lib"; + dependencies = [ rustTestLib ]; + } '' extern crate test_lib; fn main() { @@ -60,7 +67,8 @@ let ''); -in depot.nix.readTree.drvTargets { +in +depot.nix.readTree.drvTargets { inherit rustTransitiveLib rustWithLib diff --git a/nix/yants/default.nix b/nix/yants/default.nix index 2bbf4dd15a..cb9fc08287 100644 --- a/nix/yants/default.nix +++ b/nix/yants/default.nix @@ -6,10 +6,10 @@ # # All types (should) compose as expected. -{ lib ? (import {}).lib, ... }: +{ lib ? (import { }).lib, ... }: with builtins; let - prettyPrint = lib.generators.toPretty {}; + prettyPrint = lib.generators.toPretty { }; # typedef' :: struct { # name = string; @@ -34,41 +34,44 @@ with builtins; let # # This function is the low-level primitive used to create types. For # many cases the higher-level 'typedef' function is more appropriate. - typedef' = { name, checkType - , checkToBool ? (result: result.ok) - , toError ? (_: result: result.err) - , def ? null - , match ? null }: { - inherit name checkToBool toError; - - # check :: a -> bool - # - # This function is used to determine whether a given type is - # conformant. - check = value: checkToBool (checkType value); - - # checkType :: a -> struct { ok = bool; err = option string; } - # - # This function checks whether the passed value is type conformant - # and returns an optional type error string otherwise. - inherit checkType; - - # __functor :: a -> a - # - # This function checks whether the passed value is type conformant - # and throws an error if it is not. - # - # The name of this function is a special attribute in Nix that - # makes it possible to execute a type attribute set like a normal - # function. - __functor = self: value: - let result = self.checkType value; - in if checkToBool result then value - else throw (toError value result); - }; + typedef' = + { name + , checkType + , checkToBool ? (result: result.ok) + , toError ? (_: result: result.err) + , def ? null + , match ? null + }: { + inherit name checkToBool toError; + + # check :: a -> bool + # + # This function is used to determine whether a given type is + # conformant. + check = value: checkToBool (checkType value); + + # checkType :: a -> struct { ok = bool; err = option string; } + # + # This function checks whether the passed value is type conformant + # and returns an optional type error string otherwise. + inherit checkType; + + # __functor :: a -> a + # + # This function checks whether the passed value is type conformant + # and throws an error if it is not. + # + # The name of this function is a special attribute in Nix that + # makes it possible to execute a type attribute set like a normal + # function. 
+ __functor = self: value: + let result = self.checkType value; + in if checkToBool result then value + else throw (toError value result); + }; typeError = type: val: - "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'"; + "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'"; # typedef :: string -> (a -> bool) -> type # @@ -85,27 +88,34 @@ with builtins; let }); }; - checkEach = name: t: l: foldl' (acc: e: - let res = t.checkType e; + checkEach = name: t: l: foldl' + (acc: e: + let + res = t.checkType e; isT = t.checkToBool res; - in { - ok = acc.ok && isT; - err = if isT - then acc.err - else acc.err + "${prettyPrint e}: ${t.toError e res}\n"; - }) { ok = true; err = "expected type ${name}, but found:\n"; } l; -in lib.fix (self: { + in + { + ok = acc.ok && isT; + err = + if isT + then acc.err + else acc.err + "${prettyPrint e}: ${t.toError e res}\n"; + }) + { ok = true; err = "expected type ${name}, but found:\n"; } + l; +in +lib.fix (self: { # Primitive types - any = typedef "any" (_: true); - unit = typedef "unit" (v: v == {}); - int = typedef "int" isInt; - bool = typedef "bool" isBool; - float = typedef "float" isFloat; - string = typedef "string" isString; - path = typedef "path" (x: typeOf x == "path"); - drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation"); + any = typedef "any" (_: true); + unit = typedef "unit" (v: v == { }); + int = typedef "int" isInt; + bool = typedef "bool" isBool; + float = typedef "float" isFloat; + string = typedef "string" isString; + path = typedef "path" (x: typeOf x == "path"); + drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation"); function = typedef "function" (x: isFunction x || (isAttrs x && x ? "__functor" - && isFunction x.__functor)); + && isFunction x.__functor)); # Type for types themselves. Useful when defining polymorphic types. type = typedef "type" (x: @@ -124,7 +134,7 @@ in lib.fix (self: { in { ok = isNull v || (self.type t).checkToBool res; err = "expected type ${name}, but value does not conform to '${t.name}': " - + t.toError v res; + + t.toError v res; }; }; @@ -136,7 +146,8 @@ in lib.fix (self: { list = t: typedef' rec { name = "list<${t.name}>"; - checkType = v: if isList v + checkType = v: + if isList v then checkEach name (self.type t) v else { ok = false; @@ -147,7 +158,8 @@ in lib.fix (self: { attrs = t: typedef' rec { name = "attrs<${t.name}>"; - checkType = v: if isAttrs v + checkType = v: + if isAttrs v then checkEach name (self.type t) (attrValues v) else { ok = false; @@ -172,20 +184,23 @@ in lib.fix (self: { # checkField checks an individual field of the struct against # its definition and creates a typecheck result. These results # are aggregated during the actual checking. - checkField = def: name: value: let result = def.checkType value; in rec { - ok = def.checkToBool result; - err = if !ok && isNull value - then "missing required ${def.name} field '${name}'\n" - else "field '${name}': ${def.toError value result}\n"; - }; + checkField = def: name: value: + let result = def.checkType value; in rec { + ok = def.checkToBool result; + err = + if !ok && isNull value + then "missing required ${def.name} field '${name}'\n" + else "field '${name}': ${def.toError value result}\n"; + }; # checkExtraneous determines whether a (closed) struct contains # any fields that are not part of the definition. 
checkExtraneous = def: has: acc: if (length has) == 0 then acc else if (hasAttr (head has) def) - then checkExtraneous def (tail has) acc - else checkExtraneous def (tail has) { + then checkExtraneous def (tail has) acc + else + checkExtraneous def (tail has) { ok = false; err = acc.err + "unexpected struct field '${head has}'\n"; }; @@ -197,85 +212,102 @@ in lib.fix (self: { init = { ok = true; err = ""; }; extraneous = checkExtraneous def (attrNames value) init; - checkedFields = map (n: - let v = if hasAttr n value then value."${n}" else null; - in checkField def."${n}" n v) (attrNames def); - - combined = foldl' (acc: res: { - ok = acc.ok && res.ok; - err = if !res.ok then acc.err + res.err else acc.err; - }) init checkedFields; - in { + checkedFields = map + (n: + let v = if hasAttr n value then value."${n}" else null; + in checkField def."${n}" n v) + (attrNames def); + + combined = foldl' + (acc: res: { + ok = acc.ok && res.ok; + err = if !res.ok then acc.err + res.err else acc.err; + }) + init + checkedFields; + in + { ok = combined.ok && extraneous.ok; err = combined.err + extraneous.err; }; struct' = name: def: typedef' { inherit name def; - checkType = value: if isAttrs value + checkType = value: + if isAttrs value then (checkStruct (self.attrs self.type def) value) else { ok = false; err = typeError name value; }; - toError = _: result: "expected '${name}'-struct, but found:\n" + result.err; + toError = _: result: "expected '${name}'-struct, but found:\n" + result.err; }; - in arg: if isString arg then (struct' arg) else (struct' "anon" arg); + in + arg: if isString arg then (struct' arg) else (struct' "anon" arg); # Enums & pattern matching enum = - let - plain = name: def: typedef' { - inherit name def; + let + plain = name: def: typedef' { + inherit name def; - checkType = (x: isString x && elem x def); - checkToBool = x: x; - toError = value: _: "'${prettyPrint value} is not a member of enum ${name}"; - }; - enum' = name: def: lib.fix (e: (plain name def) // { - match = x: actions: deepSeq (map e (attrNames actions)) ( - let - actionKeys = attrNames actions; - missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] def; - in if (length missing) > 0 - then throw "Missing match action for members: ${prettyPrint missing}" - else actions."${e x}"); - }); - in arg: if isString arg then (enum' arg) else (enum' "anon" arg); + checkType = (x: isString x && elem x def); + checkToBool = x: x; + toError = value: _: "'${prettyPrint value} is not a member of enum ${name}"; + }; + enum' = name: def: lib.fix (e: (plain name def) // { + match = x: actions: deepSeq (map e (attrNames actions)) ( + let + actionKeys = attrNames actions; + missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] def; + in + if (length missing) > 0 + then throw "Missing match action for members: ${prettyPrint missing}" + else actions."${e x}" + ); + }); + in + arg: if isString arg then (enum' arg) else (enum' "anon" arg); # Sum types # # The representation of a sum type is an attribute set with only one # value, where the key of the value denotes the variant of the type. 
sum = - let - plain = name: def: typedef' { - inherit name def; - checkType = (x: - let variant = elemAt (attrNames x) 0; - in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def - then let t = def."${variant}"; - v = x."${variant}"; - res = t.checkType v; - in if t.checkToBool res - then { ok = true; } - else { - ok = false; - err = "while checking '${name}' variant '${variant}': " - + t.toError v res; - } + let + plain = name: def: typedef' { + inherit name def; + checkType = (x: + let variant = elemAt (attrNames x) 0; + in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def + then + let + t = def."${variant}"; + v = x."${variant}"; + res = t.checkType v; + in + if t.checkToBool res + then { ok = true; } + else { + ok = false; + err = "while checking '${name}' variant '${variant}': " + + t.toError v res; + } else { ok = false; err = typeError name x; } - ); - }; - sum' = name: def: lib.fix (s: (plain name def) // { - match = x: actions: - let variant = deepSeq (s x) (elemAt (attrNames x) 0); - actionKeys = attrNames actions; - defKeys = attrNames def; - missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] defKeys; - in if (length missing) > 0 - then throw "Missing match action for variants: ${prettyPrint missing}" - else actions."${variant}" x."${variant}"; - }); - in arg: if isString arg then (sum' arg) else (sum' "anon" arg); + ); + }; + sum' = name: def: lib.fix (s: (plain name def) // { + match = x: actions: + let + variant = deepSeq (s x) (elemAt (attrNames x) 0); + actionKeys = attrNames actions; + defKeys = attrNames def; + missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] defKeys; + in + if (length missing) > 0 + then throw "Missing match action for variants: ${prettyPrint missing}" + else actions."${variant}" x."${variant}"; + }); + in + arg: if isString arg then (sum' arg) else (sum' "anon" arg); # Typed function definitions # @@ -289,15 +321,19 @@ in lib.fix (self: { mkFunc = sig: f: { inherit sig; __toString = self: foldl' (s: t: "${s} -> ${t.name}") - "λ :: ${(head self.sig).name}" (tail self.sig); + "λ :: ${(head self.sig).name}" + (tail self.sig); __functor = _: f; }; - defun' = sig: func: if length sig > 2 + defun' = sig: func: + if length sig > 2 then mkFunc sig (x: defun' (tail sig) (func ((head sig) x))) else mkFunc sig (x: ((head (tail sig)) (func ((head sig) x)))); - in sig: func: if length sig < 2 + in + sig: func: + if length sig < 2 then (throw "Signature must at least have two types (a -> b)") else defun' sig func; @@ -311,21 +347,22 @@ in lib.fix (self: { # depend on the value being of the wrapped type. 
restrict = name: pred: t: let restriction = "${t.name}[${name}]"; in typedef' { - name = restriction; - checkType = v: - let res = t.checkType v; - in + name = restriction; + checkType = v: + let res = t.checkType v; + in if !(t.checkToBool res) then res else let iok = pred v; - in if isBool iok then { + in + if isBool iok then { ok = iok; err = "${prettyPrint v} does not conform to restriction '${restriction}'"; } else - # use throw here to avoid spamming the build log + # use throw here to avoid spamming the build log throw "restriction '${restriction}' predicate returned unexpected value '${prettyPrint iok}' instead of boolean"; - }; + }; }) diff --git a/nix/yants/tests/default.nix b/nix/yants/tests/default.nix index 9a0b2403e1..0c7ec24188 100644 --- a/nix/yants/tests/default.nix +++ b/nix/yants/tests/default.nix @@ -25,7 +25,7 @@ let }; testPrimitives = it "checks that all primitive types match" [ - (assertDoesNotThrow "unit type" (unit {})) + (assertDoesNotThrow "unit type" (unit { })) (assertDoesNotThrow "int type" (int 15)) (assertDoesNotThrow "bool type" (bool false)) (assertDoesNotThrow "float type" (float 13.37)) @@ -44,7 +44,7 @@ let # Test that structures work as planned. person = struct "person" { name = string; - age = int; + age = int; contact = option (struct { email = string; @@ -55,7 +55,7 @@ let testStruct = it "checks that structures work as intended" [ (assertDoesNotThrow "person struct" (person { name = "Brynhjulf"; - age = 42; + age = 42; contact.email = "brynhjulf@yants.nix"; })) ]; @@ -70,7 +70,8 @@ let testEnum = it "checks enum definitions and matching" [ (assertEq "enum is matched correctly" - "It is in fact red!" (colour.match "red" colourMatcher)) + "It is in fact red!" + (colour.match "red" colourMatcher)) (assertThrows "out of bounds enum fails" (colour.match "alpha" (colourMatcher // { alpha = "This should never happen"; @@ -97,7 +98,8 @@ let testSum = it "checks sum types definitions and matching" [ (assertDoesNotThrow "creature sum type" some-human) (assertEq "sum type is matched correctly" - "It's a human named Brynhjulf" (creature.match some-human { + "It's a human named Brynhjulf" + (creature.match some-human { human = v: "It's a human named ${v.name}"; pet = v: "It's not supposed to be a pet!"; }) @@ -106,7 +108,7 @@ let # Test curried function definitions func = defun [ string int string ] - (name: age: "${name} is ${toString age} years old"); + (name: age: "${name} is ${toString age} years old"); testFunctions = it "checks function definitions" [ (assertDoesNotThrow "function application" (func "Brynhjulf" 42)) @@ -144,13 +146,13 @@ let ]; in - runTestsuite "yants" [ - testPrimitives - testPoly - testStruct - testEnum - testSum - testFunctions - testTypes - testRestrict - ] +runTestsuite "yants" [ + testPrimitives + testPoly + testStruct + testEnum + testSum + testFunctions + testTypes + testRestrict +] -- cgit 1.4.1
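
For quick reference, the conventions visible throughout the hunks above — empty attrsets written as `{ }`, and `in` placed on its own line with the body starting unindented below it — look like this in a minimal, hypothetical snippet (not a file from this tree; the names are made up purely for illustration):

    { pkgs, ... }:

    let
      # trivial derivation used only to illustrate the formatting style
      greet = name: pkgs.runCommandLocal "greet-${name}" { } ''
        echo "hello ${name}" > $out
      '';
    in
    {
      hello = greet "world";
    }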