92 files changed, 2201 insertions, 1256 deletions
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 000000000000..3372b1f03f7d --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,27 @@ +<!-- + +# Filing a Nix issue + +*WAIT* Are you sure you're filing your issue in the right repository? + +We appreciate you taking the time to tell us about issues you encounter, but routing the issue to the right place will get you help sooner and save everyone time. + +This is the Nix repository, and issues here should be about Nix the build and package management *_tool_*. + +If you have a problem with a specific package on NixOS or when using Nix, you probably want to file an issue with _nixpkgs_, whose issue tracker is over at https://github.com/NixOS/nixpkgs/issues. + +Examples of _Nix_ issues: + +- Nix segfaults when I run `nix-build -A blahblah` +- The Nix language needs a new builtin: `builtins.foobar` +- Regression in the behavior of `nix-env` in Nix 2.0 + +Examples of _nixpkgs_ issues: + +- glibc is b0rked on aarch64 +- chromium in NixOS doesn't support U2F but google-chrome does! +- The OpenJDK package on macOS is missing a key component + +Chances are if you're a newcomer to the Nix world, you'll probably want the [nixpkgs tracker](https://github.com/NixOS/nixpkgs/issues). It also gets a lot more eyeball traffic so you'll probably get a response a lot more quickly. + +--> diff --git a/Makefile.config.in b/Makefile.config.in index 45a70cd6dd1a..fab82194656e 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -6,6 +6,7 @@ CXXFLAGS = @CXXFLAGS@ ENABLE_S3 = @ENABLE_S3@ HAVE_SODIUM = @HAVE_SODIUM@ HAVE_READLINE = @HAVE_READLINE@ +HAVE_BROTLI = @HAVE_BROTLI@ LIBCURL_LIBS = @LIBCURL_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ @@ -13,9 +14,10 @@ PACKAGE_VERSION = @PACKAGE_VERSION@ SODIUM_LIBS = @SODIUM_LIBS@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ +LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ bash = @bash@ bindir = @bindir@ -bro = @bro@ +brotli = @brotli@ lsof = @lsof@ datadir = @datadir@ datarootdir = @datarootdir@ diff --git a/configure.ac b/configure.ac index 9d8a81d0427b..83b2346d065c 100644 --- a/configure.ac +++ b/configure.ac @@ -61,6 +61,7 @@ CFLAGS= CXXFLAGS= AC_PROG_CC AC_PROG_CXX +AC_PROG_CPP AX_CXX_COMPILE_STDCXX_11 @@ -127,7 +128,7 @@ NEED_PROG(gzip, gzip) NEED_PROG(xz, xz) AC_PATH_PROG(dot, dot) AC_PATH_PROG(pv, pv, pv) -AC_PATH_PROG(bro, bro, bro) +AC_PATH_PROGS(brotli, brotli bro, bro) AC_PATH_PROG(lsof, lsof, lsof) @@ -176,6 +177,13 @@ AC_SUBST(HAVE_SODIUM, [$have_sodium]) PKG_CHECK_MODULES([LIBLZMA], [liblzma], [CXXFLAGS="$LIBLZMA_CFLAGS $CXXFLAGS"]) +# Look for libbrotli{enc,dec}, optional dependencies +PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], + [AC_DEFINE([HAVE_BROTLI], [1], [Whether to use libbrotli.]) + CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"] + have_brotli=1], [have_brotli=]) +AC_SUBST(HAVE_BROTLI, [$have_brotli]) + # Look for libseccomp, required for Linux sandboxing. if test "$sys_name" = linux; then PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp], @@ -186,11 +194,17 @@ fi # Look for aws-cpp-sdk-s3. 
AC_LANG_PUSH(C++) AC_CHECK_HEADERS([aws/s3/S3Client.h], - [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-cpp-sdk-s3.]) + [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=1], [enable_s3=]) AC_SUBST(ENABLE_S3, [$enable_s3]) AC_LANG_POP(C++) +if test -n "$enable_s3"; then + declare -a aws_version_tokens=($(printf '#include <aws/core/VersionConfig.h>\nAWS_SDK_VERSION_STRING' | $CPP - | grep -v '^#.*' | sed 's/"//g' | tr '.' ' ')) + AC_DEFINE_UNQUOTED([AWS_VERSION_MAJOR], ${aws_version_tokens@<:@0@:>@}, [Major version of aws-sdk-cpp.]) + AC_DEFINE_UNQUOTED([AWS_VERSION_MINOR], ${aws_version_tokens@<:@1@:>@}, [Minor version of aws-sdk-cpp.]) +fi + # Whether to use the Boehm garbage collector. AC_ARG_ENABLE(gc, AC_HELP_STRING([--enable-gc], diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix index e135b947fdbb..0ce1bab112f3 100644 --- a/corepkgs/fetchurl.nix +++ b/corepkgs/fetchurl.nix @@ -1,4 +1,4 @@ -{ system ? builtins.currentSystem +{ system ? "" # obsolete , url , md5 ? "", sha1 ? "", sha256 ? "", sha512 ? "" , outputHash ? @@ -17,7 +17,9 @@ derivation { inherit outputHashAlgo outputHash; outputHashMode = if unpack || executable then "recursive" else "flat"; - inherit name system url executable unpack; + inherit name url executable unpack; + + system = "builtin"; # No need to double the amount of network traffic preferLocalBuild = true; diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index e52cbcd535e3..fff7994f28df 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -312,7 +312,7 @@ false</literal>.</para> </varlistentry> - <varlistentry><term><literal>use-substitutes</literal></term> + <varlistentry><term><literal>substitute</literal></term> <listitem><para>If set to <literal>true</literal> (default), Nix will use binary substitutes if available. This option can be @@ -321,6 +321,20 @@ false</literal>.</para> </varlistentry> + <varlistentry><term><literal>builders-use-substitutes</literal></term> + + <listitem><para>If set to <literal>true</literal>, Nix will instruct + remote build machines to use their own binary substitutes if available. In + practical terms, this means that remote hosts will fetch as many build + dependencies as possible from their own substitutes (e.g, from + <literal>cache.nixos.org</literal>), instead of waiting for this host to + upload them all. This can drastically reduce build times if the network + connection between this computer and the remote build host is slow. Defaults + to <literal>false</literal>.</para></listitem> + + </varlistentry> + + <varlistentry><term><literal>fallback</literal></term> <listitem><para>If set to <literal>true</literal>, Nix will fall @@ -720,6 +734,14 @@ builtins.fetchurl { </varlistentry> + <varlistentry xml:id="conf-show-trace"><term><literal>show-trace</literal></term> + + <listitem><para>Causes Nix to print out a stack trace in case of Nix + expression evaluation errors.</para></listitem> + + </varlistentry> + + </variablelist> </para> diff --git a/doc/manual/command-ref/env-common.xml b/doc/manual/command-ref/env-common.xml index a83aeaf2e575..361d3e2b0330 100644 --- a/doc/manual/command-ref/env-common.xml +++ b/doc/manual/command-ref/env-common.xml @@ -154,6 +154,8 @@ $ mount -o bind /mnt/otherdisk/nix /nix</screen> <literal>daemon</literal> if you want to use the Nix daemon to execute Nix operations. 
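Referring back to the corepkgs/fetchurl.nix hunk near the top of this diff: that expression now runs as a fixed-output derivation on the "builtin" system type. The following is only a hypothetical invocation sketch (the URL and hash are placeholders, <nix/fetchurl.nix> is assumed to resolve through the "nix=" corepkgs search-path entry, and argument defaults such as name are not shown in the hunk):

import <nix/fetchurl.nix> {
  url = "https://example.org/source-1.0.tar.gz";                 # placeholder URL
  sha256 = "0000000000000000000000000000000000000000000000000000"; # placeholder hash
  unpack = false;   # keeps the "flat" outputHashMode branch shown in that hunk
}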
This is necessary in <link linkend="ssec-multi-user">multi-user Nix installations</link>. + If the Nix daemon's Unix socket is at some non-standard path, + this variable should be set to <literal>unix://path/to/socket</literal>. Otherwise, it should be left unset.</para></listitem> </varlistentry> diff --git a/doc/manual/command-ref/nix-build.xml b/doc/manual/command-ref/nix-build.xml index d6b2e5e5adb7..40fe7a43f10c 100644 --- a/doc/manual/command-ref/nix-build.xml +++ b/doc/manual/command-ref/nix-build.xml @@ -29,8 +29,6 @@ </group> <replaceable>attrPath</replaceable> </arg> - <arg><option>--drv-link</option> <replaceable>drvlink</replaceable></arg> - <arg><option>--add-drv-link</option></arg> <arg><option>--no-out-link</option></arg> <arg> <group choice='req'> @@ -91,25 +89,6 @@ also <xref linkend="sec-common-options" />.</phrase></para> <variablelist> - <varlistentry><term><option>--drv-link</option> <replaceable>drvlink</replaceable></term> - - <listitem><para>Add a symlink named - <replaceable>drvlink</replaceable> to the store derivation - produced by <command>nix-instantiate</command>. The derivation is - a root of the garbage collector until the symlink is deleted or - renamed. If there are multiple derivations, numbers are suffixed - to <replaceable>drvlink</replaceable> to distinguish between - them.</para></listitem> - - </varlistentry> - - <varlistentry><term><option>--add-drv-link</option></term> - - <listitem><para>Shorthand for <option>--drv-link</option> - <filename>./derivation</filename>.</para></listitem> - - </varlistentry> - <varlistentry><term><option>--no-out-link</option></term> <listitem><para>Do not create a symlink to the output path. Note diff --git a/doc/manual/command-ref/opt-common-syn.xml b/doc/manual/command-ref/opt-common-syn.xml index 3aff4e1b6357..168bef080f4f 100644 --- a/doc/manual/command-ref/opt-common-syn.xml +++ b/doc/manual/command-ref/opt-common-syn.xml @@ -47,7 +47,6 @@ </arg> <arg><option>--fallback</option></arg> <arg><option>--readonly-mode</option></arg> -<arg><option>--show-trace</option></arg> <arg> <option>-I</option> <replaceable>path</replaceable> diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml index 32d53c753a22..bcb60b30125c 100644 --- a/doc/manual/command-ref/opt-common.xml +++ b/doc/manual/command-ref/opt-common.xml @@ -301,13 +301,6 @@ </varlistentry> -<varlistentry><term><option>--show-trace</option></term> - - <listitem><para>Causes Nix to print out a stack trace in case of Nix - expression evaluation errors.</para></listitem> - -</varlistentry> - <varlistentry xml:id="opt-I"><term><option>-I</option> <replaceable>path</replaceable></term> diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 5a3a8645c1d9..81770bcf6292 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -308,8 +308,9 @@ stdenv.mkDerivation { … } </varlistentry> - <varlistentry><term><function>builtins.filterSource</function> - <replaceable>e1</replaceable> <replaceable>e2</replaceable></term> + <varlistentry xml:id='builtin-filterSource'> + <term><function>builtins.filterSource</function> + <replaceable>e1</replaceable> <replaceable>e2</replaceable></term> <listitem> @@ -768,6 +769,75 @@ Evaluates to <literal>[ "foo" ]</literal>. 
</varlistentry> + <varlistentry> + <term> + <function>builtins.path</function> + <replaceable>args</replaceable> + </term> + + <listitem> + <para> + An enrichment of the built-in path type, based on the attributes + present in <replaceable>args</replaceable>. All are optional + except <varname>path</varname>: + </para> + + <variablelist> + <varlistentry> + <term>path</term> + <listitem> + <para>The underlying path.</para> + </listitem> + </varlistentry> + <varlistentry> + <term>name</term> + <listitem> + <para> + The name of the path when added to the store. This can + used to reference paths that have nix-illegal characters + in their names, like <literal>@</literal>. + </para> + </listitem> + </varlistentry> + <varlistentry> + <term>filter</term> + <listitem> + <para> + A function of the type expected by + <link linkend="builtin-filterSource">builtins.filterSource</link>, + with the same semantics. + </para> + </listitem> + </varlistentry> + <varlistentry> + <term>recursive</term> + <listitem> + <para> + When <literal>false</literal>, when + <varname>path</varname> is added to the store it is with a + flat hash, rather than a hash of the NAR serialization of + the file. Thus, <varname>path</varname> must refer to a + regular file, not a directory. This allows similar + behavior to <literal>fetchurl</literal>. Defaults to + <literal>true</literal>. + </para> + </listitem> + </varlistentry> + <varlistentry> + <term>sha256</term> + <listitem> + <para> + When provided, this is the expected hash of the file at + the path. Evaluation will fail if the hash is incorrect, + and providing a hash allows + <literal>builtins.path</literal> to be used even when the + <literal>pure-eval</literal> nix config option is on. + </para> + </listitem> + </varlistentry> + </variablelist> + </listitem> + </varlistentry> <varlistentry><term><function>builtins.pathExists</function> <replaceable>path</replaceable></term> diff --git a/doc/manual/expressions/debug-build.xml b/doc/manual/expressions/debug-build.xml deleted file mode 100644 index 0c1f4e6719b2..000000000000 --- a/doc/manual/expressions/debug-build.xml +++ /dev/null @@ -1,34 +0,0 @@ -<section xmlns="http://docbook.org/ns/docbook" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:xi="http://www.w3.org/2001/XInclude" - version="5.0" - xml:id="sec-debug-build"> - -<title>Debugging Build Failures</title> - -<para>At the beginning of each phase of the build (such as unpacking, -building or installing), the set of all shell variables is written to -the file <filename>env-vars</filename> at the top-level build -directory. This is useful for debugging: it allows you to recreate -the environment in which a build was performed. For instance, if a -build fails, then assuming you used the <option>-K</option> flag, you -can go to the output directory and <quote>switch</quote> to the -environment of the builder: - -<screen> -$ nix-build -K ./foo.nix -... fails, keeping build directory `/tmp/nix-1234-0' - -$ cd /tmp/nix-1234-0 - -$ source env-vars - -<lineannotation>(edit some files...)</lineannotation> - -$ make - -<lineannotation>(execution continues with the same GCC, make, etc.)</lineannotation></screen> - -</para> - -</section> diff --git a/doc/manual/expressions/language-constructs.xml b/doc/manual/expressions/language-constructs.xml index 2f0027d479cd..47d95f8a13e3 100644 --- a/doc/manual/expressions/language-constructs.xml +++ b/doc/manual/expressions/language-constructs.xml @@ -61,7 +61,7 @@ evaluates to <literal>"foobar"</literal>. 
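A minimal usage sketch of the builtins.path interface documented above. Only path is required; the name and filter used here are made-up illustrations, and sha256 (omitted) would additionally allow use under pure-eval:

builtins.path {
  path = ./src;                                    # the underlying path (required)
  name = "my-source";                              # store name, may differ from baseNameOf ./src
  filter = path: type: baseNameOf path != ".git";  # same signature as builtins.filterSource
  recursive = true;                                # the default; false uses a flat file hash
}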
<simplesect><title>Inheriting attributes</title> -<para>When defining a set it is often convenient to copy variables +<para>When defining a set or in a let-expression it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). This can be shortened using the <literal>inherit</literal> keyword. For instance, @@ -72,7 +72,15 @@ let x = 123; in y = 456; }</programlisting> -evaluates to <literal>{ x = 123; y = 456; }</literal>. (Note that +is equivalent to + +<programlisting> +let x = 123; in +{ x = x; + y = 456; +}</programlisting> + +and both evaluate to <literal>{ x = 123; y = 456; }</literal>. (Note that this works because <varname>x</varname> is added to the lexical scope by the <literal>let</literal> construct.) It is also possible to inherit attributes from another set. For instance, in this fragment @@ -101,6 +109,26 @@ variables from the surrounding scope (<varname>fetchurl</varname> <varname>libXaw</varname> (the X Athena Widgets) from the <varname>xlibs</varname> (X11 client-side libraries) set.</para> +<para> +Summarizing the fragment + +<programlisting> +... +inherit x y z; +inherit (src-set) a b c; +...</programlisting> + +is equivalent to + +<programlisting> +... +x = x; y = y; z = z; +a = src-set.a; b = src-set.b; c = src-set.c; +...</programlisting> + +when used while defining local variables in a let-expression or +while defining a set.</para> + </simplesect> diff --git a/doc/manual/expressions/simple-building-testing.xml b/doc/manual/expressions/simple-building-testing.xml index bd3901a13351..0348c082b205 100644 --- a/doc/manual/expressions/simple-building-testing.xml +++ b/doc/manual/expressions/simple-building-testing.xml @@ -81,6 +81,4 @@ Just pass the option <link linkend='opt-max-jobs'><option>-j in parallel, or set. Typically this should be the number of CPUs.</para> -<xi:include href="debug-build.xml" /> - </section> diff --git a/doc/manual/installation/installing-binary.xml b/doc/manual/installation/installing-binary.xml index 24e76eafeb18..7e8dfb0db3d4 100644 --- a/doc/manual/installation/installing-binary.xml +++ b/doc/manual/installation/installing-binary.xml @@ -79,16 +79,6 @@ alice$ ./install </para> -<para>Nix can be uninstalled using <command>rpm -e nix</command> or -<command>dpkg -r nix</command> on RPM- and Dpkg-based systems, -respectively. After this you should manually remove the Nix store and -other auxiliary data, if desired: - -<screen> -$ rm -rf /nix</screen> - -</para> - <para>You can uninstall Nix simply by running: <screen> diff --git a/doc/manual/installation/supported-platforms.xml b/doc/manual/installation/supported-platforms.xml index a468a5640637..6858573ff407 100644 --- a/doc/manual/installation/supported-platforms.xml +++ b/doc/manual/installation/supported-platforms.xml @@ -33,7 +33,4 @@ </para> -<para>Nix is fairly portable, so it should work on most platforms that -support POSIX threads and have a C++11 compiler.</para> - </chapter> diff --git a/doc/manual/introduction/quick-start.xml b/doc/manual/introduction/quick-start.xml index aa239b7538b4..1ce6c8d50a1b 100644 --- a/doc/manual/introduction/quick-start.xml +++ b/doc/manual/introduction/quick-start.xml @@ -15,7 +15,7 @@ to subsequent chapters.</para> <step><para>Install single-user Nix by running the following: <screen> -$ curl https://nixos.org/nix/install | sh +$ bash <(curl https://nixos.org/nix/install) </screen> This will install Nix in <filename>/nix</filename>. 
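Referring back to the inherit summary earlier in this hunk, a small self-contained sketch of inherit used inside a let-expression (the names are made up):

let
  src-set = { a = 1; b = 2; c = 3; };
  inherit (src-set) a b c;   # equivalent to a = src-set.a; b = src-set.b; c = src-set.c;
in [ a b c ]                 # evaluates to [ 1 2 3 ]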
The install script diff --git a/doc/manual/manual.xml b/doc/manual/manual.xml index 61205d916993..b5a6af7d0c3b 100644 --- a/doc/manual/manual.xml +++ b/doc/manual/manual.xml @@ -12,14 +12,11 @@ <firstname>Eelco</firstname> <surname>Dolstra</surname> </personname> - <affiliation> - <orgname>LogicBlox</orgname> - </affiliation> <contrib>Author</contrib> </author> <copyright> - <year>2004-2014</year> + <year>2004-2017</year> <holder>Eelco Dolstra</holder> </copyright> @@ -41,7 +38,6 @@ <xi:include href="expressions/writing-nix-expressions.xml" /> <xi:include href="advanced-topics/advanced-topics.xml" /> <xi:include href="command-ref/command-ref.xml" /> - <xi:include href="troubleshooting/troubleshooting.xml" /> <xi:include href="glossary/glossary.xml" /> <xi:include href="hacking.xml" /> <xi:include href="release-notes/release-notes.xml" /> diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml index c4b14bc5499e..b8392a647af9 100644 --- a/doc/manual/release-notes/release-notes.xml +++ b/doc/manual/release-notes/release-notes.xml @@ -12,7 +12,7 @@ </partintro> --> -<xi:include href="rl-1.12.xml" /> +<xi:include href="rl-2.0.xml" /> <xi:include href="rl-1.11.10.xml" /> <xi:include href="rl-1.11.xml" /> <xi:include href="rl-1.10.xml" /> diff --git a/doc/manual/release-notes/rl-1.12.xml b/doc/manual/release-notes/rl-2.0.xml index 29943e3e6e97..32cdb1d0cefc 100644 --- a/doc/manual/release-notes/rl-1.12.xml +++ b/doc/manual/release-notes/rl-2.0.xml @@ -2,9 +2,9 @@ xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xi="http://www.w3.org/2001/XInclude" version="5.0" - xml:id="ssec-relnotes-1.12"> + xml:id="ssec-relnotes-2.0"> -<title>Release 1.12 (TBA)</title> +<title>Release 2.0 (2018-02-??)</title> <para>This release has the following new features:</para> @@ -79,6 +79,11 @@ <listitem><para><command>nix add-to-store</command> (970366266b8df712f5f9cedb45af183ef5a8357f).</para></listitem> + <listitem><para><command>nix upgrade-nix</command> upgrades Nix + to the latest stable version. This requires that Nix is + installed in a profile. (Thus it won’t work on NixOS, or if it’s + installed outside of the Nix store.)</para></listitem> + <listitem><para>Progress indicator.</para></listitem> <listitem><para>All options are available as flags now @@ -94,11 +99,11 @@ </listitem> <listitem> - <para>New build mode <command>nix-build --hash</command> that - builds a derivation, computes the hash of the output, and moves - the output to the store path corresponding to what a fixed-output - derivation with that hash would produce. - (Add docs and examples; see d367b8e7875161e655deaa96bf8a5dd0bcf8229e)</para> + <para>If a fixed-output derivation produces a result with an + incorrect hash, the output path will be moved to the location + corresponding to the actual hash and registered as valid. 
Thus, a + subsequent build of the fixed-output derivation with the correct + hash is unnecessary.</para> </listitem> <listitem> diff --git a/doc/manual/troubleshooting/collisions-nixenv.xml b/doc/manual/troubleshooting/collisions-nixenv.xml deleted file mode 100644 index 23cc43faf088..000000000000 --- a/doc/manual/troubleshooting/collisions-nixenv.xml +++ /dev/null @@ -1,38 +0,0 @@ -<section xmlns="http://docbook.org/ns/docbook" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:xi="http://www.w3.org/2001/XInclude" - version="5.0" - xml:id="sec-collisions-nixenv"> - -<title>Collisions in <command>nix-env</command></title> - -<para>Symptom: when installing or upgrading, you get an error message such as - -<screen> -$ nix-env -i docbook-xml -... -adding /nix/store/s5hyxgm62gk2...-docbook-xml-4.2 -collision between `/nix/store/s5hyxgm62gk2...-docbook-xml-4.2/xml/dtd/docbook/calstblx.dtd' - and `/nix/store/06h377hr4b33...-docbook-xml-4.3/xml/dtd/docbook/calstblx.dtd' - at /nix/store/...-builder.pl line 62.</screen> - -</para> - -<para>The cause is that two installed packages in the user environment -have overlapping filenames (e.g., -<filename>xml/dtd/docbook/calstblx.dtd</filename>. This usually -happens when you accidentally try to install two versions of the same -package. For instance, in the example above, the Nix Packages -collection contains two versions of <literal>docbook-xml</literal>, so -<command>nix-env -i</command> will try to install both. The default -user environment builder has no way to way to resolve such conflicts, -so it just gives up.</para> - -<para>Solution: remove one of the offending packages from the user -environment (if already installed) using <command>nix-env --e</command>, or specify exactly which version should be installed -(e.g., <literal>nix-env -i docbook-xml-4.2</literal>).</para> - -<!-- FIXME: describe priorities --> - -</section> diff --git a/doc/manual/troubleshooting/links-nix-store.xml b/doc/manual/troubleshooting/links-nix-store.xml deleted file mode 100644 index c768889567d0..000000000000 --- a/doc/manual/troubleshooting/links-nix-store.xml +++ /dev/null @@ -1,43 +0,0 @@ -<section xmlns="http://docbook.org/ns/docbook" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:xi="http://www.w3.org/2001/XInclude" - version="5.0" - xml:id="sec-links-nix-store"> - -<title><quote>Too many links</quote> Error in the Nix store</title> - - -<para>Symptom: when building something, you get an error message such as - -<screen> -... -<literal>mkdir: cannot create directory `/nix/store/<replaceable>name</replaceable>': Too many links</literal></screen> - -</para> - -<para>This is usually because you have more than 32,000 subdirectories -in <filename>/nix/store</filename>, as can be seen using <command>ls --l</command>: - -<screen> -$ ls -ld /nix/store -drwxrwxrwt 32000 nix nix 4620288 Sep 8 15:08 store</screen> - -The <literal>ext2</literal> file system is limited to an inode link -count of 32,000 (each subdirectory increasing the count by one). -Furthermore, the <literal>st_nlink</literal> field of the -<function>stat</function> system call is a 16-bit value.</para> - -<para>This only happens on very large Nix installations (such as build -machines).</para> - -<para>Quick solution: run the garbage collector. You may want to use -the <option>--max-links</option> option.</para> - -<para>Real solution: put the Nix store on a file system that supports -more than 32,000 subdirectories per directory, such as ext4. 
(This -doesn’t solve the <literal>st_nlink</literal> limit, but ext4 lies to -the kernel by reporting a link count of 1 if it exceeds the -limit.)</para> - -</section> diff --git a/doc/manual/troubleshooting/troubleshooting.xml b/doc/manual/troubleshooting/troubleshooting.xml deleted file mode 100644 index 1e973a192b18..000000000000 --- a/doc/manual/troubleshooting/troubleshooting.xml +++ /dev/null @@ -1,16 +0,0 @@ -<appendix xmlns="http://docbook.org/ns/docbook" - xmlns:xlink="http://www.w3.org/1999/xlink" - xmlns:xi="http://www.w3.org/2001/XInclude" - version="5.0" - xml:id="ch-troubleshooting"> - -<title>Troubleshooting</title> - -<para>This section provides solutions for some common problems. See -the <link xlink:href="https://github.com/NixOS/nix/issues">Nix bug -tracker</link> for a list of currently known issues.</para> - -<xi:include href="collisions-nixenv.xml" /> -<xi:include href="links-nix-store.xml" /> - -</appendix> diff --git a/local.mk b/local.mk index 0a225423741d..40a910991a48 100644 --- a/local.mk +++ b/local.mk @@ -1,6 +1,5 @@ ifeq ($(MAKECMDGOALS), dist) - # Make sure we are in repo root with `--git-dir` - dist-files += $(shell git --git-dir=.git ls-files || find * -type f) + dist-files += $(shell cat .dist-files) endif dist-files += configure config.h.in nix.spec perl/configure diff --git a/mk/tests.mk b/mk/tests.mk index e353d46a0d02..70c30661b95f 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -39,7 +39,7 @@ installcheck: echo "$${red}$$failed out of $$total tests failed $$normal"; \ exit 1; \ else \ - echo "$${green}All tests succeeded"; \ + echo "$${green}All tests succeeded$$normal"; \ fi .PHONY: check installcheck diff --git a/release-common.nix b/release-common.nix index 4553118e1f56..a4ae24ba4826 100644 --- a/release-common.nix +++ b/release-common.nix @@ -1,7 +1,9 @@ { pkgs }: rec { - sh = pkgs.busybox.override { + # Use "busybox-sandbox-shell" if present, + # if not (legacy) fallback and hope it's sufficient. + sh = pkgs.busybox-sandbox-shell or (pkgs.busybox.override { useMusl = true; enableStatic = true; enableMinimal = true; @@ -11,7 +13,7 @@ rec { CONFIG_ASH_TEST y CONFIG_ASH_OPTIMIZE_FOR_SIZE y ''; - }; + }); configureFlags = [ "--disable-init-state" diff --git a/release.nix b/release.nix index 538211ca9498..d9c4f1efb4b9 100644 --- a/release.nix +++ b/release.nix @@ -1,12 +1,12 @@ -{ nix ? { outPath = ./.; revCount = 1234; shortRev = "abcdef"; } -, nixpkgs ? { outPath = <nixpkgs>; revCount = 1234; shortRev = "abcdef"; } +{ nix ? builtins.fetchGit ./. +, nixpkgs ? fetchTarball channel:nixos-17.09 , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] }: let - pkgs = import <nixpkgs> {}; + pkgs = import nixpkgs { system = builtins.currentSystem or "x86_64-linux"; }; jobs = rec { @@ -27,16 +27,13 @@ let pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl autoconf-archive - git ] ++ lib.optional stdenv.isLinux libseccomp; configureFlags = "--enable-gc"; postUnpack = '' - # Clean up when building from a working tree. - if [[ -d $sourceRoot/.git ]]; then - git -C $sourceRoot clean -fd - fi + (cd source && find . 
-type f) | cut -c3- > source/.dist-files + cat source/.dist-files ''; preConfigure = '' @@ -62,7 +59,7 @@ let build = pkgs.lib.genAttrs systems (system: - with import <nixpkgs> { inherit system; }; + with import nixpkgs { inherit system; }; with import ./release-common.nix { inherit pkgs; }; @@ -105,7 +102,7 @@ let perlBindings = pkgs.lib.genAttrs systems (system: - let pkgs = import <nixpkgs> { inherit system; }; in with pkgs; + let pkgs = import nixpkgs { inherit system; }; in with pkgs; releaseTools.nixBuild { name = "nix-perl"; @@ -131,7 +128,7 @@ let binaryTarball = pkgs.lib.genAttrs systems (system: # FIXME: temporarily use a different branch for the Darwin build. - with import <nixpkgs> { inherit system; }; + with import nixpkgs { inherit system; }; let toplevel = builtins.getAttr system jobs.build; @@ -174,7 +171,7 @@ let coverage = - with import <nixpkgs> { system = "x86_64-linux"; }; + with import nixpkgs { system = "x86_64-linux"; }; releaseTools.coverageAnalysis { name = "nix-build"; @@ -218,10 +215,12 @@ let # System tests. tests.remoteBuilds = (import ./tests/remote-builds.nix rec { + inherit nixpkgs; nix = build.x86_64-linux; system = "x86_64-linux"; }); tests.nix-copy-closure = (import ./tests/nix-copy-closure.nix rec { + inherit nixpkgs; nix = build.x86_64-linux; system = "x86_64-linux"; }); @@ -229,11 +228,12 @@ let (pkgs.lib.filter (system: system == "x86_64-linux" || system == "i686-linux") systems) (system: import ./tests/setuid.nix rec { + inherit nixpkgs; nix = build.${system}; inherit system; }); tests.binaryTarball = - with import <nixpkgs> { system = "x86_64-linux"; }; + with import nixpkgs { system = "x86_64-linux"; }; vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test" { diskImage = vmTools.diskImages.ubuntu1204x86_64; } @@ -252,7 +252,7 @@ let ''); # */ tests.evalNixpkgs = - import <nixpkgs/pkgs/top-level/make-tarball.nix> { + import (nixpkgs + "/pkgs/top-level/make-tarball.nix") { inherit nixpkgs; inherit pkgs; nix = build.x86_64-linux; @@ -306,7 +306,7 @@ let makeRPM = system: diskImageFun: extraPackages: - with import <nixpkgs> { inherit system; }; + with import nixpkgs { inherit system; }; releaseTools.rpmBuild rec { name = "nix-rpm"; @@ -315,7 +315,8 @@ let { extraPackages = [ "sqlite" "sqlite-devel" "bzip2-devel" "libcurl-devel" "openssl-devel" "xz-devel" "libseccomp-devel" ] ++ extraPackages; }; - memSize = 1024; + # At most 2047MB can be simulated in qemu-system-i386 + memSize = 2047; meta.schedulingPriority = 50; postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; #enableParallelBuilding = true; @@ -328,7 +329,7 @@ let makeDeb = system: diskImageFun: extraPackages: extraDebPackages: - with import <nixpkgs> { inherit system; }; + with import nixpkgs { inherit system; }; releaseTools.debBuild { name = "nix-deb"; diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index b6b3305bac71..8d59c1c2b9aa 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -33,7 +33,7 @@ readonly NIX_FIRST_BUILD_UID="30001" readonly NIX_ROOT="/nix" readonly PLIST_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist -readonly PROFILE_TARGETS=("/etc/profile" "/etc/bashrc" "/etc/zshrc") +readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/zshrc") readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" @@ -153,7 +153,7 @@ subheader() { } row() { - printf "$BOLD%s$ESC:\t%s\n" "$1" "$2" + printf 
"$BOLD%s$ESC:\\t%s\\n" "$1" "$2" } task() { @@ -218,7 +218,7 @@ __sudo() { echo "I am executing:" echo "" - printf " $ sudo %s\n" "$cmd" + printf " $ sudo %s\\n" "$cmd" echo "" echo "$expl" echo "" @@ -647,7 +647,7 @@ chat_about_sudo() { cat <<EOF This script is going to call sudo a lot. Normally, it would show you exactly what commands it is running and why. However, the script is -run in a headless fashion, like this: +run in a headless fashion, like this: $ curl https://nixos.org/nix/install | sh @@ -695,7 +695,7 @@ install_from_extracted_nix() { cd "$EXTRACTED_NIX_PATH" _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ - rsync -rlpt "$(pwd)/store/" "$NIX_ROOT/store/" + rsync -rlpt ./store/* "$NIX_ROOT/store/" if [ -d "$NIX_INSTALLED_NIX" ]; then echo " Alright! We have our first nix at $NIX_INSTALLED_NIX" diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index ab95c09c8305..a5f52274fc70 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -60,12 +60,6 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_USER_PROFILE_DIR" - for i in $NIX_PROFILES; do - if [ -d "$i/lib/aspell" ]; then - export ASPELL_CONF="dict-dir $i/lib/aspell" - fi - done - # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt @@ -81,7 +75,7 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt" fi - if [ -n ${MANPATH} ]; then + if [ -n "${MANPATH}" ]; then export MANPATH="$NIX_LINK/share/man:$MANPATH" fi diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 445006b327fd..df579729af29 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -218,9 +218,11 @@ connected: signal(SIGALRM, old); } + auto substitute = settings.buildersUseSubstitutes ? Substitute : NoSubstitute; + { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri)); - copyPaths(store, ref<Store>(sshStore), inputs, NoRepair, NoCheckSigs); + copyPaths(store, ref<Store>(sshStore), inputs, NoRepair, NoCheckSigs, substitute); } uploadLock = -1; @@ -240,7 +242,7 @@ connected: if (!missing.empty()) { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri)); setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */ - copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs); + copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, substitute); } return; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 63de2d60a147..0b0a0f7b1790 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -300,16 +300,25 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store) { countCalls = getEnv("NIX_COUNT_CALLS", "0") != "0"; - restricted = settings.restrictEval; - assert(gcInitialised); /* Initialise the Nix expression search path. 
*/ - Strings paths = parseNixPath(getEnv("NIX_PATH", "")); - for (auto & i : _searchPath) addToSearchPath(i); - for (auto & i : paths) addToSearchPath(i); + if (!settings.pureEval) { + Strings paths = parseNixPath(getEnv("NIX_PATH", "")); + for (auto & i : _searchPath) addToSearchPath(i); + for (auto & i : paths) addToSearchPath(i); + } addToSearchPath("nix=" + settings.nixDataDir + "/nix/corepkgs"); + if (settings.restrictEval || settings.pureEval) { + allowedPaths = PathSet(); + for (auto & i : searchPath) { + auto r = resolveSearchPathElem(i); + if (!r.first) continue; + allowedPaths->insert(r.second); + } + } + clearValue(vEmptySet); vEmptySet.type = tAttrs; vEmptySet.attrs = allocBindings(0); @@ -326,38 +335,36 @@ EvalState::~EvalState() Path EvalState::checkSourcePath(const Path & path_) { - if (!restricted) return path_; + if (!allowedPaths) return path_; + + bool found = false; + + for (auto & i : *allowedPaths) { + if (isDirOrInDir(path_, i)) { + found = true; + break; + } + } + + if (!found) + throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path_); /* Resolve symlinks. */ debug(format("checking access to '%s'") % path_); Path path = canonPath(path_, true); - for (auto & i : searchPath) { - auto r = resolveSearchPathElem(i); - if (!r.first) continue; - if (path == r.second || isInDir(path, r.second)) + for (auto & i : *allowedPaths) { + if (isDirOrInDir(path, i)) return path; } - /* To support import-from-derivation, allow access to anything in - the store. FIXME: only allow access to paths that have been - constructed by this evaluation. */ - if (store->isInStore(path)) return path; - -#if 0 - /* Hack to support the chroot dependencies of corepkgs (see - corepkgs/config.nix.in). */ - if (path == settings.nixPrefix && isStorePath(settings.nixPrefix)) - return path; -#endif - - throw RestrictedPathError(format("access to path '%1%' is forbidden in restricted mode") % path_); + throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path); } void EvalState::checkURI(const std::string & uri) { - if (!restricted) return; + if (!settings.restrictEval) return; /* 'uri' should be equal to a prefix, or in a subdirectory of a prefix. Thus, the prefix https://github.co does not permit @@ -371,10 +378,32 @@ void EvalState::checkURI(const std::string & uri) && (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/'))) return; + /* If the URI is a path, then check it against allowedPaths as + well. */ + if (hasPrefix(uri, "/")) { + checkSourcePath(uri); + return; + } + + if (hasPrefix(uri, "file://")) { + checkSourcePath(std::string(uri, 7)); + return; + } + throw RestrictedPathError("access to URI '%s' is forbidden in restricted mode", uri); } +Path EvalState::toRealPath(const Path & path, const PathSet & context) +{ + // FIXME: check whether 'path' is in 'context'. + return + !context.empty() && store->isInStore(path) + ? 
store->toRealPath(path) + : path; +}; + + void EvalState::addConstant(const string & name, Value & v) { Value * v2 = allocValue(); @@ -386,7 +415,7 @@ void EvalState::addConstant(const string & name, Value & v) } -void EvalState::addPrimOp(const string & name, +Value * EvalState::addPrimOp(const string & name, unsigned int arity, PrimOpFun primOp) { Value * v = allocValue(); @@ -397,6 +426,7 @@ void EvalState::addPrimOp(const string & name, staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl; baseEnv.values[baseEnvDispl++] = v; baseEnv.values[0]->attrs->push_back(Attr(sym, v)); + return v; } @@ -649,8 +679,10 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) } -void EvalState::evalFile(const Path & path, Value & v) +void EvalState::evalFile(const Path & path_, Value & v) { + auto path = checkSourcePath(path_); + FileEvalCache::iterator i; if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) { v = i->second; @@ -1546,7 +1578,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path) dstPath = srcToStore[path]; else { dstPath = settings.readOnlyMode - ? store->computeStorePathForPath(checkSourcePath(path)).first + ? store->computeStorePathForPath(baseNameOf(path), checkSourcePath(path)).first : store->addToStore(baseNameOf(path), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair); srcToStore[path] = dstPath; printMsg(lvlChatty, format("copied source '%1%' -> '%2%'") diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f0ab1435bff3..9e3d30d95f49 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -76,9 +76,9 @@ public: already exist there. */ RepairFlag repair; - /* If set, don't allow access to files outside of the Nix search - path or to environment variables. */ - bool restricted; + /* The allowed filesystem paths in restricted or pure evaluation + mode. */ + std::experimental::optional<PathSet> allowedPaths; Value vEmptySet; @@ -112,6 +112,15 @@ public: void checkURI(const std::string & uri); + /* When using a diverted store and 'path' is in the Nix store, map + 'path' to the diverted location (e.g. /nix/store/foo is mapped + to /home/alice/my-nix/nix/store/foo). However, this is only + done if the context is not empty, since otherwise we're + probably trying to read from the actual /nix/store. This is + intended to distinguish between import-from-derivation and + sources stored in the actual /nix/store. */ + Path toRealPath(const Path & path, const PathSet & context); + /* Parse a Nix expression from the specified file. 
*/ Expr * parseExprFromFile(const Path & path); Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv); @@ -203,7 +212,7 @@ private: void addConstant(const string & name, Value & v); - void addPrimOp(const string & name, + Value * addPrimOp(const string & name, unsigned int arity, PrimOpFun primOp); public: diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index 9380de3a66b3..8b1404595548 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -106,10 +106,16 @@ static void parseJSON(EvalState & state, const char * & s, Value & v) tmp_number += *s++; } - if (number_type == tFloat) - mkFloat(v, stod(tmp_number)); - else - mkInt(v, stoi(tmp_number)); + try { + if (number_type == tFloat) + mkFloat(v, stod(tmp_number)); + else + mkInt(v, stoi(tmp_number)); + } catch (std::invalid_argument e) { + throw JSONParseError("invalid JSON number"); + } catch (std::out_of_range e) { + throw JSONParseError("out-of-range JSON number"); + } } else if (strncmp(s, "true", 4) == 0) { diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index e3b5dfb420b4..466fd13e8698 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -39,7 +39,7 @@ std::pair<string, string> decodeContext(const string & s) size_t index = s.find("!", 1); return std::pair<string, string>(string(s, index + 1), string(s, 1, index - 1)); } else - return std::pair<string, string>(s.at(0) == '/' ? s: string(s, 1), ""); + return std::pair<string, string>(s.at(0) == '/' ? s : string(s, 1), ""); } @@ -49,24 +49,38 @@ InvalidPathError::InvalidPathError(const Path & path) : void EvalState::realiseContext(const PathSet & context) { PathSet drvs; + for (auto & i : context) { std::pair<string, string> decoded = decodeContext(i); Path ctx = decoded.first; assert(store->isStorePath(ctx)); if (!store->isValidPath(ctx)) throw InvalidPathError(ctx); - if (!decoded.second.empty() && nix::isDerivation(ctx)) + if (!decoded.second.empty() && nix::isDerivation(ctx)) { drvs.insert(decoded.first + "!" + decoded.second); + + /* Add the output of this derivation to the allowed + paths. */ + if (allowedPaths) { + auto drv = store->derivationFromPath(decoded.first); + DerivationOutputs::iterator i = drv.outputs.find(decoded.second); + if (i == drv.outputs.end()) + throw Error("derivation '%s' does not have an output named '%s'", decoded.first, decoded.second); + allowedPaths->insert(i->second.path); + } + } } - if (!drvs.empty()) { - if (!settings.enableImportFromDerivation) - throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin())); - /* For performance, prefetch all substitute info. */ - PathSet willBuild, willSubstitute, unknown; - unsigned long long downloadSize, narSize; - store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); - store->buildPaths(drvs); - } + + if (drvs.empty()) return; + + if (!settings.enableImportFromDerivation) + throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin())); + + /* For performance, prefetch all substitute info. 
*/ + PathSet willBuild, willSubstitute, unknown; + unsigned long long downloadSize, narSize; + store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); + store->buildPaths(drvs); } @@ -84,10 +98,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args % path % e.path % pos); } - path = state.checkSourcePath(path); + Path realPath = state.checkSourcePath(state.toRealPath(path, context)); if (state.store->isStorePath(path) && state.store->isValidPath(path) && isDerivation(path)) { - Derivation drv = readDerivation(path); + Derivation drv = readDerivation(realPath); Value & w = *state.allocValue(); state.mkAttrs(w, 3 + drv.outputs.size()); Value * v2 = state.allocAttr(w, state.sDrvPath); @@ -114,7 +128,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args } else { state.forceAttrs(*args[0]); if (args[0]->attrs->empty()) - state.evalFile(path, v); + state.evalFile(realPath, v); else { Env * env = &state.allocEnv(args[0]->attrs->size()); env->up = &state.baseEnv; @@ -127,8 +141,8 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args env->values[displ++] = attr.value; } - printTalkative("evaluating file '%1%'", path); - Expr * e = state.parseExprFromFile(resolveExprPath(path), staticEnv); + printTalkative("evaluating file '%1%'", realPath); + Expr * e = state.parseExprFromFile(resolveExprPath(realPath), staticEnv); e->eval(state, *env, v); } @@ -439,7 +453,7 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v) { string name = state.forceStringNoCtx(*args[0], pos); - mkString(v, state.restricted ? "" : getEnv(name)); + mkString(v, settings.restrictEval || settings.pureEval ? "" : getEnv(name)); } @@ -863,7 +877,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va throw EvalError(format("cannot read '%1%', since path '%2%' is not valid, at %3%") % path % e.path % pos); } - string s = readFile(state.checkSourcePath(path)); + string s = readFile(state.checkSourcePath(state.toRealPath(path, context))); if (s.find((char) 0) != string::npos) throw Error(format("the contents of the file '%1%' cannot be represented as a Nix string") % path); mkString(v, s.c_str()); @@ -1009,20 +1023,13 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu } -static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_, + Value * filterFun, bool recursive, const Hash & expectedHash, Value & v) { - PathSet context; - Path path = state.coerceToPath(pos, *args[1], context); - if (!context.empty()) - throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); - - state.forceValue(*args[0]); - if (args[0]->type != tLambda) - throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); - - path = state.checkSourcePath(path); - - PathFilter filter = [&](const Path & path) { + const auto path = settings.pureEval && expectedHash ? + path_ : + state.checkSourcePath(path_); + PathFilter filter = filterFun ? ([&](const Path & path) { auto st = lstat(path); /* Call the filter function. 
The first argument is the path, @@ -1031,7 +1038,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args mkString(arg1, path); Value fun2; - state.callFunction(*args[0], arg1, fun2, noPos); + state.callFunction(*filterFun, arg1, fun2, noPos); Value arg2; mkString(arg2, @@ -1044,16 +1051,79 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args state.callFunction(fun2, arg2, res, noPos); return state.forceBool(res, pos); - }; + }) : defaultPathFilter; - Path dstPath = settings.readOnlyMode - ? state.store->computeStorePathForPath(path, true, htSHA256, filter).first - : state.store->addToStore(baseNameOf(path), path, true, htSHA256, filter, state.repair); + Path expectedStorePath; + if (expectedHash) { + expectedStorePath = + state.store->makeFixedOutputPath(recursive, expectedHash, name); + } + Path dstPath; + if (!expectedHash || !state.store->isValidPath(expectedStorePath)) { + dstPath = settings.readOnlyMode + ? state.store->computeStorePathForPath(name, path, recursive, htSHA256, filter).first + : state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair); + if (expectedHash && expectedStorePath != dstPath) { + throw Error(format("store path mismatch in (possibly filtered) path added from '%1%'") % path); + } + } else + dstPath = expectedStorePath; mkString(v, dstPath, {dstPath}); } +static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + PathSet context; + Path path = state.coerceToPath(pos, *args[1], context); + if (!context.empty()) + throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); + + state.forceValue(*args[0]); + if (args[0]->type != tLambda) + throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); + + addPath(state, pos, baseNameOf(path), path, args[0], true, Hash(), v); +} + +static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + state.forceAttrs(*args[0], pos); + Path path; + string name; + Value * filterFun = nullptr; + auto recursive = true; + Hash expectedHash; + + for (auto & attr : *args[0]->attrs) { + const string & n(attr.name); + if (n == "path") { + PathSet context; + path = state.coerceToPath(*attr.pos, *attr.value, context); + if (!context.empty()) + throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % *attr.pos); + } else if (attr.name == state.sName) + name = state.forceStringNoCtx(*attr.value, *attr.pos); + else if (n == "filter") { + state.forceValue(*attr.value); + filterFun = attr.value; + } else if (n == "recursive") + recursive = state.forceBool(*attr.value, *attr.pos); + else if (n == "sha256") + expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + else + throw EvalError(format("unsupported argument '%1%' to 'addPath', at %2%") % attr.name % *attr.pos); + } + if (path.empty()) + throw EvalError(format("'path' required, at %1%") % pos); + if (name.empty()) + name = baseNameOf(path); + + addPath(state, pos, name, path, filterFun, recursive, expectedHash, v); +} + + /************************************************************* * Sets *************************************************************/ @@ -1653,6 +1723,14 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, } +static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + PathSet context; + 
state.forceString(*args[0], context, pos); + mkBool(v, !context.empty()); +} + + /* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a builder without causing the derivation to be built (for instance, in the derivation that builds NARs in nix-push, when doing @@ -1921,7 +1999,14 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, state.checkURI(url); + if (settings.pureEval && !expectedHash) + throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who); + Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash); + + if (state.allowedPaths) + state.allowedPaths->insert(res); + mkString(v, res, PathSet({res})); } @@ -1973,11 +2058,24 @@ void EvalState::createBaseEnv() mkNull(v); addConstant("null", v); - mkInt(v, time(0)); - addConstant("__currentTime", v); + auto vThrow = addPrimOp("throw", 1, prim_throw); + + auto addPurityError = [&](const std::string & name) { + Value * v2 = allocValue(); + mkString(*v2, fmt("'%s' is not allowed in pure evaluation mode", name)); + mkApp(v, *vThrow, *v2); + addConstant(name, v); + }; - mkString(v, settings.thisSystem); - addConstant("__currentSystem", v); + if (!settings.pureEval) { + mkInt(v, time(0)); + addConstant("__currentTime", v); + } + + if (!settings.pureEval) { + mkString(v, settings.thisSystem); + addConstant("__currentSystem", v); + } mkString(v, nixVersion); addConstant("__nixVersion", v); @@ -1993,10 +2091,10 @@ void EvalState::createBaseEnv() addConstant("__langVersion", v); // Miscellaneous - addPrimOp("scopedImport", 2, prim_scopedImport); + auto vScopedImport = addPrimOp("scopedImport", 2, prim_scopedImport); Value * v2 = allocValue(); mkAttrs(*v2, 0); - mkApp(v, *baseEnv.values[baseEnvDispl - 1], *v2); + mkApp(v, *vScopedImport, *v2); forceValue(v); addConstant("import", v); if (settings.enableNativeCode) { @@ -2012,7 +2110,6 @@ void EvalState::createBaseEnv() addPrimOp("__isBool", 1, prim_isBool); addPrimOp("__genericClosure", 1, prim_genericClosure); addPrimOp("abort", 1, prim_abort); - addPrimOp("throw", 1, prim_throw); addPrimOp("__addErrorContext", 2, prim_addErrorContext); addPrimOp("__tryEval", 1, prim_tryEval); addPrimOp("__getEnv", 1, prim_getEnv); @@ -2027,7 +2124,10 @@ void EvalState::createBaseEnv() // Paths addPrimOp("__toPath", 1, prim_toPath); - addPrimOp("__storePath", 1, prim_storePath); + if (settings.pureEval) + addPurityError("__storePath"); + else + addPrimOp("__storePath", 1, prim_storePath); addPrimOp("__pathExists", 1, prim_pathExists); addPrimOp("baseNameOf", 1, prim_baseNameOf); addPrimOp("dirOf", 1, prim_dirOf); @@ -2041,6 +2141,7 @@ void EvalState::createBaseEnv() addPrimOp("__fromJSON", 1, prim_fromJSON); addPrimOp("__toFile", 2, prim_toFile); addPrimOp("__filterSource", 2, prim_filterSource); + addPrimOp("__path", 1, prim_path); // Sets addPrimOp("__attrNames", 1, prim_attrNames); @@ -2083,6 +2184,7 @@ void EvalState::createBaseEnv() addPrimOp("toString", 1, prim_toString); addPrimOp("__substring", 3, prim_substring); addPrimOp("__stringLength", 1, prim_stringLength); + addPrimOp("__hasContext", 1, prim_hasContext); addPrimOp("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext); addPrimOp("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency); addPrimOp("__hashString", 2, prim_hashString); diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index e92e0638031f..2e3e2634db8f 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc 
@@ -22,10 +22,15 @@ struct GitInfo uint64_t revCount = 0; }; +std::regex revRegex("^[0-9a-fA-F]{40}$"); + GitInfo exportGit(ref<Store> store, const std::string & uri, - std::experimental::optional<std::string> ref, const std::string & rev, + std::experimental::optional<std::string> ref, std::string rev, const std::string & name) { + if (settings.pureEval && rev == "") + throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision"); + if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) { bool clean = true; @@ -68,20 +73,20 @@ GitInfo exportGit(ref<Store> store, const std::string & uri, return gitInfo; } + + // clean working tree, but no ref or rev specified. Use 'HEAD'. + rev = chomp(runProgram("git", true, { "-C", uri, "rev-parse", "HEAD" })); + ref = "HEAD"s; } if (!ref) ref = "master"s; - if (rev != "") { - std::regex revRegex("^[0-9a-fA-F]{40}$"); - if (!std::regex_match(rev, revRegex)) - throw Error("invalid Git revision '%s'", rev); - } + if (rev != "" && !std::regex_match(rev, revRegex)) + throw Error("invalid Git revision '%s'", rev); Path cacheDir = getCacheDir() + "/nix/git"; if (!pathExists(cacheDir)) { - createDirs(cacheDir); runProgram("git", true, { "init", "--bare", cacheDir }); } @@ -228,6 +233,9 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va mkString(*state.allocAttr(v, state.symbols.create("shortRev")), gitInfo.shortRev); mkInt(*state.allocAttr(v, state.symbols.create("revCount")), gitInfo.revCount); v.attrs->sort(); + + if (state.allowedPaths) + state.allowedPaths->insert(gitInfo.storePath); } static RegisterPrimOp r("fetchGit", 1, prim_fetchGit); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index a317476c5829..5517d83df824 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -27,6 +27,9 @@ std::regex commitHashRegex("^[0-9a-fA-F]{40}$"); HgInfo exportMercurial(ref<Store> store, const std::string & uri, std::string rev, const std::string & name) { + if (settings.pureEval && rev == "") + throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision"); + if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) { bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == ""; @@ -196,6 +199,9 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12)); mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount); v.attrs->sort(); + + if (state.allowedPaths) + state.allowedPaths->insert(hgInfo.storePath); } static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 85d3c077ba5e..90a4867163df 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -193,9 +193,6 @@ LegacyArgs::LegacyArgs(const std::string & programName, mkFlag(0, "readonly-mode", "do not write to the Nix store", &settings.readOnlyMode); - mkFlag(0, "show-trace", "show Nix expression stack trace in evaluation errors", - &settings.showTrace); - mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'", &gcWarning, false); diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d4bd650baf22..5be7ce60dab9 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -6,6 +6,7 @@ #include "archive.hh" #include "affinity.hh" #include 
"builtins.hh" +#include "download.hh" #include "finally.hh" #include "compression.hh" #include "json.hh" @@ -1123,11 +1124,6 @@ void DerivationGoal::haveDerivation() return; } - /* Reject doing a hash build of anything other than a fixed-output - derivation. */ - if (buildMode == bmHash && !drv->isFixedOutput()) - throw Error("cannot do a hash build of non-fixed-output derivation '%1%'", drvPath); - /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build them. */ @@ -1319,9 +1315,7 @@ void DerivationGoal::inputsRealised() allPaths.insert(inputPaths.begin(), inputPaths.end()); /* Is this a fixed-output derivation? */ - fixedOutput = true; - for (auto & i : drv->outputs) - if (i.second.hash == "") fixedOutput = false; + fixedOutput = drv->isFixedOutput(); /* Don't repeat fixed-output derivations since they're already verified by their output hash.*/ @@ -1777,6 +1771,19 @@ PathSet exportReferences(Store & store, PathSet storePaths) return paths; } +static std::once_flag dns_resolve_flag; + +static void preloadNSS() { + /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of + one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already + been loaded in the parent. So we force a download of an invalid URL to force the NSS machinery to + load its lookup libraries in the parent before any child gets a chance to. */ + std::call_once(dns_resolve_flag, []() { + DownloadRequest request("http://this.pre-initializes.the.dns.resolvers.invalid"); + request.tries = 1; // We only need to do it once, and this also suppresses an annoying warning + try { getDownloader()->download(request); } catch (...) {} + }); +} void DerivationGoal::startBuilder() { @@ -1787,6 +1794,9 @@ void DerivationGoal::startBuilder() % drv->platform % settings.thisSystem % drvPath); } + if (drv->isBuiltin()) + preloadNSS(); + #if __APPLE__ additionalSandboxProfile = get(drv->env, "__sandboxProfile"); #endif @@ -1810,8 +1820,13 @@ void DerivationGoal::startBuilder() useChroot = !fixedOutput && get(drv->env, "__noChroot") != "1"; } - if (worker.store.storeDir != worker.store.realStoreDir) - useChroot = true; + if (worker.store.storeDir != worker.store.realStoreDir) { + #if __linux__ + useChroot = true; + #else + throw Error("building using a diverted store is not supported on this platform"); + #endif + } /* If `build-users-group' is not empty, then we have to build as one of the members of that group. */ @@ -2928,8 +2943,13 @@ void DerivationGoal::runChild() if (drv->isBuiltin()) { try { logger = makeJSONLogger(*logger); + + BasicDerivation drv2(*drv); + for (auto & e : drv2.env) + e.second = rewriteStrings(e.second, inputRewrites); + if (drv->builder == "builtin:fetchurl") - builtinFetchurl(*drv, netrcData); + builtinFetchurl(drv2, netrcData); else throw Error(format("unsupported builtin function '%1%'") % string(drv->builder, 8)); _exit(0); @@ -2992,6 +3012,8 @@ void DerivationGoal::registerOutputs() bool runDiffHook = settings.runDiffHook; bool keepPreviousRound = settings.keepFailed || runDiffHook; + std::exception_ptr delayedException; + /* Check whether the output paths were created, and grep each output path to determine what other paths it references. Also make all output paths read-only. */ @@ -3066,7 +3088,7 @@ void DerivationGoal::registerOutputs() /* Check that fixed-output derivations produced the right outputs (i.e., the content hash should match the specified hash). 
*/ - if (i.second.hash != "") { + if (fixedOutput) { bool recursive; Hash h; i.second.parseHashInfo(recursive, h); @@ -3082,27 +3104,34 @@ void DerivationGoal::registerOutputs() /* Check the hash. In hash mode, move the path produced by the derivation to its content-addressed location. */ Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath); - if (buildMode == bmHash) { - Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]); - printError(format("build produced path '%1%' with %2% hash '%3%'") - % dest % printHashType(h.type) % printHash16or32(h2)); - if (worker.store.isValidPath(dest)) - return; + + Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]); + + if (h != h2) { + + /* Throw an error after registering the path as + valid. */ + delayedException = std::make_exception_ptr( + BuildError("fixed-output derivation produced path '%s' with %s hash '%s' instead of the expected hash '%s'", + dest, printHashType(h.type), printHash16or32(h2), printHash16or32(h))); + Path actualDest = worker.store.toRealPath(dest); + + if (worker.store.isValidPath(dest)) + std::rethrow_exception(delayedException); + if (actualPath != actualDest) { PathLocks outputLocks({actualDest}); deletePath(actualDest); if (rename(actualPath.c_str(), actualDest.c_str()) == -1) throw SysError(format("moving '%1%' to '%2%'") % actualPath % dest); } + path = dest; actualPath = actualDest; - } else { - if (h != h2) - throw BuildError( - format("output path '%1%' has %2% hash '%3%' when '%4%' was expected") - % path % i.second.hashAlgo % printHash16or32(h2) % printHash16or32(h)); } + else + assert(path == dest); info.ca = makeFixedOutputCA(recursive, h2); } @@ -3279,6 +3308,11 @@ void DerivationGoal::registerOutputs() paths referenced by each of them. If there are cycles in the outputs, this will fail. */ worker.store.registerValidPaths(infos); + + /* In case of a fixed-output derivation hash mismatch, throw an + exception now that we have registered the output as valid. */ + if (delayedException) + std::rethrow_exception(delayedException); } @@ -3636,7 +3670,7 @@ void SubstitutionGoal::tryNext() /* Update the total expected download size. */ auto narInfo = std::dynamic_pointer_cast<const NarInfo>(info); - maintainExpectedNar = std::make_unique<MaintainCount<uint64_t>>(worker.expectedNarSize, narInfo->narSize); + maintainExpectedNar = std::make_unique<MaintainCount<uint64_t>>(worker.expectedNarSize, info->narSize); maintainExpectedDownload = narInfo && narInfo->fileSize @@ -3650,7 +3684,10 @@ void SubstitutionGoal::tryNext() /* Bail out early if this substituter lacks a valid signature. LocalStore::addToStore() also checks for this, but only after we've downloaded the path. */ - if (worker.store.requireSigs && !info->checkSignatures(worker.store, worker.store.publicKeys)) { + if (worker.store.requireSigs + && !sub->isTrusted + && !info->checkSignatures(worker.store, worker.store.publicKeys)) + { printInfo(format("warning: substituter '%s' does not have a valid signature for path '%s'") % sub->getUri() % storePath); tryNext(); @@ -3718,7 +3755,7 @@ void SubstitutionGoal::tryToRun() PushActivity pact(act.id); copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()), - storePath, repair); + storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs); promise.set_value(); } catch (...) 
{ diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 4474dfd4b968..258d7937cc39 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -17,11 +17,13 @@ #include <curl/curl.h> -#include <queue> -#include <iostream> -#include <thread> +#include <algorithm> #include <cmath> +#include <cstring> +#include <iostream> +#include <queue> #include <random> +#include <thread> using namespace std::string_literals; @@ -91,6 +93,8 @@ struct CurlDownloader : public Downloader { if (!request.expectedETag.empty()) requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); + if (!request.mimeType.empty()) + requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str()); } ~DownloadItem() @@ -185,6 +189,22 @@ struct CurlDownloader : public Downloader return 0; } + size_t readOffset = 0; + int readCallback(char *buffer, size_t size, size_t nitems) + { + if (readOffset == request.data->length()) + return 0; + auto count = std::min(size * nitems, request.data->length() - readOffset); + memcpy(buffer, request.data->data() + readOffset, count); + readOffset += count; + return count; + } + + static int readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp) + { + return ((DownloadItem *) userp)->readCallback(buffer, size, nitems); + } + long lowSpeedTimeout = 300; void init() @@ -225,6 +245,13 @@ struct CurlDownloader : public Downloader if (request.head) curl_easy_setopt(req, CURLOPT_NOBODY, 1); + if (request.data) { + curl_easy_setopt(req, CURLOPT_UPLOAD, 1L); + curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper); + curl_easy_setopt(req, CURLOPT_READDATA, this); + curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length()); + } + if (request.verifyTLS) { if (settings.caFile != "") curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); @@ -265,7 +292,7 @@ struct CurlDownloader : public Downloader } if (code == CURLE_OK && - (httpStatus == 200 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) + (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) { result.cached = httpStatus == 304; done = true; @@ -303,6 +330,7 @@ struct CurlDownloader : public Downloader // Don't bother retrying on certain cURL errors either switch (code) { case CURLE_FAILED_INIT: + case CURLE_URL_MALFORMAT: case CURLE_NOT_BUILT_IN: case CURLE_REMOTE_ACCESS_DENIED: case CURLE_FILE_COULDNT_READ_FILE: @@ -311,10 +339,10 @@ struct CurlDownloader : public Downloader case CURLE_BAD_FUNCTION_ARGUMENT: case CURLE_INTERFACE_FAILED: case CURLE_UNKNOWN_OPTION: - err = Misc; - break; + err = Misc; + break; default: // Shut up warnings - break; + break; } } diff --git a/src/libstore/download.hh b/src/libstore/download.hh index f2d65ad8d61d..d9d525d4e65f 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -18,6 +18,8 @@ struct DownloadRequest unsigned int baseRetryTimeMs = 250; ActivityId parentAct; bool decompress = true; + std::shared_ptr<std::string> data; + std::string mimeType; DownloadRequest(const std::string & uri) : uri(uri), parentAct(curActivity) { } diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index ab2c5ca0274c..943b16c28fa3 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -324,10 +324,8 @@ Roots LocalStore::findRootsNoTemp() { Roots roots; - /* Process direct roots in 
{gcroots,manifests,profiles}. */ + /* Process direct roots in {gcroots,profiles}. */ findRoots(stateDir + "/" + gcRootsDir, DT_UNKNOWN, roots); - if (pathExists(stateDir + "/manifests")) - findRoots(stateDir + "/manifests", DT_UNKNOWN, roots); findRoots(stateDir + "/profiles", DT_UNKNOWN, roots); /* Add additional roots returned by the program specified by the diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 5c857cbb6a9c..20ac8fe4e9ae 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -138,6 +138,11 @@ public: Setting<std::string> builders{this, "@" + nixConfDir + "/machines", "builders", "A semicolon-separated list of build machines, in the format of nix.machines."}; + Setting<bool> buildersUseSubstitutes{this, false, "builders-use-substitutes", + "Whether build machines should use their own substitutes for obtaining " + "build dependencies if possible, rather than waiting for this host to " + "upload them."}; + Setting<off_t> reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", "Amount of reserved disk space for the garbage collector."}; @@ -150,7 +155,7 @@ public: Setting<bool> syncBeforeRegistering{this, false, "sync-before-registering", "Whether to call sync() before registering a path as valid."}; - Setting<bool> useSubstitutes{this, true, "use-substitutes", + Setting<bool> useSubstitutes{this, true, "substitute", "Whether to use substitutes.", {"build-use-substitutes"}}; @@ -206,7 +211,8 @@ public: bool lockCPU; /* Whether to show a stack trace if Nix evaluation fails. */ - bool showTrace = false; + Setting<bool> showTrace{this, false, "show-trace", + "Whether to show a stack trace on evaluation errors."}; Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation", "Whether builtin functions that allow executing native code should be enabled."}; @@ -227,6 +233,9 @@ public: "Whether to restrict file system access to paths in $NIX_PATH, " "and network access to the URI prefixes listed in 'allowed-uris'."}; + Setting<bool> pureEval{this, false, "pure-eval", + "Whether to restrict file system and network access to files specified by cryptographic hash."}; + Setting<size_t> buildRepeat{this, 0, "repeat", "The number of times to repeat a build in order to verify determinism.", {"build-repeat"}}; @@ -278,10 +287,7 @@ public: Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl", "How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."}; - Setting<std::string> signedBinaryCaches{this, "*", "signed-binary-caches", - "Obsolete."}; - - Setting<bool> requireSigs{this, signedBinaryCaches == "*", "require-sigs", + Setting<bool> requireSigs{this, true, "require-sigs", "Whether to check that any non-content-addressed path added to the " "Nix store has a valid signature (that is, one signed using a key " "listed in 'trusted-public-keys'."}; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 057337685791..b9e9cd5daba5 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -38,7 +38,7 @@ public: try { BinaryCacheStore::init(); } catch (UploadToHTTP &) { - throw Error(format("'%s' does not appear to be a binary cache") % cacheUri); + throw Error("'%s' does not appear to be a binary cache", cacheUri); } diskCache->createCache(cacheUri, storeDir, wantMassQuery_, priority); } @@ -67,7 +67,14 @@ protected: const std::string & data, const std::string & mimeType) override { - throw UploadToHTTP("uploading to 
an HTTP binary cache is not supported"); + auto req = DownloadRequest(cacheUri + "/" + path); + req.data = std::make_shared<string>(data); // FIXME: inefficient + req.mimeType = mimeType; + try { + getDownloader()->download(req); + } catch (DownloadError & e) { + throw UploadToHTTP(format("uploading to HTTP binary cache at %1% not supported: %2%") % cacheUri % e.msg()); + } } void getFile(const std::string & path, diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index d35cd1a949eb..30bef3a799d4 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -19,7 +19,7 @@ namespace nix { /* Nix store and database schema version. Version 1 (or 0) was Nix <= 0.7. Version 2 was Nix 0.8 and 0.9. Version 3 is Nix 0.10. Version 4 is Nix 0.11. Version 5 is Nix 0.12-0.16. Version 6 is - Nix 1.0. Version 7 is Nix 1.3. Version 10 is 1.12. */ + Nix 1.0. Version 7 is Nix 1.3. Version 10 is 2.0. */ const int nixSchemaVersion = 10; diff --git a/src/libstore/nar-accessor.hh b/src/libstore/nar-accessor.hh index 1903355a236e..2871199de16e 100644 --- a/src/libstore/nar-accessor.hh +++ b/src/libstore/nar-accessor.hh @@ -1,5 +1,7 @@ #pragma once +#include <functional> + #include "fs-accessor.hh" namespace nix { diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 77b41b6bf8a8..8f0b65557ac4 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -78,9 +78,22 @@ UDSRemoteStore::UDSRemoteStore(const Params & params) } +UDSRemoteStore::UDSRemoteStore(std::string socket_path, const Params & params) + : Store(params) + , LocalFSStore(params) + , RemoteStore(params) + , path(socket_path) +{ +} + + std::string UDSRemoteStore::getUri() { - return "daemon"; + if (path) { + return std::string("unix://") + *path; + } else { + return "daemon"; + } } @@ -98,7 +111,7 @@ ref<RemoteStore::Connection> UDSRemoteStore::openConnection() throw SysError("cannot create Unix domain socket"); closeOnExec(conn->fd.get()); - string socketPath = settings.nixDaemonSocketFile; + string socketPath = path ? 
*path : settings.nixDaemonSocketFile; struct sockaddr_un addr; addr.sun_family = AF_UNIX; @@ -721,5 +734,14 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source) } } +static std::string uriScheme = "unix://"; + +static RegisterStoreImplementation regStore([]( + const std::string & uri, const Store::Params & params) + -> std::shared_ptr<Store> +{ + if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0; + return std::make_shared<UDSRemoteStore>(std::string(uri, uriScheme.size()), params); +}); } diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 30c6beae6ff2..7f36e206416b 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -134,6 +134,7 @@ class UDSRemoteStore : public LocalFSStore, public RemoteStore public: UDSRemoteStore(const Params & params); + UDSRemoteStore(std::string path, const Params & params); std::string getUri() override; @@ -145,6 +146,7 @@ private: }; ref<RemoteStore::Connection> openConnection() override; + std::experimental::optional<std::string> path; }; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 0079da1becfb..23af452094cf 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -10,6 +10,7 @@ #include "istringstream_nocopy.hh" #include <aws/core/Aws.h> +#include <aws/core/VersionConfig.h> #include <aws/core/auth/AWSCredentialsProvider.h> #include <aws/core/auth/AWSCredentialsProviderChain.h> #include <aws/core/client/ClientConfiguration.h> @@ -87,7 +88,14 @@ S3Helper::S3Helper(const std::string & profile, const std::string & region) std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>()) : std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>( std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())), - *config, true, false)) + *config, + // FIXME: https://github.com/aws/aws-sdk-cpp/issues/759 +#if AWS_VERSION_MAJOR == 1 && AWS_VERSION_MINOR < 3 + false, +#else + Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, +#endif + false)) { } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 77ab87ef728e..7abb300a9bb8 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -222,11 +222,10 @@ Path Store::makeTextPath(const string & name, const Hash & hash, } -std::pair<Path, Hash> Store::computeStorePathForPath(const Path & srcPath, - bool recursive, HashType hashAlgo, PathFilter & filter) const +std::pair<Path, Hash> Store::computeStorePathForPath(const string & name, + const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter) const { Hash h = recursive ? 
hashPath(hashAlgo, srcPath, filter).first : hashFile(hashAlgo, srcPath); - string name = baseNameOf(srcPath); Path dstPath = makeFixedOutputPath(recursive, h, name); return std::pair<Path, Hash>(dstPath, h); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index d1e1b5d6f452..563aa566bd37 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -192,7 +192,7 @@ struct ValidPathInfo typedef list<ValidPathInfo> ValidPathInfos; -enum BuildMode { bmNormal, bmRepair, bmCheck, bmHash }; +enum BuildMode { bmNormal, bmRepair, bmCheck }; struct BuildResult @@ -248,6 +248,8 @@ public: const Setting<int> pathInfoCacheSize{this, 65536, "path-info-cache-size", "size of the in-memory store path information cache"}; + const Setting<bool> isTrusted{this, false, "trusted", "whether paths from this store can be used as substitutes even when they lack trusted signatures"}; + protected: struct State @@ -305,9 +307,9 @@ public: /* This is the preparatory part of addToStore(); it computes the store path to which srcPath is to be copied. Returns the store path and the cryptographic hash of the contents of srcPath. */ - std::pair<Path, Hash> computeStorePathForPath(const Path & srcPath, - bool recursive = true, HashType hashAlgo = htSHA256, - PathFilter & filter = defaultPathFilter) const; + std::pair<Path, Hash> computeStorePathForPath(const string & name, + const Path & srcPath, bool recursive = true, + HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const; /* Preparatory part of addTextToStore(). @@ -597,6 +599,11 @@ public: "nix-cache-info" file. Lower value means higher priority. */ virtual int getPriority() { return 0; } + virtual Path toRealPath(const Path & storePath) + { + return storePath; + } + protected: Stats stats; @@ -639,9 +646,10 @@ public: virtual Path getRealStoreDir() { return storeDir; } - Path toRealPath(const Path & storePath) + Path toRealPath(const Path & storePath) override { - return getRealStoreDir() + "/" + baseNameOf(storePath); + assert(isInStore(storePath)); + return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1); } std::shared_ptr<std::string> getBuildLog(const Path & path) override; @@ -699,6 +707,9 @@ void removeTempRoots(); * ‘daemon’: The Nix store accessed via a Unix domain socket connection to nix-daemon. + * ‘unix://<path>’: The Nix store accessed via a Unix domain socket + connection to nix-daemon, with the socket located at <path>. + * ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on whether the user has write access to the local Nix store/database. 
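The 'unix://<path>' entry documented above is wired up by the new RegisterStoreImplementation lambda in remote-store.cc: the lambda checks the URI for the scheme prefix and hands everything after it to the new UDSRemoteStore constructor as the daemon socket path, returning a null pointer otherwise so other store implementations can claim the URI. A minimal, self-contained sketch of that prefix dispatch (plain standard C++, not Nix code; the helper name parseUnixStoreUri and the test socket path are invented for illustration):

#include <iostream>
#include <string>

// Scheme prefix used by the store URI dispatch, as in remote-store.cc.
static const std::string uriScheme = "unix://";

// Hypothetical helper mirroring the check in the registration lambda:
// succeed (and fill socketPath) only when the URI starts with "unix://".
bool parseUnixStoreUri(const std::string & uri, std::string & socketPath)
{
    if (uri.compare(0, uriScheme.size(), uriScheme) != 0)
        return false;                          // not ours; some other store may match
    socketPath = uri.substr(uriScheme.size()); // everything after the scheme is the socket path
    return true;
}

int main()
{
    std::string path;
    if (parseUnixStoreUri("unix:///tmp/test-nix-daemon.socket", path))
        std::cout << "daemon socket: " << path << "\n";   // prints /tmp/test-nix-daemon.socket
    else
        std::cout << "fall back to the default daemon socket\n";
}

In the patch itself a match produces a UDSRemoteStore whose getUri() reports the full 'unix://<path>' form, while the existing plain 'daemon' behaviour is kept when no explicit socket path is given.
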
diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 2b3dff3a5ea1..5e2631ba3408 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -7,6 +7,11 @@ #include <cstdio> #include <cstring> +#if HAVE_BROTLI +#include <brotli/decode.h> +#include <brotli/encode.h> +#endif // HAVE_BROTLI + #include <iostream> namespace nix { @@ -94,8 +99,56 @@ static ref<std::string> decompressBzip2(const std::string & in) static ref<std::string> decompressBrotli(const std::string & in) { - // FIXME: use libbrotli - return make_ref<std::string>(runProgram(BRO, true, {"-d"}, {in})); +#if !HAVE_BROTLI + return make_ref<std::string>(runProgram(BROTLI, true, {"-d"}, {in})); +#else + auto *s = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr); + if (!s) + throw CompressionError("unable to initialize brotli decoder"); + + Finally free([s]() { BrotliDecoderDestroyInstance(s); }); + + uint8_t outbuf[BUFSIZ]; + ref<std::string> res = make_ref<std::string>(); + const uint8_t *next_in = (uint8_t *)in.c_str(); + size_t avail_in = in.size(); + uint8_t *next_out = outbuf; + size_t avail_out = sizeof(outbuf); + + while (true) { + checkInterrupt(); + + auto ret = BrotliDecoderDecompressStream(s, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr); + + switch (ret) { + case BROTLI_DECODER_RESULT_ERROR: + throw CompressionError("error while decompressing brotli file"); + case BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: + throw CompressionError("incomplete or corrupt brotli file"); + case BROTLI_DECODER_RESULT_SUCCESS: + if (avail_in != 0) + throw CompressionError("unexpected input after brotli decompression"); + break; + case BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: + // I'm not sure if this can happen, but abort if this happens with empty buffer + if (avail_out == sizeof(outbuf)) + throw CompressionError("brotli decompression requires larger buffer"); + break; + } + + // Always ensure we have full buffer for next invocation + if (avail_out < sizeof(outbuf)) { + res->append((char*)outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + + if (ret == BROTLI_DECODER_RESULT_SUCCESS) return res; + } +#endif // HAVE_BROTLI } ref<std::string> compress(const std::string & method, const std::string & in) @@ -270,33 +323,131 @@ struct BzipSink : CompressionSink } }; -struct BrotliSink : CompressionSink +struct LambdaCompressionSink : CompressionSink { Sink & nextSink; std::string data; + using CompressFnTy = std::function<std::string(const std::string&)>; + CompressFnTy compressFn; + LambdaCompressionSink(Sink& nextSink, CompressFnTy compressFn) + : nextSink(nextSink) + , compressFn(std::move(compressFn)) + { + }; + + void finish() override + { + flush(); + nextSink(compressFn(data)); + } + + void write(const unsigned char * data, size_t len) override + { + checkInterrupt(); + this->data.append((const char *) data, len); + } +}; + +struct BrotliCmdSink : LambdaCompressionSink +{ + BrotliCmdSink(Sink& nextSink) + : LambdaCompressionSink(nextSink, [](const std::string& data) { + return runProgram(BROTLI, true, {}, data); + }) + { + } +}; + +#if HAVE_BROTLI +struct BrotliSink : CompressionSink +{ + Sink & nextSink; + uint8_t outbuf[BUFSIZ]; + BrotliEncoderState *state; + bool finished = false; BrotliSink(Sink & nextSink) : nextSink(nextSink) { + state = BrotliEncoderCreateInstance(nullptr, nullptr, nullptr); + if (!state) + throw CompressionError("unable to initialise brotli encoder"); } ~BrotliSink() { + BrotliEncoderDestroyInstance(state); } - // FIXME: 
use libbrotli - void finish() override { flush(); - nextSink(runProgram(BRO, true, {}, data)); + assert(!finished); + + const uint8_t *next_in = nullptr; + size_t avail_in = 0; + uint8_t *next_out = outbuf; + size_t avail_out = sizeof(outbuf); + while (!finished) { + checkInterrupt(); + + if (!BrotliEncoderCompressStream(state, + BROTLI_OPERATION_FINISH, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr)) + throw CompressionError("error while finishing brotli file"); + + finished = BrotliEncoderIsFinished(state); + if (avail_out == 0 || finished) { + nextSink(outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + } } void write(const unsigned char * data, size_t len) override { - checkInterrupt(); - this->data.append((const char *) data, len); + assert(!finished); + + // Don't feed brotli too much at once + const size_t CHUNK_SIZE = sizeof(outbuf) << 2; + while (len) { + size_t n = std::min(CHUNK_SIZE, len); + writeInternal(data, n); + data += n; + len -= n; + } + } + private: + void writeInternal(const unsigned char * data, size_t len) + { + assert(!finished); + + const uint8_t *next_in = data; + size_t avail_in = len; + uint8_t *next_out = outbuf; + size_t avail_out = sizeof(outbuf); + + while (avail_in > 0) { + checkInterrupt(); + + if (!BrotliEncoderCompressStream(state, + BROTLI_OPERATION_PROCESS, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr)) + throw CompressionError("error while compressing brotli file"); + + if (avail_out < sizeof(outbuf) || avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + } } }; +#endif // HAVE_BROTLI ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink) { @@ -307,7 +458,11 @@ ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & next else if (method == "bzip2") return make_ref<BzipSink>(nextSink); else if (method == "br") +#if HAVE_BROTLI return make_ref<BrotliSink>(nextSink); +#else + return make_ref<BrotliCmdSink>(nextSink); +#endif else throw UnknownCompressionMethod(format("unknown compression method '%s'") % method); } diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 0721b21c2089..5fc2aab569da 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -6,8 +6,8 @@ libutil_DIR := $(d) libutil_SOURCES := $(wildcard $(d)/*.cc) -libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) +libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) libutil_LIBS = libformat -libutil_CXXFLAGS = -DBRO=\"$(bro)\" +libutil_CXXFLAGS = -DBROTLI=\"$(brotli)\" diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 197df0c44aa0..272997397794 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -192,6 +192,12 @@ bool isInDir(const Path & path, const Path & dir) } +bool isDirOrInDir(const Path & path, const Path & dir) +{ + return path == dir or isInDir(path, dir); +} + + struct stat lstat(const Path & path) { struct stat st; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index a3494e09b09b..75eb9751524e 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -53,10 +53,12 @@ Path dirOf(const Path & path); following the final `/'. */ string baseNameOf(const Path & path); -/* Check whether a given path is a descendant of the given - directory. */ +/* Check whether 'path' is a descendant of 'dir'. */ bool isInDir(const Path & path, const Path & dir); +/* Check whether 'path' is equal to 'dir' or a descendant of 'dir'. 
*/ +bool isDirOrInDir(const Path & path, const Path & dir); + /* Get status of `path'. */ struct stat lstat(const Path & path); diff --git a/src/linenoise/linenoise.cpp b/src/linenoise/linenoise.cpp index 8ee8984d6948..c57505d2fa97 100644 --- a/src/linenoise/linenoise.cpp +++ b/src/linenoise/linenoise.cpp @@ -2587,13 +2587,6 @@ int InputBuffer::getInputLine(PromptBase& pi) { // ctrl-I/tab, command completion, needs to be before switch statement if (c == ctrlChar('I') && completionCallback) { - if (pos == 0) // SERVER-4967 -- in earlier versions, you could paste - // previous output - continue; // back into the shell ... this output may have leading - // tabs. - // This hack (i.e. what the old code did) prevents command completion - // on an empty line but lets users paste text with leading tabs. - killRing.lastAction = KillRing::actionOther; historyRecallMostRecent = false; diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 58366daa6e86..1581c282c75c 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -141,7 +141,7 @@ void mainWrapped(int argc, char * * argv) else if (*arg == "--version") printVersion(myName); - else if (*arg == "--add-drv-link") + else if (*arg == "--add-drv-link" || *arg == "--indirect") ; // obsolete else if (*arg == "--no-out-link" || *arg == "--no-link") @@ -167,9 +167,6 @@ void mainWrapped(int argc, char * * argv) buildMode = bmRepair; } - else if (*arg == "--hash") - buildMode = bmHash; - else if (*arg == "--run-env") // obsolete runEnv = true; @@ -279,8 +276,8 @@ void mainWrapped(int argc, char * * argv) else /* If we're in a #! script, interpret filenames relative to the script. */ - exprs.push_back(state.parseExprFromFile(resolveExprPath(lookupFileArg(state, - inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i)))); + exprs.push_back(state.parseExprFromFile(resolveExprPath(state.checkSourcePath(lookupFileArg(state, + inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))))); } /* Evaluate them into derivations. */ diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 5629cc64b96e..b5d49b6428ac 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -411,7 +411,7 @@ static void performOp(TunnelLogger * logger, ref<LocalStore> store, /* Repairing is not atomic, so disallowed for "untrusted" clients. */ if (mode == bmRepair && !trusted) - throw Error("repairing is not supported when building through the Nix daemon"); + throw Error("repairing is not allowed because you are not in 'trusted-users'"); } logger->startWork(); store->buildPaths(drvs, mode); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 55ac007e8682..e05040a42deb 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -182,7 +182,7 @@ int main(int argc, char * * argv) for (auto & i : files) { Expr * e = fromArgs ? 
state.parseExprFromString(i, absPath(".")) - : state.parseExprFromFile(resolveExprPath(lookupFileArg(state, i))); + : state.parseExprFromFile(resolveExprPath(state.checkSourcePath(lookupFileArg(state, i)))); processExpr(state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index f6f276dd1798..4fc3421c0dde 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -122,7 +122,6 @@ static void opRealise(Strings opFlags, Strings opArgs) if (i == "--dry-run") dryRun = true; else if (i == "--repair") buildMode = bmRepair; else if (i == "--check") buildMode = bmCheck; - else if (i == "--hash") buildMode = bmHash; else if (i == "--ignore-unknown") ignoreUnknown = true; else throw UsageError(format("unknown flag '%1%'") % i); diff --git a/src/nix/command.hh b/src/nix/command.hh index 6b34e3881e79..a7863c49f37a 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -5,6 +5,8 @@ namespace nix { +extern std::string programPath; + struct Value; class Bindings; class EvalState; diff --git a/src/nix/edit.cc b/src/nix/edit.cc index 127be321eee2..7eaa86e2f914 100644 --- a/src/nix/edit.cc +++ b/src/nix/edit.cc @@ -52,7 +52,12 @@ struct CmdEdit : InstallableCommand throw Error("cannot parse meta.position attribute '%s'", pos); std::string filename(pos, 0, colon); - int lineno = std::stoi(std::string(pos, colon + 1)); + int lineno; + try { + lineno = std::stoi(std::string(pos, colon + 1)); + } catch (std::invalid_argument e) { + throw Error("cannot parse line number '%s'", pos); + } auto editor = getEnv("EDITOR", "cat"); diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 0fbeca1c121d..b7058361cbec 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -5,10 +5,11 @@ #include "eval.hh" #include "json.hh" #include "value-to-json.hh" +#include "progress-bar.hh" using namespace nix; -struct CmdEval : MixJSON, InstallablesCommand +struct CmdEval : MixJSON, InstallableCommand { bool raw = false; @@ -56,20 +57,19 @@ struct CmdEval : MixJSON, InstallablesCommand auto state = getEvalState(); - auto jsonOut = json ? 
std::make_unique<JSONList>(std::cout) : nullptr; + auto v = installable->toValue(*state); + PathSet context; - for (auto & i : installables) { - auto v = i->toValue(*state); - PathSet context; - if (raw) { - std::cout << state->coerceToString(noPos, *v, context); - } else if (json) { - auto jsonElem = jsonOut->placeholder(); - printValueAsJSON(*state, true, *v, jsonElem, context); - } else { - state->forceValueDeep(*v); - std::cout << *v << "\n"; - } + stopProgressBar(); + + if (raw) { + std::cout << state->coerceToString(noPos, *v, context); + } else if (json) { + JSONPlaceholder jsonOut(std::cout); + printValueAsJSON(*state, true, *v, jsonOut, context); + } else { + state->forceValueDeep(*v); + std::cout << *v << "\n"; } } }; diff --git a/src/nix/installables.cc b/src/nix/installables.cc index ae93c4ef649e..c3b06c22eba8 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -30,10 +30,8 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state) vSourceExpr = state.allocValue(); - if (file != "") { - Expr * e = state.parseExprFromFile(resolveExprPath(lookupFileArg(state, file))); - state.eval(e, *vSourceExpr); - } + if (file != "") + state.evalFile(lookupFileArg(state, file), *vSourceExpr); else { diff --git a/src/nix/local.mk b/src/nix/local.mk index bddd53b168d3..f76da194467c 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -6,4 +6,6 @@ nix_SOURCES := $(wildcard $(d)/*.cc) $(wildcard src/linenoise/*.cpp) nix_LIBS = libexpr libmain libstore libutil libformat +nix_LDFLAGS = -pthread + $(eval $(call install-symlink, nix, $(bindir)/nix-hash)) diff --git a/src/nix/log.cc b/src/nix/log.cc index 966ad8b65087..f07ec4e93a16 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -50,6 +50,7 @@ struct CmdLog : InstallableCommand auto b = installable->toBuildable(); + RunPager pager; for (auto & sub : subs) { auto log = b.drvPath != "" ? 
sub->getBuildLog(b.drvPath) : nullptr; for (auto & output : b.outputs) { diff --git a/src/nix/main.cc b/src/nix/main.cc index 06bb8a1c3043..8f6bbe8f51ae 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -16,6 +16,8 @@ void chrootHelper(int argc, char * * argv); namespace nix { +std::string programPath; + struct NixArgs : virtual MultiCommand, virtual MixCommonArgs { NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix") @@ -78,7 +80,8 @@ void mainWrapped(int argc, char * * argv) initNix(); initGC(); - string programName = baseNameOf(argv[0]); + programPath = argv[0]; + string programName = baseNameOf(programPath); { auto legacy = (*RegisterLegacyCommand::commands)[programName]; diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index fb9955190b40..252d12c5d37f 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -3,8 +3,9 @@ #include "sync.hh" #include "store-api.hh" -#include <map> #include <atomic> +#include <map> +#include <thread> namespace nix { @@ -101,15 +102,28 @@ private: Sync<State> state_; + std::thread updateThread; + + std::condition_variable quitCV, updateCV; + public: ProgressBar() { + updateThread = std::thread([&]() { + auto state(state_.lock()); + while (state->active) { + state.wait(updateCV); + draw(*state); + state.wait_for(quitCV, std::chrono::milliseconds(50)); + } + }); } ~ProgressBar() { stop(); + updateThread.join(); } void stop() @@ -121,6 +135,8 @@ public: writeToStderr("\r\e[K"); if (status != "") writeToStderr("[" + status + "]\n"); + updateCV.notify_one(); + quitCV.notify_one(); } void log(Verbosity lvl, const FormatOrString & fs) override @@ -132,7 +148,7 @@ public: void log(State & state, Verbosity lvl, const std::string & s) { writeToStderr("\r\e[K" + s + ANSI_NORMAL "\n"); - update(state); + draw(state); } void startActivity(ActivityId act, Verbosity lvl, ActivityType type, @@ -167,7 +183,12 @@ public: if (type == actSubstitute) { auto name = storePathToName(getS(fields, 0)); - i->s = fmt("fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", name, getS(fields, 1)); + auto sub = getS(fields, 1); + i->s = fmt( + hasPrefix(sub, "local") + ? 
"copying " ANSI_BOLD "%s" ANSI_NORMAL " from %s" + : "fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", + name, sub); } if (type == actQueryPathInfo) { @@ -180,7 +201,7 @@ public: || (type == actCopyPath && hasAncestor(*state, actSubstitute, parent))) i->visible = false; - update(*state); + update(); } /* Check whether an activity has an ancestore with the specified @@ -215,7 +236,7 @@ public: state->its.erase(i); } - update(*state); + update(); } void result(ActivityId act, ResultType type, const std::vector<Field> & fields) override @@ -225,7 +246,7 @@ public: if (type == resFileLinked) { state->filesLinked++; state->bytesLinked += getI(fields, 0); - update(*state); + update(); } else if (type == resBuildLogLine) { @@ -238,25 +259,25 @@ public: info.lastLine = lastLine; state->activities.emplace_back(info); i->second = std::prev(state->activities.end()); - update(*state); + update(); } } else if (type == resUntrustedPath) { state->untrustedPaths++; - update(*state); + update(); } else if (type == resCorruptedPath) { state->corruptedPaths++; - update(*state); + update(); } else if (type == resSetPhase) { auto i = state->its.find(act); assert(i != state->its.end()); i->second->phase = getS(fields, 0); - update(*state); + update(); } else if (type == resProgress) { @@ -267,7 +288,7 @@ public: actInfo.expected = getI(fields, 1); actInfo.running = getI(fields, 2); actInfo.failed = getI(fields, 3); - update(*state); + update(); } else if (type == resSetExpected) { @@ -279,17 +300,16 @@ public: state->activitiesByType[type].expected -= j; j = getI(fields, 1); state->activitiesByType[type].expected += j; - update(*state); + update(); } } void update() { - auto state(state_.lock()); - update(*state); + updateCV.notify_one(); } - void update(State & state) + void draw(State & state) { if (!state.active) return; diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 1adb816c5bf0..9216209173d9 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -186,7 +186,16 @@ bool NixRepl::getLine(string & input, const std::string &prompt) { char * s = linenoise(prompt.c_str()); Finally doFree([&]() { free(s); }); - if (!s) return false; + if (!s) { + switch (auto type = linenoiseKeyType()) { + case 1: // ctrl-C + return true; + case 2: // ctrl-D + return false; + default: + throw Error(format("Unexpected linenoise keytype: %1%") % type); + } + } input += s; return true; } diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc new file mode 100644 index 000000000000..758bbbc688bc --- /dev/null +++ b/src/nix/upgrade-nix.cc @@ -0,0 +1,131 @@ +#include "command.hh" +#include "store-api.hh" +#include "download.hh" +#include "eval.hh" +#include "attr-path.hh" + +using namespace nix; + +struct CmdUpgradeNix : StoreCommand +{ + Path profileDir; + + CmdUpgradeNix() + { + mkFlag() + .longName("profile") + .shortName('p') + .labels({"profile-dir"}) + .description("the Nix profile to upgrade") + .dest(&profileDir); + } + + std::string name() override + { + return "upgrade-nix"; + } + + std::string description() override + { + return "upgrade Nix to the latest stable version"; + } + + Examples examples() override + { + return { + Example{ + "To upgrade Nix to the latest stable version:", + "nix upgrade-nix" + }, + Example{ + "To upgrade Nix in a specific profile:", + "nix upgrade-nix -p /nix/var/nix/profiles/per-user/alice/profile" + }, + }; + } + + void run(ref<Store> store) override + { + settings.pureEval = true; + + if (profileDir == "") + profileDir = getProfileDir(store); + + printInfo("upgrading Nix in profile 
'%s'", profileDir); + + Path storePath; + { + Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version"); + storePath = getLatestNix(store); + } + + { + Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", storePath)); + store->ensurePath(storePath); + } + + { + Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", storePath)); + auto program = storePath + "/bin/nix-env"; + auto s = runProgram(program, false, {"--version"}); + if (s.find("Nix") == std::string::npos) + throw Error("could not verify that '%s' works", program); + } + + { + Activity act(*logger, lvlInfo, actUnknown, fmt("installing '%s' into profile '%s'...", storePath, profileDir)); + runProgram(settings.nixBinDir + "/nix-env", false, + {"--profile", profileDir, "-i", storePath, "--no-sandbox"}); + } + } + + /* Return the profile in which Nix is installed. */ + Path getProfileDir(ref<Store> store) + { + Path where; + + for (auto & dir : tokenizeString<Strings>(getEnv("PATH"), ":")) + if (pathExists(dir + "/nix-env")) { + where = dir; + break; + } + + if (where == "") + throw Error("couldn't figure out how Nix is installed, so I can't upgrade it"); + + printInfo("found Nix in '%s'", where); + + if (hasPrefix(where, "/run/current-system")) + throw Error("Nix on NixOS must be upgraded via 'nixos-rebuild'"); + + Path profileDir; + Path userEnv; + + if (baseNameOf(where) != "bin" || + !hasSuffix(userEnv = canonPath(profileDir = dirOf(where), true), "user-environment")) + throw Error("directory '%s' does not appear to be part of a Nix profile", where); + + if (!store->isValidPath(userEnv)) + throw Error("directory '%s' is not in the Nix store", userEnv); + + return profileDir; + } + + /* Return the store path of the latest stable Nix. */ + Path getLatestNix(ref<Store> store) + { + // FIXME: use nixos.org? + auto req = DownloadRequest("https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix"); + auto res = getDownloader()->download(req); + + EvalState state(Strings(), store); + auto v = state.allocValue(); + state.eval(state.parseExprFromString(*res.data, "/no-such-path"), *v); + Bindings & bindings(*state.allocBindings(0)); + auto v2 = findAlongAttrPath(state, settings.thisSystem, bindings, *v); + + return state.forceString(*v2); + } +}; + +static RegisterCommand r1(make_ref<CmdUpgradeNix>()); diff --git a/src/nlohmann/json.hpp b/src/nlohmann/json.hpp index 9754e464c76b..5b0b0ea5b301 100644 --- a/src/nlohmann/json.hpp +++ b/src/nlohmann/json.hpp @@ -1,7 +1,7 @@ /* __ _____ _____ _____ __| | __| | | | JSON for Modern C++ -| | |__ | | | | | | version 2.1.1 +| | |__ | | | | | | version 3.0.1 |_____|_____|_____|_|___| https://github.com/nlohmann/json Licensed under the MIT License <http://opensource.org/licenses/MIT>. @@ -109,7 +109,7 @@ SOFTWARE. #define JSON_UNLIKELY(x) x #endif -// cpp language standard detection +// C++ language standard detection #if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 #define JSON_HAS_CPP_17 #define JSON_HAS_CPP_14 @@ -128,20 +128,18 @@ template<typename = void, typename = void> struct adl_serializer; // forward declaration of basic_json (required to split the class) -template<template<typename U, typename V, typename... Args> class ObjectType = - std::map, - template<typename U, typename... 
Args> class ArrayType = std::vector, +template<template<typename, typename, typename...> class ObjectType = std::map, + template<typename, typename...> class ArrayType = std::vector, class StringType = std::string, class BooleanType = bool, class NumberIntegerType = std::int64_t, class NumberUnsignedType = std::uint64_t, class NumberFloatType = double, - template<typename U> class AllocatorType = std::allocator, - template<typename T, typename SFINAE = void> class JSONSerializer = - adl_serializer> + template<typename> class AllocatorType = std::allocator, + template<typename, typename = void> class JSONSerializer = adl_serializer> class basic_json; -// Ugly macros to avoid uglier copy-paste when specializing basic_json -// This is only temporary and will be removed in 3.0 +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. #define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ template<template<typename, typename, typename...> class ObjectType, \ @@ -227,7 +225,7 @@ class exception : public std::exception /*! @brief exception indicating a parse error -This excpetion is thrown by the library when a parse error occurs. Parse errors +This exception is thrown by the library when a parse error occurs. Parse errors can occur during the deserialization of JSON text, CBOR, MessagePack, as well as when using JSON Patch. @@ -243,12 +241,12 @@ json.exception.parse_error.102 | parse error at 14: missing or wrong low surroga json.exception.parse_error.103 | parse error: code points above 0x10FFFF are invalid | Unicode supports code points up to 0x10FFFF. Code points above 0x10FFFF are invalid. json.exception.parse_error.104 | parse error: JSON patch must be an array of objects | [RFC 6902](https://tools.ietf.org/html/rfc6902) requires a JSON Patch document to be a JSON document that represents an array of objects. json.exception.parse_error.105 | parse error: operation must have string member 'op' | An operation of a JSON Patch document must contain exactly one "op" member, whose value indicates the operation to perform. Its value must be one of "add", "remove", "replace", "move", "copy", or "test"; other values are errors. -json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number wihtout a leading `0`. +json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number without a leading `0`. json.exception.parse_error.107 | parse error: JSON pointer must be empty or begin with '/' - was: 'foo' | A JSON Pointer must be a Unicode string containing a sequence of zero or more reference tokens, each prefixed by a `/` character. json.exception.parse_error.108 | parse error: escape character '~' must be followed with '0' or '1' | In a JSON Pointer, only `~0` and `~1` are valid escape sequences. json.exception.parse_error.109 | parse error: array index 'one' is not a number | A JSON Pointer array index must be a number. json.exception.parse_error.110 | parse error at 1: cannot read 2 bytes from vector | When parsing CBOR or MessagePack, the byte vector ends before the complete value has been read. -json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xf8 | Not all types of CBOR or MessagePack are supported. 
This exception occurs if an unsupported byte was read. +json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xF8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read. json.exception.parse_error.113 | parse error at 2: expected a CBOR string; last byte: 0x98 | While parsing a map key, a value that is not a string has been read. @note For an input with n bytes, 1 is the index of the first character and n+1 @@ -378,6 +376,7 @@ json.exception.type_error.312 | cannot use update() with string | The @ref updat json.exception.type_error.313 | invalid value to unflatten | The @ref unflatten function converts an object whose keys are JSON Pointers back into an arbitrary nested JSON value. The JSON Pointers must not overlap, because then the resulting value would not be well defined. json.exception.type_error.314 | only objects can be unflattened | The @ref unflatten function only works for an object whose keys are JSON Pointers. json.exception.type_error.315 | values in object must be primitive | The @ref unflatten function only works for an object whose keys are JSON Pointers and whose values are primitive. +json.exception.type_error.316 | invalid UTF-8 byte at index 10: 0x7E | The @ref dump function only works with UTF-8 encoded strings; that is, if you assign a `std::string` to a JSON value, make sure it is UTF-8 encoded. | @liveexample{The following code shows how a `type_error` exception can be caught.,type_error} @@ -457,7 +456,6 @@ Exceptions have ids 5xx. name / id | example message | description ------------------------------ | --------------- | ------------------------- json.exception.other_error.501 | unsuccessful: {"op":"test","path":"/baz", "value":"bar"} | A JSON Patch operation 'test' failed. The unsuccessful operation is also printed. -json.exception.other_error.502 | invalid object size for conversion | Some conversions to user-defined types impose constraints on the object size (e.g. std::pair) @sa @ref exception for the base class of the library exceptions @sa @ref parse_error for exceptions indicating a parse error @@ -540,20 +538,14 @@ Returns an ordering that is similar to Python: inline bool operator<(const value_t lhs, const value_t rhs) noexcept { static constexpr std::array<uint8_t, 8> order = {{ - 0, // null - 3, // object - 4, // array - 5, // string - 1, // boolean - 2, // integer - 2, // unsigned - 2, // float + 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */, + 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */ } }; const auto l_index = static_cast<std::size_t>(lhs); const auto r_index = static_cast<std::size_t>(rhs); - return (l_index < order.size() and r_index < order.size() and order[l_index] < order[r_index]); + return l_index < order.size() and r_index < order.size() and order[l_index] < order[r_index]; } @@ -591,17 +583,15 @@ struct merge_and_renumber; template<std::size_t... I1, std::size_t... I2> struct merge_and_renumber<index_sequence<I1...>, index_sequence<I2...>> - : index_sequence < I1..., (sizeof...(I1) + I2)... > - {}; + : index_sequence < I1..., (sizeof...(I1) + I2)... 
> {}; template<std::size_t N> struct make_index_sequence : merge_and_renumber < typename make_index_sequence < N / 2 >::type, - typename make_index_sequence < N - N / 2 >::type > -{}; + typename make_index_sequence < N - N / 2 >::type > {}; -template<> struct make_index_sequence<0> : index_sequence<> { }; -template<> struct make_index_sequence<1> : index_sequence<0> { }; +template<> struct make_index_sequence<0> : index_sequence<> {}; +template<> struct make_index_sequence<1> : index_sequence<0> {}; template<typename... Ts> using index_sequence_for = make_index_sequence<sizeof...(Ts)>; @@ -624,7 +614,7 @@ template<class B1> struct conjunction<B1> : B1 {}; template<class B1, class... Bn> struct conjunction<B1, Bn...> : std::conditional<bool(B1::value), conjunction<Bn...>, B1>::type {}; -template<class B> struct negation : std::integral_constant < bool, !B::value > {}; +template<class B> struct negation : std::integral_constant<bool, not B::value> {}; // dispatch utility (taken from ranges-v3) template<unsigned N> struct priority_tag : priority_tag < N - 1 > {}; @@ -725,8 +715,7 @@ struct external_constructor<value_t::array> } template<typename BasicJsonType, typename CompatibleArrayType, - enable_if_t<not std::is_same<CompatibleArrayType, - typename BasicJsonType::array_t>::value, + enable_if_t<not std::is_same<CompatibleArrayType, typename BasicJsonType::array_t>::value, int> = 0> static void construct(BasicJsonType& j, const CompatibleArrayType& arr) { @@ -743,7 +732,7 @@ struct external_constructor<value_t::array> j.m_type = value_t::array; j.m_value = value_t::array; j.m_value.array->reserve(arr.size()); - for (bool x : arr) + for (const bool x : arr) { j.m_value.array->push_back(x); } @@ -782,8 +771,7 @@ struct external_constructor<value_t::object> } template<typename BasicJsonType, typename CompatibleObjectType, - enable_if_t<not std::is_same<CompatibleObjectType, - typename BasicJsonType::object_t>::value, int> = 0> + enable_if_t<not std::is_same<CompatibleObjectType, typename BasicJsonType::object_t>::value, int> = 0> static void construct(BasicJsonType& j, const CompatibleObjectType& obj) { using std::begin; @@ -896,7 +884,7 @@ struct is_compatible_integer_type is_compatible_integer_type_impl < std::is_integral<CompatibleNumberIntegerType>::value and not std::is_same<bool, CompatibleNumberIntegerType>::value, - RealIntegerType, CompatibleNumberIntegerType > ::value; + RealIntegerType, CompatibleNumberIntegerType >::value; }; @@ -922,10 +910,8 @@ template<typename BasicJsonType, typename T> struct has_non_default_from_json { private: - template < - typename U, - typename = enable_if_t<std::is_same< - T, decltype(uncvref_t<U>::from_json(std::declval<BasicJsonType>()))>::value >> + template<typename U, typename = + enable_if_t<std::is_same<T, decltype(uncvref_t<U>::from_json(std::declval<BasicJsonType>()))>::value>> static int detect(U&&); static void detect(...); @@ -954,22 +940,21 @@ struct has_to_json // to_json // ///////////// -template<typename BasicJsonType, typename T, enable_if_t< - std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0> +template<typename BasicJsonType, typename T, + enable_if_t<std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0> void to_json(BasicJsonType& j, T b) noexcept { external_constructor<value_t::boolean>::construct(j, b); } template<typename BasicJsonType, typename CompatibleString, - enable_if_t<std::is_constructible<typename BasicJsonType::string_t, - CompatibleString>::value, int> = 0> + 
enable_if_t<std::is_constructible<typename BasicJsonType::string_t, CompatibleString>::value, int> = 0> void to_json(BasicJsonType& j, const CompatibleString& s) { external_constructor<value_t::string>::construct(j, s); } -template <typename BasicJsonType> +template<typename BasicJsonType> void to_json(BasicJsonType& j, typename BasicJsonType::string_t&& s) { external_constructor<value_t::string>::construct(j, std::move(s)); @@ -982,19 +967,15 @@ void to_json(BasicJsonType& j, FloatType val) noexcept external_constructor<value_t::number_float>::construct(j, static_cast<typename BasicJsonType::number_float_t>(val)); } -template < - typename BasicJsonType, typename CompatibleNumberUnsignedType, - enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_unsigned_t, - CompatibleNumberUnsignedType>::value, int> = 0 > +template<typename BasicJsonType, typename CompatibleNumberUnsignedType, + enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_unsigned_t, CompatibleNumberUnsignedType>::value, int> = 0> void to_json(BasicJsonType& j, CompatibleNumberUnsignedType val) noexcept { external_constructor<value_t::number_unsigned>::construct(j, static_cast<typename BasicJsonType::number_unsigned_t>(val)); } -template < - typename BasicJsonType, typename CompatibleNumberIntegerType, - enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_integer_t, - CompatibleNumberIntegerType>::value, int> = 0 > +template<typename BasicJsonType, typename CompatibleNumberIntegerType, + enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_integer_t, CompatibleNumberIntegerType>::value, int> = 0> void to_json(BasicJsonType& j, CompatibleNumberIntegerType val) noexcept { external_constructor<value_t::number_integer>::construct(j, static_cast<typename BasicJsonType::number_integer_t>(val)); @@ -1014,49 +995,43 @@ void to_json(BasicJsonType& j, const std::vector<bool>& e) external_constructor<value_t::array>::construct(j, e); } -template < - typename BasicJsonType, typename CompatibleArrayType, - enable_if_t < - is_compatible_array_type<BasicJsonType, CompatibleArrayType>::value or - std::is_same<typename BasicJsonType::array_t, CompatibleArrayType>::value, - int > = 0 > +template<typename BasicJsonType, typename CompatibleArrayType, + enable_if_t<is_compatible_array_type<BasicJsonType, CompatibleArrayType>::value or + std::is_same<typename BasicJsonType::array_t, CompatibleArrayType>::value, + int> = 0> void to_json(BasicJsonType& j, const CompatibleArrayType& arr) { external_constructor<value_t::array>::construct(j, arr); } -template <typename BasicJsonType, typename T, - enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0> +template<typename BasicJsonType, typename T, + enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0> void to_json(BasicJsonType& j, std::valarray<T> arr) { external_constructor<value_t::array>::construct(j, std::move(arr)); } -template <typename BasicJsonType> +template<typename BasicJsonType> void to_json(BasicJsonType& j, typename BasicJsonType::array_t&& arr) { external_constructor<value_t::array>::construct(j, std::move(arr)); } -template < - typename BasicJsonType, typename CompatibleObjectType, - enable_if_t<is_compatible_object_type<BasicJsonType, CompatibleObjectType>::value, - int> = 0 > +template<typename BasicJsonType, typename CompatibleObjectType, + enable_if_t<is_compatible_object_type<BasicJsonType, CompatibleObjectType>::value, int> = 0> void to_json(BasicJsonType& j, const CompatibleObjectType& 
obj) { external_constructor<value_t::object>::construct(j, obj); } -template <typename BasicJsonType> +template<typename BasicJsonType> void to_json(BasicJsonType& j, typename BasicJsonType::object_t&& obj) { external_constructor<value_t::object>::construct(j, std::move(obj)); } template<typename BasicJsonType, typename T, std::size_t N, - enable_if_t<not std::is_constructible< - typename BasicJsonType::string_t, T (&)[N]>::value, - int> = 0> + enable_if_t<not std::is_constructible<typename BasicJsonType::string_t, T (&)[N]>::value, int> = 0> void to_json(BasicJsonType& j, T (&arr)[N]) { external_constructor<value_t::array>::construct(j, arr); @@ -1087,8 +1062,7 @@ void to_json(BasicJsonType& j, const std::tuple<Args...>& t) // overloads for basic_json template parameters template<typename BasicJsonType, typename ArithmeticType, enable_if_t<std::is_arithmetic<ArithmeticType>::value and - not std::is_same<ArithmeticType, - typename BasicJsonType::boolean_t>::value, + not std::is_same<ArithmeticType, typename BasicJsonType::boolean_t>::value, int> = 0> void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val) { @@ -1351,6 +1325,13 @@ struct to_json_fn { static_assert(sizeof(BasicJsonType) == 0, "could not find to_json() method in T's namespace"); + +#ifdef _MSC_VER + // MSVC does not show a stacktrace for the above assert + using decayed = uncvref_t<T>; + static_assert(sizeof(typename decayed::force_msvc_stacktrace) == 0, + "forcing MSVC stacktrace to show which T we're talking about."); +#endif } public: @@ -1378,6 +1359,12 @@ struct from_json_fn { static_assert(sizeof(BasicJsonType) == 0, "could not find from_json() method in T's namespace"); +#ifdef _MSC_VER + // MSVC does not show a stacktrace for the above assert + using decayed = uncvref_t<T>; + static_assert(sizeof(typename decayed::force_msvc_stacktrace) == 0, + "forcing MSVC stacktrace to show which T we're talking about."); +#endif } public: @@ -1448,7 +1435,7 @@ class input_stream_adapter : public input_adapter_protocol explicit input_stream_adapter(std::istream& i) : is(i), sb(*i.rdbuf()) { - // ignore Byte Order Mark at start of input + // skip byte order mark std::char_traits<char>::int_type c; if ((c = get_character()) == 0xEF) { @@ -1472,7 +1459,7 @@ class input_stream_adapter : public input_adapter_protocol } else if (c != std::char_traits<char>::eof()) { - is.unget(); // Not BOM. Process as usual. + is.unget(); // no byte order mark; process as usual } } @@ -1481,8 +1468,8 @@ class input_stream_adapter : public input_adapter_protocol input_stream_adapter& operator=(input_stream_adapter&) = delete; // std::istream/std::streambuf use std::char_traits<char>::to_int_type, to - // ensure that std::char_traits<char>::eof() and the character 0xff do not - // end up as the same value, eg. 0xffffffff. + // ensure that std::char_traits<char>::eof() and the character 0xFF do not + // end up as the same value, eg. 0xFFFFFFFF. 
std::char_traits<char>::int_type get_character() override { return sb.sbumpc(); @@ -1561,8 +1548,7 @@ class input_adapter template<typename CharT, typename std::enable_if< std::is_pointer<CharT>::value and - std::is_integral< - typename std::remove_pointer<CharT>::type>::value and + std::is_integral<typename std::remove_pointer<CharT>::type>::value and sizeof(typename std::remove_pointer<CharT>::type) == 1, int>::type = 0> input_adapter(CharT b, std::size_t l) @@ -1574,8 +1560,7 @@ class input_adapter template<typename CharT, typename std::enable_if< std::is_pointer<CharT>::value and - std::is_integral< - typename std::remove_pointer<CharT>::type>::value and + std::is_integral<typename std::remove_pointer<CharT>::type>::value and sizeof(typename std::remove_pointer<CharT>::type) == 1, int>::type = 0> input_adapter(CharT b) @@ -1585,8 +1570,7 @@ class input_adapter /// input adapter for iterator range with contiguous storage template<class IteratorType, typename std::enable_if< - std::is_same<typename std::iterator_traits<IteratorType>::iterator_category, - std::random_access_iterator_tag>::value, + std::is_same<typename std::iterator_traits<IteratorType>::iterator_category, std::random_access_iterator_tag>::value, int>::type = 0> input_adapter(IteratorType first, IteratorType last) { @@ -1624,13 +1608,10 @@ class input_adapter : input_adapter(std::begin(array), std::end(array)) {} /// input adapter for contiguous container - template < - class ContiguousContainer, - typename std::enable_if < - not std::is_pointer<ContiguousContainer>::value and - std::is_base_of<std::random_access_iterator_tag, - typename std::iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value, - int >::type = 0 > + template<class ContiguousContainer, typename + std::enable_if<not std::is_pointer<ContiguousContainer>::value and + std::is_base_of<std::random_access_iterator_tag, typename std::iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value, + int>::type = 0> input_adapter(const ContiguousContainer& c) : input_adapter(std::begin(c), std::end(c)) {} @@ -1804,6 +1785,12 @@ class lexer checks if it is inside the range. If a violation was detected, set up an error message and return false. Otherwise, return true. + @param[in] ranges list of integers; interpreted as list of pairs of + inclusive lower and upper bound, respectively + + @pre The passed list @a ranges must have 2, 4, or 6 elements; that is, + 1, 2, or 3 pairs. This precondition is enforced by an assertion. 
+ @return true if and only if no range violation was detected */ bool next_byte_in_range(std::initializer_list<int> ranges) @@ -1970,19 +1957,19 @@ class lexer // result of the above calculation yields a proper codepoint assert(0x00 <= codepoint and codepoint <= 0x10FFFF); - // translate code point to bytes + // translate codepoint into bytes if (codepoint < 0x80) { // 1-byte characters: 0xxxxxxx (ASCII) add(codepoint); } - else if (codepoint <= 0x7ff) + else if (codepoint <= 0x7FF) { // 2-byte characters: 110xxxxx 10xxxxxx add(0xC0 | (codepoint >> 6)); add(0x80 | (codepoint & 0x3F)); } - else if (codepoint <= 0xffff) + else if (codepoint <= 0xFFFF) { // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx add(0xE0 | (codepoint >> 12)); @@ -2021,12 +2008,12 @@ class lexer case 0x07: case 0x08: case 0x09: - case 0x0a: - case 0x0b: - case 0x0c: - case 0x0d: - case 0x0e: - case 0x0f: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -2037,12 +2024,12 @@ class lexer case 0x17: case 0x18: case 0x19: - case 0x1a: - case 0x1b: - case 0x1c: - case 0x1d: - case 0x1e: - case 0x1f: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: { error_message = "invalid string: control character must be escaped"; return token_type::parse_error; @@ -2058,12 +2045,12 @@ class lexer case 0x27: case 0x28: case 0x29: - case 0x2a: - case 0x2b: - case 0x2c: - case 0x2d: - case 0x2e: - case 0x2f: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: case 0x30: case 0x31: case 0x32: @@ -2074,12 +2061,12 @@ class lexer case 0x37: case 0x38: case 0x39: - case 0x3a: - case 0x3b: - case 0x3c: - case 0x3d: - case 0x3e: - case 0x3f: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: case 0x40: case 0x41: case 0x42: @@ -2090,12 +2077,12 @@ class lexer case 0x47: case 0x48: case 0x49: - case 0x4a: - case 0x4b: - case 0x4c: - case 0x4d: - case 0x4e: - case 0x4f: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: case 0x50: case 0x51: case 0x52: @@ -2106,11 +2093,11 @@ class lexer case 0x57: case 0x58: case 0x59: - case 0x5a: - case 0x5b: - case 0x5d: - case 0x5e: - case 0x5f: + case 0x5A: + case 0x5B: + case 0x5D: + case 0x5E: + case 0x5F: case 0x60: case 0x61: case 0x62: @@ -2121,12 +2108,12 @@ class lexer case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -2137,48 +2124,48 @@ class lexer case 0x77: case 0x78: case 0x79: - case 0x7a: - case 0x7b: - case 0x7c: - case 0x7d: - case 0x7e: - case 0x7f: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: { add(current); break; } // U+0080..U+07FF: bytes C2..DF 80..BF - case 0xc2: - case 0xc3: - case 0xc4: - case 0xc5: - case 0xc6: - case 0xc7: - case 0xc8: - case 0xc9: - case 0xca: - case 0xcb: - case 0xcc: - case 0xcd: - case 0xce: - case 0xcf: - case 0xd0: - case 0xd1: - case 0xd2: - case 0xd3: - case 0xd4: - case 0xd5: - case 0xd6: - case 0xd7: - case 0xd8: - case 0xd9: - case 0xda: - case 0xdb: - case 0xdc: - case 0xdd: - case 0xde: - case 0xdf: + case 0xC2: + case 0xC3: + case 0xC4: + case 0xC5: + case 0xC6: + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD5: + case 0xD6: + case 0xD7: 
+ case 0xD8: + case 0xD9: + case 0xDA: + case 0xDB: + case 0xDC: + case 0xDD: + case 0xDE: + case 0xDF: { if (JSON_UNLIKELY(not next_byte_in_range({0x80, 0xBF}))) { @@ -2188,7 +2175,7 @@ class lexer } // U+0800..U+0FFF: bytes E0 A0..BF 80..BF - case 0xe0: + case 0xE0: { if (JSON_UNLIKELY(not (next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF})))) { @@ -2199,20 +2186,20 @@ class lexer // U+1000..U+CFFF: bytes E1..EC 80..BF 80..BF // U+E000..U+FFFF: bytes EE..EF 80..BF 80..BF - case 0xe1: - case 0xe2: - case 0xe3: - case 0xe4: - case 0xe5: - case 0xe6: - case 0xe7: - case 0xe8: - case 0xe9: - case 0xea: - case 0xeb: - case 0xec: - case 0xee: - case 0xef: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xEE: + case 0xEF: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF})))) { @@ -2222,7 +2209,7 @@ class lexer } // U+D000..U+D7FF: bytes ED 80..9F 80..BF - case 0xed: + case 0xED: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x9F, 0x80, 0xBF})))) { @@ -2232,7 +2219,7 @@ class lexer } // U+10000..U+3FFFF F0 90..BF 80..BF 80..BF - case 0xf0: + case 0xF0: { if (JSON_UNLIKELY(not (next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) { @@ -2242,9 +2229,9 @@ class lexer } // U+40000..U+FFFFF F1..F3 80..BF 80..BF 80..BF - case 0xf1: - case 0xf2: - case 0xf3: + case 0xF1: + case 0xF2: + case 0xF3: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) { @@ -2254,7 +2241,7 @@ class lexer } // U+100000..U+10FFFF F4 80..8F 80..BF 80..BF - case 0xf4: + case 0xF4: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF})))) { @@ -2772,9 +2759,9 @@ scan_number_done: { // escape control characters std::string result; - for (auto c : token_string) + for (const auto c : token_string) { - if ('\x00' <= c and c <= '\x1f') + if ('\x00' <= c and c <= '\x1F') { // escape control characters std::stringstream ss; @@ -2877,10 +2864,10 @@ scan_number_done: std::size_t chars_read = 0; /// raw input token string (for error messages) - std::vector<char> token_string { }; + std::vector<char> token_string {}; /// buffer for variable-length tokens (numbers, strings) - std::string yytext { }; + std::string yytext {}; /// a description of occurred lexer errors const char* error_message = ""; @@ -3281,7 +3268,7 @@ class parser } /*! - @brief the acutal acceptor + @brief the actual acceptor @invariant 1. The last token is not yet processed. Therefore, the caller of this function must make sure a token has been read. 
@@ -3539,7 +3526,7 @@ class primitive_iterator_t return *this; } - primitive_iterator_t operator++(int) + primitive_iterator_t const operator++(int) { auto result = *this; m_it++; @@ -3552,7 +3539,7 @@ class primitive_iterator_t return *this; } - primitive_iterator_t operator--(int) + primitive_iterator_t const operator--(int) { auto result = *this; m_it--; @@ -3618,7 +3605,7 @@ This class implements a both iterators (iterator and const_iterator) for the iterators in version 3.0.0 (see https://github.com/nlohmann/json/issues/593) */ template<typename BasicJsonType> -class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJsonType> +class iter_impl { /// allow basic_json to access private members friend iter_impl<typename std::conditional<std::is_const<BasicJsonType>::value, typename std::remove_const<BasicJsonType>::type, const BasicJsonType>::type>; @@ -3632,6 +3619,14 @@ class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJso "iter_impl only accepts (const) basic_json"); public: + + /// The std::iterator class template (used as a base class to provide typedefs) is deprecated in C++17. + /// The C++ Standard has never required user-defined iterators to derive from std::iterator. + /// A user-defined iterator should provide publicly accessible typedefs named + /// iterator_category, value_type, difference_type, pointer, and reference. + /// Note that value_type is required to be non-const, even for constant iterators. + using iterator_category = std::bidirectional_iterator_tag; + /// the type of the values when the iterator is dereferenced using value_type = typename BasicJsonType::value_type; /// a type to represent differences between iterators @@ -3855,7 +3850,7 @@ class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJso @brief post-increment (it++) @pre The iterator is initialized; i.e. `m_object != nullptr`. */ - iter_impl operator++(int) + iter_impl const operator++(int) { auto result = *this; ++(*this); @@ -3898,7 +3893,7 @@ class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJso @brief post-decrement (it--) @pre The iterator is initialized; i.e. `m_object != nullptr`. 
*/ - iter_impl operator--(int) + iter_impl const operator--(int) { auto result = *this; --(*this); @@ -4291,7 +4286,7 @@ class json_reverse_iterator : public std::reverse_iterator<Base> { public: using difference_type = std::ptrdiff_t; - /// shortcut to the reverse iterator adaptor + /// shortcut to the reverse iterator adapter using base_iterator = std::reverse_iterator<Base>; /// the reference type for the pointed-to element using reference = typename Base::reference; @@ -4304,7 +4299,7 @@ class json_reverse_iterator : public std::reverse_iterator<Base> json_reverse_iterator(const base_iterator& it) noexcept : base_iterator(it) {} /// post-increment (it++) - json_reverse_iterator operator++(int) + json_reverse_iterator const operator++(int) { return static_cast<json_reverse_iterator>(base_iterator::operator++(1)); } @@ -4316,7 +4311,7 @@ class json_reverse_iterator : public std::reverse_iterator<Base> } /// post-decrement (it--) - json_reverse_iterator operator--(int) + json_reverse_iterator const operator--(int) { return static_cast<json_reverse_iterator>(base_iterator::operator--(1)); } @@ -4576,12 +4571,12 @@ class binary_reader case 0x07: case 0x08: case 0x09: - case 0x0a: - case 0x0b: - case 0x0c: - case 0x0d: - case 0x0e: - case 0x0f: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -4598,10 +4593,10 @@ class binary_reader case 0x19: // Unsigned integer (two-byte uint16_t follows) return get_number<uint16_t>(); - case 0x1a: // Unsigned integer (four-byte uint32_t follows) + case 0x1A: // Unsigned integer (four-byte uint32_t follows) return get_number<uint32_t>(); - case 0x1b: // Unsigned integer (eight-byte uint64_t follows) + case 0x1B: // Unsigned integer (eight-byte uint64_t follows) return get_number<uint64_t>(); // Negative integer -1-0x00..-1-0x17 (-1..-24) @@ -4615,12 +4610,12 @@ class binary_reader case 0x27: case 0x28: case 0x29: - case 0x2a: - case 0x2b: - case 0x2c: - case 0x2d: - case 0x2e: - case 0x2f: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: case 0x30: case 0x31: case 0x32: @@ -4642,12 +4637,12 @@ class binary_reader return static_cast<number_integer_t>(-1) - get_number<uint16_t>(); } - case 0x3a: // Negative integer -1-n (four-byte uint32_t follows) + case 0x3A: // Negative integer -1-n (four-byte uint32_t follows) { return static_cast<number_integer_t>(-1) - get_number<uint32_t>(); } - case 0x3b: // Negative integer -1-n (eight-byte uint64_t follows) + case 0x3B: // Negative integer -1-n (eight-byte uint64_t follows) { return static_cast<number_integer_t>(-1) - static_cast<number_integer_t>(get_number<uint64_t>()); @@ -4664,12 +4659,12 @@ class binary_reader case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -4680,9 +4675,9 @@ class binary_reader case 0x77: case 0x78: // UTF-8 string (one-byte uint8_t for n follows) case 0x79: // UTF-8 string (two-byte uint16_t for n follow) - case 0x7a: // UTF-8 string (four-byte uint32_t for n follow) - case 0x7b: // UTF-8 string (eight-byte uint64_t for n follow) - case 0x7f: // UTF-8 string (indefinite length) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7F: // UTF-8 string (indefinite length) { return get_cbor_string(); } @@ -4698,12 +4693,12 @@ class binary_reader 
case 0x87: case 0x88: case 0x89: - case 0x8a: - case 0x8b: - case 0x8c: - case 0x8d: - case 0x8e: - case 0x8f: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: case 0x90: case 0x91: case 0x92: @@ -4713,7 +4708,7 @@ class binary_reader case 0x96: case 0x97: { - return get_cbor_array(current & 0x1f); + return get_cbor_array(current & 0x1F); } case 0x98: // array (one-byte uint8_t for n follows) @@ -4726,20 +4721,20 @@ class binary_reader return get_cbor_array(get_number<uint16_t>()); } - case 0x9a: // array (four-byte uint32_t for n follow) + case 0x9A: // array (four-byte uint32_t for n follow) { return get_cbor_array(get_number<uint32_t>()); } - case 0x9b: // array (eight-byte uint64_t for n follow) + case 0x9B: // array (eight-byte uint64_t for n follow) { return get_cbor_array(get_number<uint64_t>()); } - case 0x9f: // array (indefinite length) + case 0x9F: // array (indefinite length) { BasicJsonType result = value_t::array; - while (get() != 0xff) + while (get() != 0xFF) { result.push_back(parse_cbor_internal(false)); } @@ -4747,58 +4742,58 @@ class binary_reader } // map (0x00..0x17 pairs of data items follow) - case 0xa0: - case 0xa1: - case 0xa2: - case 0xa3: - case 0xa4: - case 0xa5: - case 0xa6: - case 0xa7: - case 0xa8: - case 0xa9: - case 0xaa: - case 0xab: - case 0xac: - case 0xad: - case 0xae: - case 0xaf: - case 0xb0: - case 0xb1: - case 0xb2: - case 0xb3: - case 0xb4: - case 0xb5: - case 0xb6: - case 0xb7: - { - return get_cbor_object(current & 0x1f); - } - - case 0xb8: // map (one-byte uint8_t for n follows) + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + { + return get_cbor_object(current & 0x1F); + } + + case 0xB8: // map (one-byte uint8_t for n follows) { return get_cbor_object(get_number<uint8_t>()); } - case 0xb9: // map (two-byte uint16_t for n follow) + case 0xB9: // map (two-byte uint16_t for n follow) { return get_cbor_object(get_number<uint16_t>()); } - case 0xba: // map (four-byte uint32_t for n follow) + case 0xBA: // map (four-byte uint32_t for n follow) { return get_cbor_object(get_number<uint32_t>()); } - case 0xbb: // map (eight-byte uint64_t for n follow) + case 0xBB: // map (eight-byte uint64_t for n follow) { return get_cbor_object(get_number<uint64_t>()); } - case 0xbf: // map (indefinite length) + case 0xBF: // map (indefinite length) { BasicJsonType result = value_t::object; - while (get() != 0xff) + while (get() != 0xFF) { auto key = get_cbor_string(); result[key] = parse_cbor_internal(); @@ -4806,22 +4801,22 @@ class binary_reader return result; } - case 0xf4: // false + case 0xF4: // false { return false; } - case 0xf5: // true + case 0xF5: // true { return true; } - case 0xf6: // null + case 0xF6: // null { return value_t::null; } - case 0xf9: // Half-Precision Float (two-byte IEEE 754) + case 0xF9: // Half-Precision Float (two-byte IEEE 754) { const int byte1 = get(); check_eof(); @@ -4837,8 +4832,8 @@ class binary_reader // half-precision floating-point numbers in the C language // is shown in Fig. 3. 
const int half = (byte1 << 8) + byte2; - const int exp = (half >> 10) & 0x1f; - const int mant = half & 0x3ff; + const int exp = (half >> 10) & 0x1F; + const int mant = half & 0x3FF; double val; if (exp == 0) { @@ -4856,12 +4851,12 @@ class binary_reader return (half & 0x8000) != 0 ? -val : val; } - case 0xfa: // Single-Precision Float (four-byte IEEE 754) + case 0xFA: // Single-Precision Float (four-byte IEEE 754) { return get_number<float>(); } - case 0xfb: // Double-Precision Float (eight-byte IEEE 754) + case 0xFB: // Double-Precision Float (eight-byte IEEE 754) { return get_number<double>(); } @@ -4869,7 +4864,7 @@ class binary_reader default: // anything else (0xFF is handled inside the other types) { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(112, chars_read, "error reading CBOR; last byte: 0x" + ss.str())); } } @@ -4894,12 +4889,12 @@ class binary_reader case 0x07: case 0x08: case 0x09: - case 0x0a: - case 0x0b: - case 0x0c: - case 0x0d: - case 0x0e: - case 0x0f: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -4910,12 +4905,12 @@ class binary_reader case 0x17: case 0x18: case 0x19: - case 0x1a: - case 0x1b: - case 0x1c: - case 0x1d: - case 0x1e: - case 0x1f: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: case 0x20: case 0x21: case 0x22: @@ -4926,12 +4921,12 @@ class binary_reader case 0x27: case 0x28: case 0x29: - case 0x2a: - case 0x2b: - case 0x2c: - case 0x2d: - case 0x2e: - case 0x2f: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: case 0x30: case 0x31: case 0x32: @@ -4942,12 +4937,12 @@ class binary_reader case 0x37: case 0x38: case 0x39: - case 0x3a: - case 0x3b: - case 0x3c: - case 0x3d: - case 0x3e: - case 0x3f: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: case 0x40: case 0x41: case 0x42: @@ -4958,12 +4953,12 @@ class binary_reader case 0x47: case 0x48: case 0x49: - case 0x4a: - case 0x4b: - case 0x4c: - case 0x4d: - case 0x4e: - case 0x4f: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: case 0x50: case 0x51: case 0x52: @@ -4974,12 +4969,12 @@ class binary_reader case 0x57: case 0x58: case 0x59: - case 0x5a: - case 0x5b: - case 0x5c: - case 0x5d: - case 0x5e: - case 0x5f: + case 0x5A: + case 0x5B: + case 0x5C: + case 0x5D: + case 0x5E: + case 0x5F: case 0x60: case 0x61: case 0x62: @@ -4990,12 +4985,12 @@ class binary_reader case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -5006,12 +5001,12 @@ class binary_reader case 0x77: case 0x78: case 0x79: - case 0x7a: - case 0x7b: - case 0x7c: - case 0x7d: - case 0x7e: - case 0x7f: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: return static_cast<number_unsigned_t>(current); // fixmap @@ -5025,14 +5020,14 @@ class binary_reader case 0x87: case 0x88: case 0x89: - case 0x8a: - case 0x8b: - case 0x8c: - case 0x8d: - case 0x8e: - case 0x8f: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: { - return get_msgpack_object(current & 0x0f); + return get_msgpack_object(current & 0x0F); } // fixarray @@ -5046,154 +5041,154 @@ class binary_reader case 0x97: case 0x98: 
case 0x99: - case 0x9a: - case 0x9b: - case 0x9c: - case 0x9d: - case 0x9e: - case 0x9f: + case 0x9A: + case 0x9B: + case 0x9C: + case 0x9D: + case 0x9E: + case 0x9F: { - return get_msgpack_array(current & 0x0f); + return get_msgpack_array(current & 0x0F); } // fixstr - case 0xa0: - case 0xa1: - case 0xa2: - case 0xa3: - case 0xa4: - case 0xa5: - case 0xa6: - case 0xa7: - case 0xa8: - case 0xa9: - case 0xaa: - case 0xab: - case 0xac: - case 0xad: - case 0xae: - case 0xaf: - case 0xb0: - case 0xb1: - case 0xb2: - case 0xb3: - case 0xb4: - case 0xb5: - case 0xb6: - case 0xb7: - case 0xb8: - case 0xb9: - case 0xba: - case 0xbb: - case 0xbc: - case 0xbd: - case 0xbe: - case 0xbf: + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: return get_msgpack_string(); - case 0xc0: // nil + case 0xC0: // nil return value_t::null; - case 0xc2: // false + case 0xC2: // false return false; - case 0xc3: // true + case 0xC3: // true return true; - case 0xca: // float 32 + case 0xCA: // float 32 return get_number<float>(); - case 0xcb: // float 64 + case 0xCB: // float 64 return get_number<double>(); - case 0xcc: // uint 8 + case 0xCC: // uint 8 return get_number<uint8_t>(); - case 0xcd: // uint 16 + case 0xCD: // uint 16 return get_number<uint16_t>(); - case 0xce: // uint 32 + case 0xCE: // uint 32 return get_number<uint32_t>(); - case 0xcf: // uint 64 + case 0xCF: // uint 64 return get_number<uint64_t>(); - case 0xd0: // int 8 + case 0xD0: // int 8 return get_number<int8_t>(); - case 0xd1: // int 16 + case 0xD1: // int 16 return get_number<int16_t>(); - case 0xd2: // int 32 + case 0xD2: // int 32 return get_number<int32_t>(); - case 0xd3: // int 64 + case 0xD3: // int 64 return get_number<int64_t>(); - case 0xd9: // str 8 - case 0xda: // str 16 - case 0xdb: // str 32 + case 0xD9: // str 8 + case 0xDA: // str 16 + case 0xDB: // str 32 return get_msgpack_string(); - case 0xdc: // array 16 + case 0xDC: // array 16 { return get_msgpack_array(get_number<uint16_t>()); } - case 0xdd: // array 32 + case 0xDD: // array 32 { return get_msgpack_array(get_number<uint32_t>()); } - case 0xde: // map 16 + case 0xDE: // map 16 { return get_msgpack_object(get_number<uint16_t>()); } - case 0xdf: // map 32 + case 0xDF: // map 32 { return get_msgpack_object(get_number<uint32_t>()); } // positive fixint - case 0xe0: - case 0xe1: - case 0xe2: - case 0xe3: - case 0xe4: - case 0xe5: - case 0xe6: - case 0xe7: - case 0xe8: - case 0xe9: - case 0xea: - case 0xeb: - case 0xec: - case 0xed: - case 0xee: - case 0xef: - case 0xf0: - case 0xf1: - case 0xf2: - case 0xf3: - case 0xf4: - case 0xf5: - case 0xf6: - case 0xf7: - case 0xf8: - case 0xf9: - case 0xfa: - case 0xfb: - case 0xfc: - case 0xfd: - case 0xfe: - case 0xff: + case 0xE0: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xED: + case 0xEE: + case 0xEF: + case 0xF0: + case 0xF1: + case 0xF2: + case 0xF3: + case 0xF4: + case 0xF5: + case 0xF6: + case 0xF7: + case 0xF8: + case 0xF9: + case 0xFA: + case 0xFB: + case 0xFC: + case 0xFD: + case 0xFE: + case 0xFF: return static_cast<int8_t>(current); default: // 
anything else { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(112, chars_read, "error reading MessagePack; last byte: 0x" + ss.str())); } @@ -5309,12 +5304,12 @@ class binary_reader case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -5324,7 +5319,7 @@ class binary_reader case 0x76: case 0x77: { - return get_string(current & 0x1f); + return get_string(current & 0x1F); } case 0x78: // UTF-8 string (one-byte uint8_t for n follows) @@ -5337,20 +5332,20 @@ class binary_reader return get_string(get_number<uint16_t>()); } - case 0x7a: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) { return get_string(get_number<uint32_t>()); } - case 0x7b: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) { return get_string(get_number<uint64_t>()); } - case 0x7f: // UTF-8 string (indefinite length) + case 0x7F: // UTF-8 string (indefinite length) { std::string result; - while (get() != 0xff) + while (get() != 0xFF) { check_eof(); result.push_back(static_cast<char>(current)); @@ -5361,7 +5356,7 @@ class binary_reader default: { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(113, chars_read, "expected a CBOR string; last byte: 0x" + ss.str())); } } @@ -5412,53 +5407,53 @@ class binary_reader switch (current) { // fixstr - case 0xa0: - case 0xa1: - case 0xa2: - case 0xa3: - case 0xa4: - case 0xa5: - case 0xa6: - case 0xa7: - case 0xa8: - case 0xa9: - case 0xaa: - case 0xab: - case 0xac: - case 0xad: - case 0xae: - case 0xaf: - case 0xb0: - case 0xb1: - case 0xb2: - case 0xb3: - case 0xb4: - case 0xb5: - case 0xb6: - case 0xb7: - case 0xb8: - case 0xb9: - case 0xba: - case 0xbb: - case 0xbc: - case 0xbd: - case 0xbe: - case 0xbf: - { - return get_string(current & 0x1f); - } - - case 0xd9: // str 8 + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + { + return get_string(current & 0x1F); + } + + case 0xD9: // str 8 { return get_string(get_number<uint8_t>()); } - case 0xda: // str 16 + case 0xDA: // str 16 { return get_string(get_number<uint16_t>()); } - case 0xdb: // str 32 + case 0xDB: // str 32 { return get_string(get_number<uint32_t>()); } @@ -5466,7 +5461,7 @@ class binary_reader default: { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(113, chars_read, "expected a MessagePack string; last byte: 0x" + ss.str())); } @@ -5562,15 +5557,15 @@ class binary_writer { case value_t::null: { - oa->write_character(static_cast<CharType>(0xf6)); + oa->write_character(static_cast<CharType>(0xF6)); break; } 
case value_t::boolean: { oa->write_character(j.m_value.boolean - ? static_cast<CharType>(0xf5) - : static_cast<CharType>(0xf4)); + ? static_cast<CharType>(0xF5) + : static_cast<CharType>(0xF4)); break; } @@ -5597,12 +5592,12 @@ class binary_writer } else if (j.m_value.number_integer <= (std::numeric_limits<uint32_t>::max)()) { - oa->write_character(static_cast<CharType>(0x1a)); + oa->write_character(static_cast<CharType>(0x1A)); write_number(static_cast<uint32_t>(j.m_value.number_integer)); } else { - oa->write_character(static_cast<CharType>(0x1b)); + oa->write_character(static_cast<CharType>(0x1B)); write_number(static_cast<uint64_t>(j.m_value.number_integer)); } } @@ -5627,12 +5622,12 @@ class binary_writer } else if (positive_number <= (std::numeric_limits<uint32_t>::max)()) { - oa->write_character(static_cast<CharType>(0x3a)); + oa->write_character(static_cast<CharType>(0x3A)); write_number(static_cast<uint32_t>(positive_number)); } else { - oa->write_character(static_cast<CharType>(0x3b)); + oa->write_character(static_cast<CharType>(0x3B)); write_number(static_cast<uint64_t>(positive_number)); } } @@ -5657,12 +5652,12 @@ class binary_writer } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint32_t>::max)()) { - oa->write_character(static_cast<CharType>(0x1a)); + oa->write_character(static_cast<CharType>(0x1A)); write_number(static_cast<uint32_t>(j.m_value.number_unsigned)); } else { - oa->write_character(static_cast<CharType>(0x1b)); + oa->write_character(static_cast<CharType>(0x1B)); write_number(static_cast<uint64_t>(j.m_value.number_unsigned)); } break; @@ -5670,7 +5665,7 @@ class binary_writer case value_t::number_float: // Double-Precision Float { - oa->write_character(static_cast<CharType>(0xfb)); + oa->write_character(static_cast<CharType>(0xFB)); write_number(j.m_value.number_float); break; } @@ -5683,25 +5678,25 @@ class binary_writer { write_number(static_cast<uint8_t>(0x60 + N)); } - else if (N <= 0xff) + else if (N <= 0xFF) { oa->write_character(static_cast<CharType>(0x78)); write_number(static_cast<uint8_t>(N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { oa->write_character(static_cast<CharType>(0x79)); write_number(static_cast<uint16_t>(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { - oa->write_character(static_cast<CharType>(0x7a)); + oa->write_character(static_cast<CharType>(0x7A)); write_number(static_cast<uint32_t>(N)); } // LCOV_EXCL_START - else if (N <= 0xffffffffffffffff) + else if (N <= 0xFFFFFFFFFFFFFFFF) { - oa->write_character(static_cast<CharType>(0x7b)); + oa->write_character(static_cast<CharType>(0x7B)); write_number(static_cast<uint64_t>(N)); } // LCOV_EXCL_STOP @@ -5721,25 +5716,25 @@ class binary_writer { write_number(static_cast<uint8_t>(0x80 + N)); } - else if (N <= 0xff) + else if (N <= 0xFF) { oa->write_character(static_cast<CharType>(0x98)); write_number(static_cast<uint8_t>(N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { oa->write_character(static_cast<CharType>(0x99)); write_number(static_cast<uint16_t>(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { - oa->write_character(static_cast<CharType>(0x9a)); + oa->write_character(static_cast<CharType>(0x9A)); write_number(static_cast<uint32_t>(N)); } // LCOV_EXCL_START - else if (N <= 0xffffffffffffffff) + else if (N <= 0xFFFFFFFFFFFFFFFF) { - oa->write_character(static_cast<CharType>(0x9b)); + oa->write_character(static_cast<CharType>(0x9B)); write_number(static_cast<uint64_t>(N)); } // LCOV_EXCL_STOP @@ -5758,27 +5753,27 @@ class 
binary_writer const auto N = j.m_value.object->size(); if (N <= 0x17) { - write_number(static_cast<uint8_t>(0xa0 + N)); + write_number(static_cast<uint8_t>(0xA0 + N)); } - else if (N <= 0xff) + else if (N <= 0xFF) { - oa->write_character(static_cast<CharType>(0xb8)); + oa->write_character(static_cast<CharType>(0xB8)); write_number(static_cast<uint8_t>(N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { - oa->write_character(static_cast<CharType>(0xb9)); + oa->write_character(static_cast<CharType>(0xB9)); write_number(static_cast<uint16_t>(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { - oa->write_character(static_cast<CharType>(0xba)); + oa->write_character(static_cast<CharType>(0xBA)); write_number(static_cast<uint32_t>(N)); } // LCOV_EXCL_START - else if (N <= 0xffffffffffffffff) + else if (N <= 0xFFFFFFFFFFFFFFFF) { - oa->write_character(static_cast<CharType>(0xbb)); + oa->write_character(static_cast<CharType>(0xBB)); write_number(static_cast<uint64_t>(N)); } // LCOV_EXCL_STOP @@ -5806,15 +5801,15 @@ class binary_writer { case value_t::null: // nil { - oa->write_character(static_cast<CharType>(0xc0)); + oa->write_character(static_cast<CharType>(0xC0)); break; } case value_t::boolean: // true and false { oa->write_character(j.m_value.boolean - ? static_cast<CharType>(0xc3) - : static_cast<CharType>(0xc2)); + ? static_cast<CharType>(0xC3) + : static_cast<CharType>(0xC2)); break; } @@ -5833,25 +5828,25 @@ class binary_writer else if (j.m_value.number_unsigned <= (std::numeric_limits<uint8_t>::max)()) { // uint 8 - oa->write_character(static_cast<CharType>(0xcc)); + oa->write_character(static_cast<CharType>(0xCC)); write_number(static_cast<uint8_t>(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint16_t>::max)()) { // uint 16 - oa->write_character(static_cast<CharType>(0xcd)); + oa->write_character(static_cast<CharType>(0xCD)); write_number(static_cast<uint16_t>(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint32_t>::max)()) { // uint 32 - oa->write_character(static_cast<CharType>(0xce)); + oa->write_character(static_cast<CharType>(0xCE)); write_number(static_cast<uint32_t>(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint64_t>::max)()) { // uint 64 - oa->write_character(static_cast<CharType>(0xcf)); + oa->write_character(static_cast<CharType>(0xCF)); write_number(static_cast<uint64_t>(j.m_value.number_integer)); } } @@ -5866,28 +5861,28 @@ class binary_writer j.m_value.number_integer <= (std::numeric_limits<int8_t>::max)()) { // int 8 - oa->write_character(static_cast<CharType>(0xd0)); + oa->write_character(static_cast<CharType>(0xD0)); write_number(static_cast<int8_t>(j.m_value.number_integer)); } else if (j.m_value.number_integer >= (std::numeric_limits<int16_t>::min)() and j.m_value.number_integer <= (std::numeric_limits<int16_t>::max)()) { // int 16 - oa->write_character(static_cast<CharType>(0xd1)); + oa->write_character(static_cast<CharType>(0xD1)); write_number(static_cast<int16_t>(j.m_value.number_integer)); } else if (j.m_value.number_integer >= (std::numeric_limits<int32_t>::min)() and j.m_value.number_integer <= (std::numeric_limits<int32_t>::max)()) { // int 32 - oa->write_character(static_cast<CharType>(0xd2)); + oa->write_character(static_cast<CharType>(0xD2)); write_number(static_cast<int32_t>(j.m_value.number_integer)); } else if (j.m_value.number_integer >= (std::numeric_limits<int64_t>::min)() and j.m_value.number_integer 
<= (std::numeric_limits<int64_t>::max)()) { // int 64 - oa->write_character(static_cast<CharType>(0xd3)); + oa->write_character(static_cast<CharType>(0xD3)); write_number(static_cast<int64_t>(j.m_value.number_integer)); } } @@ -5904,25 +5899,25 @@ class binary_writer else if (j.m_value.number_unsigned <= (std::numeric_limits<uint8_t>::max)()) { // uint 8 - oa->write_character(static_cast<CharType>(0xcc)); + oa->write_character(static_cast<CharType>(0xCC)); write_number(static_cast<uint8_t>(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint16_t>::max)()) { // uint 16 - oa->write_character(static_cast<CharType>(0xcd)); + oa->write_character(static_cast<CharType>(0xCD)); write_number(static_cast<uint16_t>(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint32_t>::max)()) { // uint 32 - oa->write_character(static_cast<CharType>(0xce)); + oa->write_character(static_cast<CharType>(0xCE)); write_number(static_cast<uint32_t>(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits<uint64_t>::max)()) { // uint 64 - oa->write_character(static_cast<CharType>(0xcf)); + oa->write_character(static_cast<CharType>(0xCF)); write_number(static_cast<uint64_t>(j.m_value.number_integer)); } break; @@ -5930,7 +5925,7 @@ class binary_writer case value_t::number_float: // float 64 { - oa->write_character(static_cast<CharType>(0xcb)); + oa->write_character(static_cast<CharType>(0xCB)); write_number(j.m_value.number_float); break; } @@ -5942,24 +5937,24 @@ class binary_writer if (N <= 31) { // fixstr - write_number(static_cast<uint8_t>(0xa0 | N)); + write_number(static_cast<uint8_t>(0xA0 | N)); } else if (N <= 255) { // str 8 - oa->write_character(static_cast<CharType>(0xd9)); + oa->write_character(static_cast<CharType>(0xD9)); write_number(static_cast<uint8_t>(N)); } else if (N <= 65535) { // str 16 - oa->write_character(static_cast<CharType>(0xda)); + oa->write_character(static_cast<CharType>(0xDA)); write_number(static_cast<uint16_t>(N)); } else if (N <= 4294967295) { // str 32 - oa->write_character(static_cast<CharType>(0xdb)); + oa->write_character(static_cast<CharType>(0xDB)); write_number(static_cast<uint32_t>(N)); } @@ -5979,16 +5974,16 @@ class binary_writer // fixarray write_number(static_cast<uint8_t>(0x90 | N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { // array 16 - oa->write_character(static_cast<CharType>(0xdc)); + oa->write_character(static_cast<CharType>(0xDC)); write_number(static_cast<uint16_t>(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { // array 32 - oa->write_character(static_cast<CharType>(0xdd)); + oa->write_character(static_cast<CharType>(0xDD)); write_number(static_cast<uint32_t>(N)); } @@ -6007,18 +6002,18 @@ class binary_writer if (N <= 15) { // fixmap - write_number(static_cast<uint8_t>(0x80 | (N & 0xf))); + write_number(static_cast<uint8_t>(0x80 | (N & 0xF))); } else if (N <= 65535) { // map 16 - oa->write_character(static_cast<CharType>(0xde)); + oa->write_character(static_cast<CharType>(0xDE)); write_number(static_cast<uint16_t>(N)); } else if (N <= 4294967295) { // map 32 - oa->write_character(static_cast<CharType>(0xdf)); + oa->write_character(static_cast<CharType>(0xDF)); write_number(static_cast<uint32_t>(N)); } @@ -6363,9 +6358,9 @@ class serializer case 0x05: case 0x06: case 0x07: - case 0x0b: - case 0x0e: - case 0x0f: + case 0x0B: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -6376,12 +6371,12 @@ class serializer case 
0x17: case 0x18: case 0x19: - case 0x1a: - case 0x1b: - case 0x1c: - case 0x1d: - case 0x1e: - case 0x1f: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: { // from c (1 byte) to \uxxxx (6 bytes) res += 5; @@ -6393,12 +6388,8 @@ class serializer if (ensure_ascii and (s[i] & 0x80 or s[i] == 0x7F)) { const auto bytes = bytes_following(static_cast<uint8_t>(s[i])); - if (bytes == std::string::npos) - { - // invalid characters are treated as is, so no - // additional space will be used - break; - } + // invalid characters will be detected by throw_if_invalid_utf8 + assert (bytes != std::string::npos); if (bytes == 3) { @@ -6492,6 +6483,8 @@ class serializer */ void dump_escaped(const string_t& s, const bool ensure_ascii) const { + throw_if_invalid_utf8(s); + const auto space = extra_space(s, ensure_ascii); if (space == 0) { @@ -6514,7 +6507,7 @@ class serializer break; } - case '\\': // reverse solidus (0x5c) + case '\\': // reverse solidus (0x5C) { // nothing to change pos += 2; @@ -6528,21 +6521,21 @@ class serializer break; } - case '\f': // formfeed (0x0c) + case '\f': // formfeed (0x0C) { result[pos + 1] = 'f'; pos += 2; break; } - case '\n': // newline (0x0a) + case '\n': // newline (0x0A) { result[pos + 1] = 'n'; pos += 2; break; } - case '\r': // carriage return (0x0d) + case '\r': // carriage return (0x0D) { result[pos + 1] = 'r'; pos += 2; @@ -6564,21 +6557,18 @@ class serializer (ensure_ascii and (s[i] & 0x80 or s[i] == 0x7F))) { const auto bytes = bytes_following(static_cast<uint8_t>(s[i])); - if (bytes == std::string::npos) - { - // copy invalid character as is - result[pos++] = s[i]; - break; - } + // invalid characters will be detected by throw_if_invalid_utf8 + assert (bytes != std::string::npos); // check that the additional bytes are present assert(i + bytes < s.size()); - // to use \uxxxx escaping, we first need to caluclate + // to use \uxxxx escaping, we first need to calculate // the codepoint from the UTF-8 bytes int codepoint = 0; - assert(0 <= bytes and bytes <= 3); + // bytes is unsigned type: + assert(bytes <= 3); switch (bytes) { case 0: @@ -6641,11 +6631,10 @@ class serializer @param[in] x integer number (signed or unsigned) to dump @tparam NumberType either @a number_integer_t or @a number_unsigned_t */ - template < - typename NumberType, - detail::enable_if_t<std::is_same<NumberType, number_unsigned_t>::value or - std::is_same<NumberType, number_integer_t>::value, - int> = 0 > + template<typename NumberType, detail::enable_if_t< + std::is_same<NumberType, number_unsigned_t>::value or + std::is_same<NumberType, number_integer_t>::value, + int> = 0> void dump_integer(NumberType x) { // special case for "0" @@ -6743,6 +6732,87 @@ class serializer } } + /*! + @brief check whether a string is UTF-8 encoded + + The function checks each byte of a string whether it is UTF-8 encoded. The + result of the check is stored in the @a state parameter. The function must + be called initially with state 0 (accept). State 1 means the string must + be rejected, because the current byte is not allowed. If the string is + completely processed, but the state is non-zero, the string ended + prematurely; that is, the last byte indicated more bytes should have + followed. + + @param[in,out] state the state of the decoding + @param[in] byte next byte to decode + + @note The function has been edited: a std::array is used and the code + point is not calculated. 
+ + @copyright Copyright (c) 2008-2009 Bjoern Hoehrmann <bjoern@hoehrmann.de> + @sa http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + */ + static void decode(uint8_t& state, const uint8_t byte) + { + static const std::array<uint8_t, 400> utf8d = + { + { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00..1F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20..3F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40..5F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60..7F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 80..9F + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // A0..BF + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // C0..DF + 0xA, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, // E0..EF + 0xB, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, // F0..FF + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, // s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, // s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, // s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // s7..s8 + } + }; + + const uint8_t type = utf8d[byte]; + state = utf8d[256u + state * 16u + type]; + } + + /*! 
+ @brief throw an exception if a string is not UTF-8 encoded + + @param[in] str UTF-8 string to check + @throw type_error.316 if passed string is not UTF-8 encoded + + @since version 3.0.0 + */ + static void throw_if_invalid_utf8(const std::string& str) + { + // start with state 0 (= accept) + uint8_t state = 0; + + for (size_t i = 0; i < str.size(); ++i) + { + const auto byte = static_cast<uint8_t>(str[i]); + decode(state, byte); + if (state == 1) + { + // state 1 means reject + std::stringstream ss; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << static_cast<int>(byte); + JSON_THROW(type_error::create(316, "invalid UTF-8 byte at index " + std::to_string(i) + ": 0x" + ss.str())); + } + } + + if (state != 0) + { + // we finish reading, but do not accept: string was incomplete + std::stringstream ss; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << static_cast<int>(static_cast<uint8_t>(str.back())); + JSON_THROW(type_error::create(316, "incomplete UTF-8 string; last byte: 0x" + ss.str())); + } + } + private: /// the output of the serializer output_adapter_t<char> o = nullptr; @@ -6771,27 +6841,20 @@ class json_ref using value_type = BasicJsonType; json_ref(value_type&& value) - : owned_value(std::move(value)), - value_ref(&owned_value), - is_rvalue(true) + : owned_value(std::move(value)), value_ref(&owned_value), is_rvalue(true) {} json_ref(const value_type& value) - : value_ref(const_cast<value_type*>(&value)), - is_rvalue(false) + : value_ref(const_cast<value_type*>(&value)), is_rvalue(false) {} json_ref(std::initializer_list<json_ref> init) - : owned_value(init), - value_ref(&owned_value), - is_rvalue(true) + : owned_value(init), value_ref(&owned_value), is_rvalue(true) {} - template <class... Args> + template<class... Args> json_ref(Args&& ... args) - : owned_value(std::forward<Args>(args)...), - value_ref(&owned_value), - is_rvalue(true) + : owned_value(std::forward<Args>(args)...), value_ref(&owned_value), is_rvalue(true) {} // class should be movable only @@ -6949,6 +7012,27 @@ class json_pointer return to_string(); } + /*! + @param[in] s reference token to be converted into an array index + + @return integer representation of @a s + + @throw out_of_range.404 if string @a s could not be converted to an integer + */ + static int array_index(const std::string& s) + { + size_t processed_chars = 0; + const int res = std::stoi(s, &processed_chars); + + // check if the string was completely read + if (JSON_UNLIKELY(processed_chars != s.size())) + { + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + s + "'")); + } + + return res; + } + private: /*! @brief remove and return last reference pointer @@ -6984,7 +7068,6 @@ class json_pointer return result; } - /*! 
@brief create and return a reference to the pointed to value @@ -7320,11 +7403,11 @@ class basic_json public: using value_t = detail::value_t; - // forward declarations + /// @copydoc nlohmann::json_pointer using json_pointer = ::nlohmann::json_pointer; template<typename T, typename SFINAE> using json_serializer = JSONSerializer<T, SFINAE>; - + /// helper type for initializer lists of basic_json values using initializer_list_t = std::initializer_list<detail::json_ref<basic_json>>; //////////////// @@ -7436,7 +7519,7 @@ class basic_json result["url"] = "https://github.com/nlohmann/json"; result["version"] = { - {"string", "2.1.1"}, {"major", 2}, {"minor", 1}, {"patch", 1} + {"string", "3.0.1"}, {"major", 3}, {"minor", 0}, {"patch", 1} }; #ifdef _WIN32 @@ -7489,6 +7572,14 @@ class basic_json /// the template arguments passed to class @ref basic_json. /// @{ +#if defined(JSON_HAS_CPP_14) + // Use transparent comparator if possible, combined with perfect forwarding + // on find() and count() calls prevents unnecessary string construction. + using object_comparator_t = std::less<>; +#else + using object_comparator_t = std::less<StringType>; +#endif + /*! @brief a type for an object @@ -7572,14 +7663,6 @@ class basic_json 7159](http://rfc7159.net/rfc7159), because any order implements the specified "unordered" nature of JSON objects. */ - -#if defined(JSON_HAS_CPP_14) - // Use transparent comparator if possible, combined with perfect forwarding - // on find() and count() calls prevents unnecessary string construction. - using object_comparator_t = std::less<>; -#else - using object_comparator_t = std::less<StringType>; -#endif using object_t = ObjectType<StringType, basic_json, object_comparator_t, @@ -7931,12 +8014,14 @@ class basic_json static T* create(Args&& ... 
args) { AllocatorType<T> alloc; + using AllocatorTraits = std::allocator_traits<AllocatorType<T>>; + auto deleter = [&](T * object) { - alloc.deallocate(object, 1); + AllocatorTraits::deallocate(alloc, object, 1); }; - std::unique_ptr<T, decltype(deleter)> object(alloc.allocate(1), deleter); - alloc.construct(object.get(), std::forward<Args>(args)...); + std::unique_ptr<T, decltype(deleter)> object(AllocatorTraits::allocate(alloc, 1), deleter); + AllocatorTraits::construct(alloc, object.get(), std::forward<Args>(args)...); assert(object != nullptr); return object.release(); } @@ -8054,7 +8139,7 @@ class basic_json object = nullptr; // silence warning, see #821 if (JSON_UNLIKELY(t == value_t::null)) { - JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 2.1.1")); // LCOV_EXCL_LINE + JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.0.1")); // LCOV_EXCL_LINE } break; } @@ -8104,24 +8189,24 @@ class basic_json case value_t::object: { AllocatorType<object_t> alloc; - alloc.destroy(object); - alloc.deallocate(object, 1); + std::allocator_traits<decltype(alloc)>::destroy(alloc, object); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, object, 1); break; } case value_t::array: { AllocatorType<array_t> alloc; - alloc.destroy(array); - alloc.deallocate(array, 1); + std::allocator_traits<decltype(alloc)>::destroy(alloc, array); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, array, 1); break; } case value_t::string: { AllocatorType<string_t> alloc; - alloc.destroy(string); - alloc.deallocate(string, 1); + std::allocator_traits<decltype(alloc)>::destroy(alloc, string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, string, 1); break; } @@ -8154,6 +8239,21 @@ class basic_json // JSON parser callback // ////////////////////////// + /*! + @brief parser event types + + The parser callback distinguishes the following events: + - `object_start`: the parser read `{` and started to process a JSON object + - `key`: the parser read a key of a value in an object + - `object_end`: the parser read `}` and finished processing a JSON object + - `array_start`: the parser read `[` and started to process a JSON array + - `array_end`: the parser read `]` and finished processing a JSON array + - `value`: the parser finished reading a JSON value + + @image html callback_events.png "Example when certain parse events are triggered" + + @sa @ref parser_callback_t for more information and examples + */ using parse_event_t = typename parser::parse_event_t; /*! @@ -8280,7 +8380,7 @@ class basic_json @brief create a JSON value This is a "catch all" constructor for all compatible JSON types; that is, - types for which a `to_json()` method exsits. The constructor forwards the + types for which a `to_json()` method exists. The constructor forwards the parameter @a val to that method (to `json_serializer<U>::to_json` method with `U = uncvref_t<CompatibleType>`, to be exact). @@ -8952,11 +9052,14 @@ class basic_json @param[in] indent_char The character to use for indentation if @a indent is greater than `0`. The default is ` ` (space). @param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters - in the output are escaped with \uXXXX sequences, and the result consists + in the output are escaped with `\uXXXX` sequences, and the result consists of ASCII characters only. 
@return string containing the serialization of the JSON value + @throw type_error.316 if a string stored inside the JSON value is not + UTF-8 encoded + @complexity Linear. @exceptionsafety Strong guarantee: if an exception is thrown, there are no @@ -8968,8 +9071,8 @@ class basic_json @see https://docs.python.org/2/library/json.html#json.dump - @since version 1.0.0; indentation character @a indent_char and option - @a ensure_ascii added in version 3.0.0 + @since version 1.0.0; indentation character @a indent_char, option + @a ensure_ascii and exceptions added in version 3.0.0 */ string_t dump(const int indent = -1, const char indent_char = ' ', const bool ensure_ascii = false) const @@ -9003,7 +9106,7 @@ class basic_json string | value_t::string number (integer) | value_t::number_integer number (unsigned integer) | value_t::number_unsigned - number (foating-point) | value_t::number_float + number (floating-point) | value_t::number_float object | value_t::object array | value_t::array discarded | value_t::discarded @@ -9507,11 +9610,9 @@ class basic_json @since version 2.1.0 */ - template < - typename BasicJsonType, - detail::enable_if_t<std::is_same<typename std::remove_const<BasicJsonType>::type, - basic_json_t>::value, - int> = 0 > + template<typename BasicJsonType, detail::enable_if_t< + std::is_same<typename std::remove_const<BasicJsonType>::type, basic_json_t>::value, + int> = 0> basic_json get() const { return *this; @@ -9556,14 +9657,12 @@ class basic_json @since version 2.1.0 */ - template < - typename ValueTypeCV, - typename ValueType = detail::uncvref_t<ValueTypeCV>, - detail::enable_if_t < - not std::is_same<basic_json_t, ValueType>::value and - detail::has_from_json<basic_json_t, ValueType>::value and - not detail::has_non_default_from_json<basic_json_t, ValueType>::value, - int > = 0 > + template<typename ValueTypeCV, typename ValueType = detail::uncvref_t<ValueTypeCV>, + detail::enable_if_t < + not std::is_same<basic_json_t, ValueType>::value and + detail::has_from_json<basic_json_t, ValueType>::value and + not detail::has_non_default_from_json<basic_json_t, ValueType>::value, + int> = 0> ValueType get() const noexcept(noexcept( JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>(), std::declval<ValueType&>()))) { @@ -9611,12 +9710,10 @@ class basic_json @since version 2.1.0 */ - template < - typename ValueTypeCV, - typename ValueType = detail::uncvref_t<ValueTypeCV>, - detail::enable_if_t<not std::is_same<basic_json_t, ValueType>::value and - detail::has_non_default_from_json<basic_json_t, - ValueType>::value, int> = 0 > + template<typename ValueTypeCV, typename ValueType = detail::uncvref_t<ValueTypeCV>, + detail::enable_if_t<not std::is_same<basic_json_t, ValueType>::value and + detail::has_non_default_from_json<basic_json_t, ValueType>::value, + int> = 0> ValueType get() const noexcept(noexcept( JSONSerializer<ValueTypeCV>::from_json(std::declval<const basic_json_t&>()))) { @@ -10110,7 +10207,7 @@ class basic_json @return const reference to the element at index @a idx - @throw type_error.305 if the JSON value is not an array; in that cases, + @throw type_error.305 if the JSON value is not an array; in that case, using the [] operator with an index makes no sense. @complexity Constant. @@ -10193,7 +10290,7 @@ class basic_json @pre The element with key @a key must exist. 
**This precondition is enforced with an assertion.** - @throw type_error.305 if the JSON value is not an object; in that cases, + @throw type_error.305 if the JSON value is not an object; in that case, using the [] operator with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10282,7 +10379,7 @@ class basic_json @pre The element with key @a key must exist. **This precondition is enforced with an assertion.** - @throw type_error.305 if the JSON value is not an object; in that cases, + @throw type_error.305 if the JSON value is not an object; in that case, using the [] operator with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10342,7 +10439,7 @@ class basic_json @return copy of the element at key @a key or @a default_value if @a key is not found - @throw type_error.306 if the JSON value is not an objec; in that cases, + @throw type_error.306 if the JSON value is not an object; in that case, using `value()` with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10415,7 +10512,7 @@ class basic_json @return copy of the element at key @a key or @a default_value if @a key is not found - @throw type_error.306 if the JSON value is not an objec; in that cases, + @throw type_error.306 if the JSON value is not an objec; in that case, using `value()` with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10619,8 +10716,8 @@ class basic_json if (is_string()) { AllocatorType<string_t> alloc; - alloc.destroy(m_value.string); - alloc.deallocate(m_value.string, 1); + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1); m_value.string = nullptr; } @@ -10725,8 +10822,8 @@ class basic_json if (is_string()) { AllocatorType<string_t> alloc; - alloc.destroy(m_value.string); - alloc.deallocate(m_value.string, 1); + std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string); + std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1); m_value.string = nullptr; } @@ -11220,22 +11317,62 @@ class basic_json reference to the JSON values is returned, so there is no access to the underlying iterator. + For loop without iterator_wrapper: + + @code{cpp} + for (auto it = j_object.begin(); it != j_object.end(); ++it) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + Range-based for loop without iterator proxy: + + @code{cpp} + for (auto it : j_object) + { + // "it" is of type json::reference and has no key() member + std::cout << "value: " << it << '\n'; + } + @endcode + + Range-based for loop with iterator proxy: + + @code{cpp} + for (auto it : json::iterator_wrapper(j_object)) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + @note When iterating over an array, `key()` will return the index of the + element as string (see example). + + @param[in] ref reference to a JSON value + @return iteration proxy object wrapping @a ref with an interface to use in + range-based for loops + @liveexample{The following code shows how the wrapper is used,iterator_wrapper} + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + @note The name of this function is not yet final and may change in the future. 
*/ - static iteration_proxy<iterator> iterator_wrapper(reference cont) + static iteration_proxy<iterator> iterator_wrapper(reference ref) { - return iteration_proxy<iterator>(cont); + return iteration_proxy<iterator>(ref); } /*! @copydoc iterator_wrapper(reference) */ - static iteration_proxy<const_iterator> iterator_wrapper(const_reference cont) + static iteration_proxy<const_iterator> iterator_wrapper(const_reference ref) { - return iteration_proxy<const_iterator>(cont); + return iteration_proxy<const_iterator>(ref); } /// @} @@ -12120,7 +12257,7 @@ class basic_json JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(j.type_name()))); } - for (auto it = j.begin(); it != j.end(); ++it) + for (auto it = j.cbegin(); it != j.cend(); ++it) { m_value.object->operator[](it.key()) = it.value(); } @@ -12341,7 +12478,7 @@ class basic_json [comparison function](https://github.com/mariokonrad/marnav/blob/master/src/marnav/math/floatingpoint.hpp#L34-#L39) could be used, for instance @code {.cpp} - template <typename T, typename = typename std::enable_if<std::is_floating_point<T>::value, T>::type> + template<typename T, typename = typename std::enable_if<std::is_floating_point<T>::value, T>::type> inline bool is_same(T a, T b, T epsilon = std::numeric_limits<T>::epsilon()) noexcept { return std::abs(a - b) <= epsilon; @@ -12769,7 +12906,7 @@ class basic_json `std::setw(4)` on @a o sets the indentation level to `4` and the serialization result is the same as calling `dump(4)`. - - The indentation characrer can be controlled with the member variable + - The indentation character can be controlled with the member variable `fill` of the output stream @a o. For instance, the manipulator `std::setfill('\\t')` sets indentation to use a tab character rather than the default space character. @@ -12779,12 +12916,15 @@ class basic_json @return the stream @a o + @throw type_error.316 if a string stored inside the JSON value is not + UTF-8 encoded + @complexity Linear. 
@liveexample{The example below shows the serialization with different parameters to `width` to adjust the indentation level.,operator_serialize} - @since version 1.0.0; indentaction character added in version 3.0.0 + @since version 1.0.0; indentation character added in version 3.0.0 */ friend std::ostream& operator<<(std::ostream& o, const basic_json& j) { @@ -13124,40 +13264,40 @@ class basic_json JSON value type | value/range | CBOR type | first byte --------------- | ------------------------------------------ | ---------------------------------- | --------------- - null | `null` | Null | 0xf6 - boolean | `true` | True | 0xf5 - boolean | `false` | False | 0xf4 - number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3b - number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3a + null | `null` | Null | 0xF6 + boolean | `true` | True | 0xF5 + boolean | `false` | False | 0xF4 + number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3B + number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3A number_integer | -32768..-129 | Negative integer (2 bytes follow) | 0x39 number_integer | -128..-25 | Negative integer (1 byte follow) | 0x38 number_integer | -24..-1 | Negative integer | 0x20..0x37 number_integer | 0..23 | Integer | 0x00..0x17 number_integer | 24..255 | Unsigned integer (1 byte follow) | 0x18 number_integer | 256..65535 | Unsigned integer (2 bytes follow) | 0x19 - number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1a - number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1b + number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A + number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B number_unsigned | 0..23 | Integer | 0x00..0x17 number_unsigned | 24..255 | Unsigned integer (1 byte follow) | 0x18 number_unsigned | 256..65535 | Unsigned integer (2 bytes follow) | 0x19 - number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1a - number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1b - number_float | *any value* | Double-Precision Float | 0xfb + number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A + number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B + number_float | *any value* | Double-Precision Float | 0xFB string | *length*: 0..23 | UTF-8 string | 0x60..0x77 string | *length*: 23..255 | UTF-8 string (1 byte follow) | 0x78 string | *length*: 256..65535 | UTF-8 string (2 bytes follow) | 0x79 - string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7a - string | *length*: 4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7b + string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7A + string | *length*: 4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7B array | *size*: 0..23 | array | 0x80..0x97 array | *size*: 23..255 | array (1 byte follow) | 0x98 array | *size*: 256..65535 | array (2 bytes follow) | 0x99 - array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9a - array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9b - object | *size*: 0..23 | map | 0xa0..0xb7 - object | *size*: 23..255 | map (1 byte follow) | 0xb8 - object | *size*: 256..65535 | map (2 bytes follow) | 0xb9 - object | *size*: 65536..4294967295 
| map (4 bytes follow) | 0xba - object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xbb + array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9A + array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9B + object | *size*: 0..23 | map | 0xA0..0xB7 + object | *size*: 23..255 | map (1 byte follow) | 0xB8 + object | *size*: 256..65535 | map (2 bytes follow) | 0xB9 + object | *size*: 65536..4294967295 | map (4 bytes follow) | 0xBA + object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xBB @note The mapping is **complete** in the sense that any JSON value type can be converted to a CBOR value. @@ -13167,20 +13307,20 @@ class basic_json function which serializes NaN or Infinity to `null`. @note The following CBOR types are not used in the conversion: - - byte strings (0x40..0x5f) - - UTF-8 strings terminated by "break" (0x7f) - - arrays terminated by "break" (0x9f) - - maps terminated by "break" (0xbf) - - date/time (0xc0..0xc1) - - bignum (0xc2..0xc3) - - decimal fraction (0xc4) - - bigfloat (0xc5) - - tagged items (0xc6..0xd4, 0xd8..0xdb) - - expected conversions (0xd5..0xd7) - - simple values (0xe0..0xf3, 0xf8) - - undefined (0xf7) - - half and single-precision floats (0xf9-0xfa) - - break (0xff) + - byte strings (0x40..0x5F) + - UTF-8 strings terminated by "break" (0x7F) + - arrays terminated by "break" (0x9F) + - maps terminated by "break" (0xBF) + - date/time (0xC0..0xC1) + - bignum (0xC2..0xC3) + - decimal fraction (0xC4) + - bigfloat (0xC5) + - tagged items (0xC6..0xD4, 0xD8..0xDB) + - expected conversions (0xD5..0xD7) + - simple values (0xE0..0xF3, 0xF8) + - undefined (0xF7) + - half and single-precision floats (0xF9-0xFA) + - break (0xFF) @param[in] j JSON value to serialize @return MessagePack serialization as byte vector @@ -13226,35 +13366,35 @@ class basic_json JSON value type | value/range | MessagePack type | first byte --------------- | --------------------------------- | ---------------- | ---------- - null | `null` | nil | 0xc0 - boolean | `true` | true | 0xc3 - boolean | `false` | false | 0xc2 - number_integer | -9223372036854775808..-2147483649 | int64 | 0xd3 - number_integer | -2147483648..-32769 | int32 | 0xd2 - number_integer | -32768..-129 | int16 | 0xd1 - number_integer | -128..-33 | int8 | 0xd0 - number_integer | -32..-1 | negative fixint | 0xe0..0xff - number_integer | 0..127 | positive fixint | 0x00..0x7f - number_integer | 128..255 | uint 8 | 0xcc - number_integer | 256..65535 | uint 16 | 0xcd - number_integer | 65536..4294967295 | uint 32 | 0xce - number_integer | 4294967296..18446744073709551615 | uint 64 | 0xcf - number_unsigned | 0..127 | positive fixint | 0x00..0x7f - number_unsigned | 128..255 | uint 8 | 0xcc - number_unsigned | 256..65535 | uint 16 | 0xcd - number_unsigned | 65536..4294967295 | uint 32 | 0xce - number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xcf - number_float | *any value* | float 64 | 0xcb - string | *length*: 0..31 | fixstr | 0xa0..0xbf - string | *length*: 32..255 | str 8 | 0xd9 - string | *length*: 256..65535 | str 16 | 0xda - string | *length*: 65536..4294967295 | str 32 | 0xdb - array | *size*: 0..15 | fixarray | 0x90..0x9f - array | *size*: 16..65535 | array 16 | 0xdc - array | *size*: 65536..4294967295 | array 32 | 0xdd - object | *size*: 0..15 | fix map | 0x80..0x8f - object | *size*: 16..65535 | map 16 | 0xde - object | *size*: 65536..4294967295 | map 32 | 0xdf + null | `null` | nil | 0xC0 + boolean | `true` | true | 0xC3 + boolean | 
`false` | false | 0xC2 + number_integer | -9223372036854775808..-2147483649 | int64 | 0xD3 + number_integer | -2147483648..-32769 | int32 | 0xD2 + number_integer | -32768..-129 | int16 | 0xD1 + number_integer | -128..-33 | int8 | 0xD0 + number_integer | -32..-1 | negative fixint | 0xE0..0xFF + number_integer | 0..127 | positive fixint | 0x00..0x7F + number_integer | 128..255 | uint 8 | 0xCC + number_integer | 256..65535 | uint 16 | 0xCD + number_integer | 65536..4294967295 | uint 32 | 0xCE + number_integer | 4294967296..18446744073709551615 | uint 64 | 0xCF + number_unsigned | 0..127 | positive fixint | 0x00..0x7F + number_unsigned | 128..255 | uint 8 | 0xCC + number_unsigned | 256..65535 | uint 16 | 0xCD + number_unsigned | 65536..4294967295 | uint 32 | 0xCE + number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xCF + number_float | *any value* | float 64 | 0xCB + string | *length*: 0..31 | fixstr | 0xA0..0xBF + string | *length*: 32..255 | str 8 | 0xD9 + string | *length*: 256..65535 | str 16 | 0xDA + string | *length*: 65536..4294967295 | str 32 | 0xDB + array | *size*: 0..15 | fixarray | 0x90..0x9F + array | *size*: 16..65535 | array 16 | 0xDC + array | *size*: 65536..4294967295 | array 32 | 0xDD + object | *size*: 0..15 | fix map | 0x80..0x8F + object | *size*: 16..65535 | map 16 | 0xDE + object | *size*: 65536..4294967295 | map 32 | 0xDF @note The mapping is **complete** in the sense that any JSON value type can be converted to a MessagePack value. @@ -13265,10 +13405,10 @@ class basic_json - objects with more than 4294967295 elements @note The following MessagePack types are not used in the conversion: - - bin 8 - bin 32 (0xc4..0xc6) - - ext 8 - ext 32 (0xc7..0xc9) - - float 32 (0xca) - - fixext 1 - fixext 16 (0xd4..0xd8) + - bin 8 - bin 32 (0xC4..0xC6) + - ext 8 - ext 32 (0xC7..0xC9) + - float 32 (0xCA) + - fixext 1 - fixext 16 (0xD4..0xD8) @note Any MessagePack output created @ref to_msgpack can be successfully parsed by @ref from_msgpack. 
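As a rough sketch of the binary-format interface documented in the two tables above (assuming the public `to_cbor`/`from_cbor` and `to_msgpack`/`from_msgpack` entry points of `nlohmann::json` and an `<nlohmann/json.hpp>` include path, which may differ for the copy vendored in this tree), a round trip through both formats looks roughly like this:

@code{.cpp}
#include <cassert>
#include <cstdint>
#include <vector>
#include <nlohmann/json.hpp>   // assumed include path; the vendored header may live elsewhere

using nlohmann::json;

int main()
{
    // A small value touching several rows of the tables above.
    json j = {{"pi", 3.141}, {"happy", true}, {"list", {1, 2, 3}}, {"name", "Niels"}};

    // CBOR round trip; the first byte is 0xA4, a map with four entries.
    std::vector<std::uint8_t> cbor = json::to_cbor(j);
    assert(json::from_cbor(cbor) == j);

    // MessagePack round trip; the first byte is 0x84, a fixmap with four entries.
    std::vector<std::uint8_t> msgpack = json::to_msgpack(j);
    assert(json::from_msgpack(msgpack) == j);

    return 0;
}
@endcode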
@@ -13322,51 +13462,51 @@ class basic_json Integer | number_unsigned | 0x00..0x17 Unsigned integer | number_unsigned | 0x18 Unsigned integer | number_unsigned | 0x19 - Unsigned integer | number_unsigned | 0x1a - Unsigned integer | number_unsigned | 0x1b + Unsigned integer | number_unsigned | 0x1A + Unsigned integer | number_unsigned | 0x1B Negative integer | number_integer | 0x20..0x37 Negative integer | number_integer | 0x38 Negative integer | number_integer | 0x39 - Negative integer | number_integer | 0x3a - Negative integer | number_integer | 0x3b + Negative integer | number_integer | 0x3A + Negative integer | number_integer | 0x3B Negative integer | number_integer | 0x40..0x57 UTF-8 string | string | 0x60..0x77 UTF-8 string | string | 0x78 UTF-8 string | string | 0x79 - UTF-8 string | string | 0x7a - UTF-8 string | string | 0x7b - UTF-8 string | string | 0x7f + UTF-8 string | string | 0x7A + UTF-8 string | string | 0x7B + UTF-8 string | string | 0x7F array | array | 0x80..0x97 array | array | 0x98 array | array | 0x99 - array | array | 0x9a - array | array | 0x9b - array | array | 0x9f - map | object | 0xa0..0xb7 - map | object | 0xb8 - map | object | 0xb9 - map | object | 0xba - map | object | 0xbb - map | object | 0xbf - False | `false` | 0xf4 - True | `true` | 0xf5 - Nill | `null` | 0xf6 - Half-Precision Float | number_float | 0xf9 - Single-Precision Float | number_float | 0xfa - Double-Precision Float | number_float | 0xfb + array | array | 0x9A + array | array | 0x9B + array | array | 0x9F + map | object | 0xA0..0xB7 + map | object | 0xB8 + map | object | 0xB9 + map | object | 0xBA + map | object | 0xBB + map | object | 0xBF + False | `false` | 0xF4 + True | `true` | 0xF5 + Nill | `null` | 0xF6 + Half-Precision Float | number_float | 0xF9 + Single-Precision Float | number_float | 0xFA + Double-Precision Float | number_float | 0xFB @warning The mapping is **incomplete** in the sense that not all CBOR types can be converted to a JSON value. The following CBOR types are not supported and will yield parse errors (parse_error.112): - - byte strings (0x40..0x5f) - - date/time (0xc0..0xc1) - - bignum (0xc2..0xc3) - - decimal fraction (0xc4) - - bigfloat (0xc5) - - tagged items (0xc6..0xd4, 0xd8..0xdb) - - expected conversions (0xd5..0xd7) - - simple values (0xe0..0xf3, 0xf8) - - undefined (0xf7) + - byte strings (0x40..0x5F) + - date/time (0xC0..0xC1) + - bignum (0xC2..0xC3) + - decimal fraction (0xC4) + - bigfloat (0xC5) + - tagged items (0xC6..0xD4, 0xD8..0xDB) + - expected conversions (0xD5..0xD7) + - simple values (0xE0..0xF3, 0xF8) + - undefined (0xF7) @warning CBOR allows map keys of any type, whereas JSON only allows strings as keys in object values. 
Therefore, CBOR maps with keys @@ -13426,38 +13566,38 @@ class basic_json MessagePack type | JSON value type | first byte ---------------- | --------------- | ---------- - positive fixint | number_unsigned | 0x00..0x7f - fixmap | object | 0x80..0x8f - fixarray | array | 0x90..0x9f - fixstr | string | 0xa0..0xbf - nil | `null` | 0xc0 - false | `false` | 0xc2 - true | `true` | 0xc3 - float 32 | number_float | 0xca - float 64 | number_float | 0xcb - uint 8 | number_unsigned | 0xcc - uint 16 | number_unsigned | 0xcd - uint 32 | number_unsigned | 0xce - uint 64 | number_unsigned | 0xcf - int 8 | number_integer | 0xd0 - int 16 | number_integer | 0xd1 - int 32 | number_integer | 0xd2 - int 64 | number_integer | 0xd3 - str 8 | string | 0xd9 - str 16 | string | 0xda - str 32 | string | 0xdb - array 16 | array | 0xdc - array 32 | array | 0xdd - map 16 | object | 0xde - map 32 | object | 0xdf - negative fixint | number_integer | 0xe0-0xff + positive fixint | number_unsigned | 0x00..0x7F + fixmap | object | 0x80..0x8F + fixarray | array | 0x90..0x9F + fixstr | string | 0xA0..0xBF + nil | `null` | 0xC0 + false | `false` | 0xC2 + true | `true` | 0xC3 + float 32 | number_float | 0xCA + float 64 | number_float | 0xCB + uint 8 | number_unsigned | 0xCC + uint 16 | number_unsigned | 0xCD + uint 32 | number_unsigned | 0xCE + uint 64 | number_unsigned | 0xCF + int 8 | number_integer | 0xD0 + int 16 | number_integer | 0xD1 + int 32 | number_integer | 0xD2 + int 64 | number_integer | 0xD3 + str 8 | string | 0xD9 + str 16 | string | 0xDA + str 32 | string | 0xDB + array 16 | array | 0xDC + array 32 | array | 0xDD + map 16 | object | 0xDE + map 32 | object | 0xDF + negative fixint | number_integer | 0xE0-0xFF @warning The mapping is **incomplete** in the sense that not all MessagePack types can be converted to a JSON value. The following MessagePack types are not supported and will yield parse errors: - - bin 8 - bin 32 (0xc4..0xc6) - - ext 8 - ext 32 (0xc7..0xc9) - - fixext 1 - fixext 16 (0xd4..0xd8) + - bin 8 - bin 32 (0xC4..0xC6) + - ext 8 - ext 32 (0xC7..0xC9) + - fixext 1 - fixext 16 (0xD4..0xD8) @note Any MessagePack output created @ref to_msgpack can be successfully parsed by @ref from_msgpack. @@ -13601,6 +13741,9 @@ class basic_json pointer @a ptr. As `at` provides checked access (and no elements are implicitly inserted), the index '-' is always invalid. See example below. + @throw out_of_range.403 if the JSON pointer describes a key of an object + which cannot be found. See example below. + @throw out_of_range.404 if the JSON pointer @a ptr can not be resolved. See example below. @@ -13641,6 +13784,9 @@ class basic_json pointer @a ptr. As `at` provides checked access (and no elements are implicitly inserted), the index '-' is always invalid. See example below. + @throw out_of_range.403 if the JSON pointer describes a key of an object + which cannot be found. See example below. + @throw out_of_range.404 if the JSON pointer @a ptr can not be resolved. See example below. 
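A minimal sketch of the checked JSON-pointer access described above; the include path and the sample values are illustrative assumptions rather than anything taken from this header:

@code{.cpp}
#include <iostream>
#include <nlohmann/json.hpp>   // assumed include path; the vendored header may live elsewhere

using nlohmann::json;

int main()
{
    json j = {{"numbers", {10, 20, 30}}};

    // Checked access through a JSON pointer; prints 20.
    std::cout << j.at(json::json_pointer("/numbers/1")) << '\n';

    // A key that cannot be found is reported as out_of_range.403,
    // an otherwise unresolvable pointer as out_of_range.404.
    try
    {
        j.at(json::json_pointer("/missing/key"));
    }
    catch (const json::out_of_range& e)
    {
        std::cout << e.what() << '\n';
    }

    return 0;
}
@endcode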
@@ -13856,7 +14002,7 @@ class basic_json } else { - const auto idx = std::stoi(last_path); + const auto idx = json_pointer::array_index(last_path); if (JSON_UNLIKELY(static_cast<size_type>(idx) > parent.size())) { // avoid undefined behavior @@ -13904,7 +14050,7 @@ class basic_json else if (parent.is_array()) { // note erase performs range check - parent.erase(static_cast<size_type>(std::stoi(last_path))); + parent.erase(static_cast<size_type>(json_pointer::array_index(last_path))); } }; @@ -13999,7 +14145,12 @@ class basic_json const json_pointer from_ptr(from_path); // the "from" location must exist - use at() - result[ptr] = result.at(from_ptr); + basic_json v = result.at(from_ptr); + + // The copy is functionally identical to an "add" + // operation at the target location using the value + // specified in the "from" member. + operation_add(ptr, v); break; } @@ -14141,7 +14292,7 @@ class basic_json case value_t::object: { // first pass: traverse this object's elements - for (auto it = source.begin(); it != source.end(); ++it) + for (auto it = source.cbegin(); it != source.cend(); ++it) { // escape the key name to be used in a JSON patch const auto key = json_pointer::escape(it.key()); @@ -14163,7 +14314,7 @@ class basic_json } // second pass: traverse other object's elements - for (auto it = target.begin(); it != target.end(); ++it) + for (auto it = target.cbegin(); it != target.cend(); ++it) { if (source.find(it.key()) == source.end()) { @@ -14256,7 +14407,7 @@ json_pointer::get_and_create(NLOHMANN_BASIC_JSON_TPL& j) const // create an entry in the array JSON_TRY { - result = &result->operator[](static_cast<size_type>(std::stoi(reference_token))); + result = &result->operator[](static_cast<size_type>(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14333,7 +14484,7 @@ json_pointer::get_unchecked(NLOHMANN_BASIC_JSON_TPL* ptr) const JSON_TRY { ptr = &ptr->operator[]( - static_cast<size_type>(std::stoi(reference_token))); + static_cast<size_type>(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14388,7 +14539,7 @@ json_pointer::get_checked(NLOHMANN_BASIC_JSON_TPL* ptr) const // note: at performs range check JSON_TRY { - ptr = &ptr->at(static_cast<size_type>(std::stoi(reference_token))); + ptr = &ptr->at(static_cast<size_type>(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14443,7 +14594,7 @@ json_pointer::get_unchecked(const NLOHMANN_BASIC_JSON_TPL* ptr) const JSON_TRY { ptr = &ptr->operator[]( - static_cast<size_type>(std::stoi(reference_token))); + static_cast<size_type>(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14497,7 +14648,7 @@ json_pointer::get_checked(const NLOHMANN_BASIC_JSON_TPL* ptr) const // note: at performs range check JSON_TRY { - ptr = &ptr->at(static_cast<size_type>(std::stoi(reference_token))); + ptr = &ptr->at(static_cast<size_type>(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { diff --git a/tests/brotli.sh b/tests/brotli.sh new file mode 100644 index 000000000000..645dd4214ec6 --- /dev/null +++ b/tests/brotli.sh @@ -0,0 +1,28 @@ +source common.sh + + +# Only test if we found brotli libraries +# (CLI tool is likely unavailable if libraries are missing) +if [ -n "$HAVE_BROTLI" ]; then + +clearStore +clearCache + +cacheURI="file://$cacheDir?compression=br" + +outPath=$(nix-build dependencies.nix --no-out-link) + +nix copy --to $cacheURI $outPath + +HASH=$(nix hash-path $outPath) + +clearStore +clearCacheCache + +nix copy --from $cacheURI 
$outPath --no-check-sigs + +HASH2=$(nix hash-path $outPath) + +[[ $HASH = $HASH2 ]] + +fi # HAVE_BROTLI diff --git a/tests/build-remote.sh b/tests/build-remote.sh index cf3bb4633183..9bca0f4a3856 100644 --- a/tests/build-remote.sh +++ b/tests/build-remote.sh @@ -2,7 +2,7 @@ source common.sh clearStore -if [[ $(uname) != Linux ]]; then exit; fi +if ! canUseSandbox; then exit; fi if [[ ! $SHELL =~ /nix/store ]]; then exit; fi chmod -R u+w $TEST_ROOT/store0 || true diff --git a/tests/check.nix b/tests/check.nix new file mode 100644 index 000000000000..08aac2fb0a77 --- /dev/null +++ b/tests/check.nix @@ -0,0 +1,17 @@ +with import ./config.nix; + +{ + nondeterministic = mkDerivation { + name = "nondeterministic"; + buildCommand = + '' + mkdir $out + date +%s.%N > $out/date + ''; + }; + + fetchurl = import <nix/fetchurl.nix> { + url = "file://" + toString ./lang/eval-okay-xml.exp.xml; + sha256 = "0kg4sla7ihm8ijr8cb3117fhl99zrc2bwy1jrngsfmkh8bav4m0v"; + }; +} diff --git a/tests/check.sh b/tests/check.sh new file mode 100644 index 000000000000..b05e40ffbeea --- /dev/null +++ b/tests/check.sh @@ -0,0 +1,32 @@ +source common.sh + +clearStore + +nix-build dependencies.nix --no-out-link +nix-build dependencies.nix --no-out-link --check + +nix-build check.nix -A nondeterministic --no-out-link +(! nix-build check.nix -A nondeterministic --no-out-link --check 2> $TEST_ROOT/log) +grep 'may not be deterministic' $TEST_ROOT/log + +clearStore + +nix-build dependencies.nix --no-out-link --repeat 3 + +(! nix-build check.nix -A nondeterministic --no-out-link --repeat 1 2> $TEST_ROOT/log) +grep 'differs from previous round' $TEST_ROOT/log + +path=$(nix-build check.nix -A fetchurl --no-out-link --hashed-mirrors '') + +chmod +w $path +echo foo > $path +chmod -w $path + +nix-build check.nix -A fetchurl --no-out-link --check --hashed-mirrors '' + +# Note: "check" doesn't repair anything, it just compares to the hash stored in the database. +[[ $(cat $path) = foo ]] + +nix-build check.nix -A fetchurl --no-out-link --repair --hashed-mirrors '' + +[[ $(cat $path) != foo ]] diff --git a/tests/common.sh.in b/tests/common.sh.in index 09f2949141a4..195205988afb 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -11,7 +11,6 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var export NIX_LOG_DIR=$TEST_ROOT/var/log/nix export NIX_STATE_DIR=$TEST_ROOT/var/nix export NIX_CONF_DIR=$TEST_ROOT/etc -export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests export _NIX_TEST_SHARED=$TEST_ROOT/shared if [[ -n $NIX_STORE ]]; then export _NIX_TEST_NO_SANDBOX=1 @@ -32,6 +31,7 @@ export xmllint="@xmllint@" export SHELL="@bash@" export PAGER=cat export HAVE_SODIUM="@HAVE_SODIUM@" +export HAVE_BROTLI="@HAVE_BROTLI@" export version=@PACKAGE_VERSION@ export system=@system@ @@ -86,6 +86,24 @@ killDaemon() { trap "" EXIT } +canUseSandbox() { + if [[ $(uname) != Linux ]]; then return 1; fi + + if [ ! -L /proc/self/ns/user ]; then + echo "Kernel doesn't support user namespaces, skipping this test..." + return 1 + fi + + if [ -e /proc/sys/kernel/unprivileged_userns_clone ]; then + if [ "$(cat /proc/sys/kernel/unprivileged_userns_clone)" != 1 ]; then + echo "Unprivileged user namespaces disabled by sysctl, skipping this test..." 
+ return 1 + fi + fi + + return 0 +} + fail() { echo "$1" exit 1 diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index 09e4f742668e..530ac7bb813c 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -29,10 +29,17 @@ rev2=$(git -C $repo rev-parse HEAD) path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") [[ $(cat $path/hello) = world ]] +# In pure eval mode, fetchGit without a revision should fail. +[[ $(nix eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))") = world ]] +(! nix eval --pure-eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))") + # Fetch using an explicit revision hash. path2=$(nix eval --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath") [[ $path = $path2 ]] +# In pure eval mode, fetchGit with a revision should succeed. +[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]] + # Fetch again. This should be cached. mv $repo ${repo}-tmp path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") @@ -93,3 +100,42 @@ git -C $repo add hello git -C $repo commit -m 'Bla4' rev3=$(git -C $repo rev-parse HEAD) nix eval --tarball-ttl 3600 "(builtins.fetchGit { url = $repo; rev = \"$rev3\"; })" >/dev/null + +# Update 'path' to reflect latest master +path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") + +# Check behavior when non-master branch is used +git -C $repo checkout $rev2 -b dev +echo dev > $repo/hello + +# File URI uses 'master' unless specified otherwise +path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") +[[ $path = $path2 ]] + +# Using local path with branch other than 'master' should work when clean or dirty +path3=$(nix eval --raw "(builtins.fetchGit $repo).outPath") +# (check dirty-tree handling was used) +[[ $(nix eval --raw "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]] + +# Committing shouldn't change store path, or switch to using 'master' +git -C $repo commit -m 'Bla5' -a +path4=$(nix eval --raw "(builtins.fetchGit $repo).outPath") +[[ $(cat $path4/hello) = dev ]] +[[ $path3 = $path4 ]] + +# Confirm same as 'dev' branch +path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") +[[ $path3 = $path5 ]] + + +# Nuke the cache +rm -rf $TEST_HOME/.cache/nix/git + +# Try again, but without 'git' on PATH +NIX=$(command -v nix) +# This should fail +(! PATH= $NIX eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" ) + +# Try again, with 'git' available. This should work. +path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") +[[ $path3 = $path5 ]] diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh index 271350ecd171..4088dbd39796 100644 --- a/tests/fetchMercurial.sh +++ b/tests/fetchMercurial.sh @@ -29,10 +29,17 @@ rev2=$(hg log --cwd $repo -r tip --template '{node}') path=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath") [[ $(cat $path/hello) = world ]] +# In pure eval mode, fetchGit without a revision should fail. +[[ $(nix eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") = world ]] +(! nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") + # Fetch using an explicit revision hash. path2=$(nix eval --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath") [[ $path = $path2 ]] +# In pure eval mode, fetchGit with a revision should succeed. 
+[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]] + # Fetch again. This should be cached. mv $repo ${repo}-tmp path2=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath") diff --git a/tests/fixed.sh b/tests/fixed.sh index cac3f0be91b0..8f51403a7071 100644 --- a/tests/fixed.sh +++ b/tests/fixed.sh @@ -5,15 +5,22 @@ clearStore export IMPURE_VAR1=foo export IMPURE_VAR2=bar +path=$(nix-store -q $(nix-instantiate fixed.nix -A good.0)) + +echo 'testing bad...' +nix-build fixed.nix -A bad --no-out-link && fail "should fail" + +# Building with the bad hash should produce the "good" output path as +# a side-effect. +[[ -e $path ]] +nix path-info --json $path | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd + echo 'testing good...' nix-build fixed.nix -A good --no-out-link echo 'testing good2...' nix-build fixed.nix -A good2 --no-out-link -echo 'testing bad...' -nix-build fixed.nix -A bad --no-out-link && fail "should fail" - echo 'testing reallyBad...' nix-instantiate fixed.nix -A reallyBad && fail "should fail" diff --git a/tests/lang/data b/tests/lang/data new file mode 100644 index 000000000000..257cc5642cb1 --- /dev/null +++ b/tests/lang/data @@ -0,0 +1 @@ +foo diff --git a/tests/lang/eval-okay-path.exp b/tests/lang/eval-okay-path.exp new file mode 100644 index 000000000000..6827d49ffa11 --- /dev/null +++ b/tests/lang/eval-okay-path.exp @@ -0,0 +1 @@ +"/run/user/1000/nix-test/store/wjagrv37lfvfx92g2gf3yqflwypj0q1y-output" diff --git a/tests/lang/eval-okay-path.nix b/tests/lang/eval-okay-path.nix new file mode 100644 index 000000000000..e67168cf3edf --- /dev/null +++ b/tests/lang/eval-okay-path.nix @@ -0,0 +1,7 @@ +builtins.path + { path = ./.; + filter = path: _: baseNameOf path == "data"; + recursive = true; + sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw"; + name = "output"; + } diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh index 4a686bb59a3b..acfd46c54170 100644 --- a/tests/linux-sandbox.sh +++ b/tests/linux-sandbox.sh @@ -2,7 +2,7 @@ source common.sh clearStore -if [[ $(uname) != Linux ]]; then exit; fi +if ! canUseSandbox; then exit; fi # Note: we need to bind-mount $SHELL into the chroot. Currently we # only support the case where $SHELL is in the Nix store, because diff --git a/tests/local.mk b/tests/local.mk index baf74224bb12..e90b9f7da4ad 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -19,7 +19,10 @@ nix_tests = \ fetchGit.sh \ fetchMercurial.sh \ signing.sh \ - run.sh + run.sh \ + brotli.sh \ + pure-eval.sh \ + check.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix index 44126dd64e47..be0a4a683cda 100644 --- a/tests/nix-copy-closure.nix +++ b/tests/nix-copy-closure.nix @@ -1,8 +1,8 @@ # Test ‘nix-copy-closure’. 
-{ system, nix }: +{ nixpkgs, system, nix }: -with import <nixpkgs/nixos/lib/testing.nix> { inherit system; }; +with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; }; makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in { diff --git a/tests/pure-eval.nix b/tests/pure-eval.nix new file mode 100644 index 000000000000..ed25b3d45637 --- /dev/null +++ b/tests/pure-eval.nix @@ -0,0 +1,3 @@ +{ + x = 123; +} diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh new file mode 100644 index 000000000000..49c8564487c3 --- /dev/null +++ b/tests/pure-eval.sh @@ -0,0 +1,18 @@ +source common.sh + +clearStore + +nix eval --pure-eval '(assert 1 + 2 == 3; true)' + +[[ $(nix eval '(builtins.readFile ./pure-eval.sh)') =~ clearStore ]] + +(! nix eval --pure-eval '(builtins.readFile ./pure-eval.sh)') + +(! nix eval --pure-eval '(builtins.currentTime)') +(! nix eval --pure-eval '(builtins.currentSystem)') + +(! nix-instantiate --pure-eval ./simple.nix) + +[[ $(nix eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; })).x)") == 123 ]] +(! nix eval --pure-eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; })).x)") +nix eval --pure-eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; sha256 = \"$(nix hash-file pure-eval.nix --type sha256)\"; })).x)" diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index 58a26d8b6182..75704ace2dba 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -1,8 +1,8 @@ # Test Nix's remote build feature. -{ system, nix }: +{ nixpkgs, system, nix }: -with import <nixpkgs/nixos/lib/testing.nix> { inherit system; }; +with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; }; makeTest ( diff --git a/tests/restricted.nix b/tests/restricted.nix new file mode 100644 index 000000000000..e0ef5840209c --- /dev/null +++ b/tests/restricted.nix @@ -0,0 +1 @@ +1 + 2 diff --git a/tests/restricted.sh b/tests/restricted.sh index c063c8693d55..0605383cc86a 100644 --- a/tests/restricted.sh +++ b/tests/restricted.sh @@ -3,7 +3,8 @@ source common.sh clearStore nix-instantiate --restrict-eval --eval -E '1 + 2' -(! nix-instantiate --restrict-eval ./simple.nix) +(! nix-instantiate --restrict-eval ./restricted.nix) +(! nix-instantiate --eval --restrict-eval <(echo '1 + 2')) nix-instantiate --restrict-eval ./simple.nix -I src=. nix-instantiate --restrict-eval ./simple.nix -I src1=simple.nix -I src2=config.nix -I src3=./simple.builder.sh @@ -28,3 +29,12 @@ nix eval --raw "(builtins.fetchurl file://$(pwd)/restricted.sh)" --restrict-eval (! nix eval --raw "(builtins.fetchurl https://github.com/NixOS/patchelf/archive/master.tar.gz)" --restrict-eval) (! nix eval --raw "(builtins.fetchTarball https://github.com/NixOS/patchelf/archive/master.tar.gz)" --restrict-eval) (! nix eval --raw "(fetchGit git://github.com/NixOS/patchelf.git)" --restrict-eval) + +ln -sfn $(pwd)/restricted.nix $TEST_ROOT/restricted.nix +[[ $(nix-instantiate --eval $TEST_ROOT/restricted.nix) == 3 ]] +(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix) +(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT) +(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I .) +nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT -I . + +[[ $(nix eval --raw --restrict-eval -I . '(builtins.readFile "${import ./simple.nix}/hello")') == 'Hello World!' 
]] diff --git a/tests/run.sh b/tests/run.sh index 784d29183cf3..d1dbfd6bd4a6 100644 --- a/tests/run.sh +++ b/tests/run.sh @@ -6,24 +6,23 @@ clearCache nix run -f run.nix hello -c hello | grep 'Hello World' nix run -f run.nix hello -c hello NixOS | grep 'Hello NixOS' -if [[ $(uname) = Linux ]]; then +if ! canUseSandbox; then exit; fi - chmod -R u+w $TEST_ROOT/store0 || true - rm -rf $TEST_ROOT/store0 +chmod -R u+w $TEST_ROOT/store0 || true +rm -rf $TEST_ROOT/store0 - clearStore +clearStore - path=$(nix eval --raw -f run.nix hello) +path=$(nix eval --raw -f run.nix hello) - # Note: we need the sandbox paths to ensure that the shell is - # visible in the sandbox. - nix run --sandbox-build-dir /build-tmp \ - --sandbox-paths '/nix? /bin? /lib? /usr?' \ - --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World' +# Note: we need the sandbox paths to ensure that the shell is +# visible in the sandbox. +nix run --sandbox-build-dir /build-tmp \ + --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \ + --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World' - path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello') +path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello') - [[ $path/bin/hello = $path2 ]] +[[ $path/bin/hello = $path2 ]] - [[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]] -fi +[[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]] diff --git a/tests/setuid.nix b/tests/setuid.nix index c982d9cf0366..77e83c8d6c2c 100644 --- a/tests/setuid.nix +++ b/tests/setuid.nix @@ -1,8 +1,8 @@ # Verify that Linux builds cannot create setuid or setgid binaries. -{ system, nix }: +{ nixpkgs, system, nix }: -with import <nixpkgs/nixos/lib/testing.nix> { inherit system; }; +with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; }; makeTest { diff --git a/tests/shell.shebang.sh b/tests/shell.shebang.sh index c8e55ca9b90c..f7132043de44 100755 --- a/tests/shell.shebang.sh +++ b/tests/shell.shebang.sh @@ -1,4 +1,4 @@ #! @ENV_PROG@ nix-shell -#! nix-shell -I nixpkgs=shell.nix --no-use-substitutes +#! nix-shell -I nixpkgs=shell.nix --no-substitute #! nix-shell --pure -i bash -p foo bar echo "$(foo) $(bar) $@" diff --git a/version b/version index 35d51f33b34f..415b19fc3623 100644 --- a/version +++ b/version @@ -1 +1 @@ -1.12 \ No newline at end of file +2.0 \ No newline at end of file |
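A rough sketch of how the brotli-compressed binary cache exercised by tests/brotli.sh might be driven by hand; the cache location and the expression built here are illustrative assumptions, and libbrotli support is taken to be compiled in:

# Round-trip a store path through a local binary cache that stores its
# NARs brotli-compressed (compression=br), as tests/brotli.sh does.
cache="file:///tmp/example-cache?compression=br"

out=$(nix-build dependencies.nix --no-out-link)   # build (or pick) any store path
nix copy --to "$cache" "$out"                     # upload; NARs are brotli-compressed
nix hash-path "$out"                              # record the content hash

# tests/brotli.sh clears the local store at this point before copying back.
nix copy --from "$cache" "$out" --no-check-sigs   # substitute the path from the cache
nix hash-path "$out"                              # should print the same hash as above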