diff --git a/.clang-format b/.clang-format new file mode 100644 index 000000000..9c0c0946a --- /dev/null +++ b/.clang-format @@ -0,0 +1,30 @@ +BasedOnStyle: LLVM +IndentWidth: 4 +BreakBeforeBraces: Custom +BraceWrapping: + AfterStruct: true + AfterClass: true + AfterFunction: true + AfterUnion: true + SplitEmptyRecord: false +PointerAlignment: Middle +FixNamespaceComments: false +SortIncludes: Never +#IndentPPDirectives: BeforeHash +SpaceAfterCStyleCast: true +SpaceAfterTemplateKeyword: false +AccessModifierOffset: -4 +AlignAfterOpenBracket: AlwaysBreak +AlignEscapedNewlines: DontAlign +ColumnLimit: 120 +BreakStringLiterals: false +BitFieldColonSpacing: None +AllowShortFunctionsOnASingleLine: Empty +AlwaysBreakTemplateDeclarations: Yes +BinPackParameters: false +BreakConstructorInitializers: BeforeComma +EmptyLineAfterAccessModifier: Leave # change to always/never later? +EmptyLineBeforeAccessModifier: Leave +#PackConstructorInitializers: BinPack +BreakBeforeBinaryOperators: NonAssignment +AlwaysBreakBeforeMultilineStrings: true diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ab5908649..526fecabf 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -10,9 +10,8 @@ # This file .github/CODEOWNERS @edolstra -# Public documentation -/doc @fricklerhandwerk -*.md @fricklerhandwerk +# Documentation of built-in functions +src/libexpr/primops.cc @roberth # Libstore layer /src/libstore @thufschmitt diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 217b19108..d12a4d36c 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,6 +10,8 @@ -# Priorities +# Priorities and Process Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc). + +The Nix maintainer team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19) to [schedule and track reviews](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol). 
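As a quick illustration of how the new `.clang-format` file would be exercised (a hypothetical invocation, not part of this patch; the target file is just an example from the tree):

```console
$ clang-format --dry-run --Werror src/libstore/store-api.cc
```

`--dry-run` prints the would-be reformatting diagnostics without touching the file, and `--Werror` turns them into errors, which makes this usable as a CI-style check.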
diff --git a/.github/labeler.yml b/.github/labeler.yml index 7544f07a6..b1b18c488 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,23 +1,30 @@ "documentation": - - doc/manual/* - - src/nix/**/*.md + - changed-files: + - any-glob-to-any-file: "doc/manual/*" + - any-glob-to-any-file: "src/nix/**/*.md" "store": - - src/libstore/store-api.* - - src/libstore/*-store.* + - changed-files: + - any-glob-to-any-file: "src/libstore/store-api.*" + - any-glob-to-any-file: "src/libstore/*-store.*" "fetching": - - src/libfetchers/**/* + - changed-files: + - any-glob-to-any-file: "src/libfetchers/**/*" "repl": - - src/libcmd/repl.* - - src/nix/repl.* + - changed-files: + - any-glob-to-any-file: "src/libcmd/repl.*" + - any-glob-to-any-file: "src/nix/repl.*" "new-cli": - - src/nix/**/* + - changed-files: + - any-glob-to-any-file: "src/nix/**/*" "with-tests": - # Unit tests - - src/*/tests/**/* - # Functional and integration tests - - tests/functional/**/* + - changed-files: + # Unit tests + - any-glob-to-any-file: "src/*/tests/**/*" + # Functional and integration tests + - any-glob-to-any-file: "tests/functional/**/*" + diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 893f4a56f..f003114ba 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v2.1.0 + uses: zeebe-io/backport-action@v2.3.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index afe4dc2e3..aa2551424 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,12 +20,12 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v23 + - uses: cachix/install-nix-action@v24 with: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v12 + - uses: cachix/cachix-action@v13 if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' @@ -62,10 +62,10 @@ jobs: with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v23 + - uses: cachix/install-nix-action@v24 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - - uses: cachix/cachix-action@v12 + - uses: cachix/cachix-action@v13 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -84,7 +84,7 @@ jobs: steps: - uses: actions/checkout@v4 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v23 + - uses: cachix/install-nix-action@v24 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" @@ -114,12 +114,12 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v23 + - uses: cachix/install-nix-action@v24 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - run: echo NIX_VERSION="$(nix --experimental-features 
'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v12 + - uses: cachix/cachix-action@v13 if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index d83cb4f18..34aa4e6bd 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest if: github.repository_owner == 'NixOS' steps: - - uses: actions/labeler@v4 + - uses: actions/labeler@v5 with: repo-token: ${{ secrets.GITHUB_TOKEN }} sync-labels: false diff --git a/.gitignore b/.gitignore index 04d96ca2c..a47b195bb 100644 --- a/.gitignore +++ b/.gitignore @@ -21,12 +21,16 @@ perl/Makefile.config /doc/manual/language.json /doc/manual/xp-features.json /doc/manual/src/SUMMARY.md +/doc/manual/src/SUMMARY-rl-next.md +/doc/manual/src/store/types/* +!/doc/manual/src/store/types/index.md.in /doc/manual/src/command-ref/new-cli /doc/manual/src/command-ref/conf-file.md /doc/manual/src/command-ref/experimental-features-shortlist.md /doc/manual/src/contributing/experimental-feature-descriptions.md /doc/manual/src/language/builtins.md /doc/manual/src/language/builtin-constants.md +/doc/manual/src/release-notes/rl-next.md # /scripts/ /scripts/nix-profile.sh @@ -41,18 +45,18 @@ perl/Makefile.config /src/libexpr/parser-tab.hh /src/libexpr/parser-tab.output /src/libexpr/nix.tbl -/src/libexpr/tests/libnixexpr-tests +/tests/unit/libexpr/libnixexpr-tests # /src/libstore/ *.gen.* -/src/libstore/tests/libnixstore-tests +/tests/unit/libstore/libnixstore-tests # /src/libutil/ -/src/libutil/tests/libnixutil-tests +/tests/unit/libutil/libnixutil-tests /src/nix/nix -/src/nix/doc +/src/nix/generated-doc # /src/nix-env/ /src/nix-env/nix-env @@ -137,6 +141,7 @@ compile_commands.json nix-rust/target result +result-* # IDE .vscode/ @@ -144,3 +149,6 @@ result # clangd and possibly more .cache/ + +# Mac OS +.DS_Store diff --git a/.version b/.version index ef0f38abe..7329e21c3 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.19.0 +2.20.0 diff --git a/Makefile b/Makefile index 4f4ac0c6e..7bbfbddbe 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,11 @@ --include Makefile.config -clean-files += Makefile.config +# External build directory support + +include mk/build-dir.mk + +-include $(buildprefix)Makefile.config +clean-files += $(buildprefix)Makefile.config + +# List makefiles ifeq ($(ENABLE_BUILD), yes) makefiles = \ @@ -19,31 +25,30 @@ makefiles = \ misc/zsh/local.mk \ misc/systemd/local.mk \ misc/launchd/local.mk \ - misc/upstart/local.mk \ - doc/manual/local.mk \ - doc/internal-api/local.mk + misc/upstart/local.mk endif -ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes) -UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data +ifeq ($(ENABLE_UNIT_TESTS), yes) makefiles += \ - src/libutil/tests/local.mk \ - src/libstore/tests/local.mk \ - src/libexpr/tests/local.mk + tests/unit/libutil/local.mk \ + tests/unit/libutil-support/local.mk \ + tests/unit/libstore/local.mk \ + tests/unit/libstore-support/local.mk \ + tests/unit/libexpr/local.mk \ + tests/unit/libexpr-support/local.mk endif -ifeq ($(ENABLE_TESTS), yes) +ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes) makefiles += \ tests/functional/local.mk \ tests/functional/ca/local.mk \ tests/functional/dyn-drv/local.mk \ tests/functional/test-libstoreconsumer/local.mk \ tests/functional/plugins/local.mk -else -makefiles += \ - mk/disable-tests.mk endif +# Miscellaneous global Flags + OPTIMIZE = 1 ifeq ($(OPTIMIZE), 1) @@ -53,6 
+58,63 @@ else GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE endif +include mk/platform.mk + +ifdef HOST_WINDOWS + # Windows DLLs are stricter about symbol visibility than Unix shared + # objects --- see https://gcc.gnu.org/wiki/Visibility for details. + # This is a temporary sledgehammer to export everything like on Unix, + # and not deal with this yet. + # + # TODO do not do this, and instead do fine-grained export annotations. + GLOBAL_LDFLAGS += -Wl,--export-all-symbols +endif + +GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src + +# Include the main lib, causing rules to be defined + include mk/lib.mk -GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src +# Fallback stub rules for better UX when things are disabled +# +# These must be defined after `mk/lib.mk`. Otherwise the first rule +# incorrectly becomes the default target. + +ifneq ($(ENABLE_UNIT_TESTS), yes) +.PHONY: check +check: + @echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'." + @exit 1 +endif + +ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes) +.PHONY: installcheck +installcheck: + @echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'." + @exit 1 +endif + +# Documentation or else fallback stub rules. +# +# The documentation makefiles must be included after `mk/lib.mk` so rules +# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like +# variables, unfortunately. + +ifeq ($(ENABLE_DOC_GEN), yes) +$(eval $(call include-sub-makefile, doc/manual/local.mk)) +else +.PHONY: manual-html manpages +manual-html manpages: + @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'." + @exit 1 +endif + +ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) +$(eval $(call include-sub-makefile, doc/internal-api/local.mk)) +else +.PHONY: internal-api-html +internal-api-html: + @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
+ @exit 1 +endif diff --git a/Makefile.config.in b/Makefile.config.in index 19992fa20..21a9f41ec 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -8,15 +8,22 @@ CXX = @CXX@ CXXFLAGS = @CXXFLAGS@ CXXLTO = @CXXLTO@ EDITLINE_LIBS = @EDITLINE_LIBS@ +ENABLE_BUILD = @ENABLE_BUILD@ +ENABLE_DOC_GEN = @ENABLE_DOC_GEN@ +ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@ +ENABLE_INTERNAL_API_DOCS = @ENABLE_INTERNAL_API_DOCS@ ENABLE_S3 = @ENABLE_S3@ +ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@ GTEST_LIBS = @GTEST_LIBS@ HAVE_LIBCPUID = @HAVE_LIBCPUID@ HAVE_SECCOMP = @HAVE_SECCOMP@ HOST_OS = @host_os@ +INSTALL_UNIT_TESTS = @INSTALL_UNIT_TESTS@ LDFLAGS = @LDFLAGS@ LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@ LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ LIBCURL_LIBS = @LIBCURL_LIBS@ +LIBGIT2_LIBS = @LIBGIT2_LIBS@ LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@ LOWDOWN_LIBS = @LOWDOWN_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ @@ -28,9 +35,10 @@ SODIUM_LIBS = @SODIUM_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ bash = @bash@ bindir = @bindir@ +checkbindir = @checkbindir@ +checklibdir = @checklibdir@ datadir = @datadir@ datarootdir = @datarootdir@ -doc_generate = @doc_generate@ docdir = @docdir@ embedded_sandbox_shell = @embedded_sandbox_shell@ exec_prefix = @exec_prefix@ @@ -46,6 +54,3 @@ sandbox_shell = @sandbox_shell@ storedir = @storedir@ sysconfdir = @sysconfdir@ system = @system@ -ENABLE_BUILD = @ENABLE_BUILD@ -ENABLE_TESTS = @ENABLE_TESTS@ -internal_api_docs = @internal_api_docs@ diff --git a/boehmgc-traceable_allocator-public.diff b/boehmgc-traceable_allocator-public.diff new file mode 100644 index 000000000..903c707a6 --- /dev/null +++ b/boehmgc-traceable_allocator-public.diff @@ -0,0 +1,12 @@ +diff --git a/include/gc_allocator.h b/include/gc_allocator.h +index 597c7f13..587286be 100644 +--- a/include/gc_allocator.h ++++ b/include/gc_allocator.h +@@ -312,6 +312,7 @@ public: + + template<> + class traceable_allocator<void> { ++public: + typedef size_t size_type; + typedef ptrdiff_t difference_type; + typedef void* pointer; diff --git a/configure.ac b/configure.ac index 75ce7d01d..2594544ab 100644 --- a/configure.ac +++ b/configure.ac @@ -122,7 +122,6 @@ AC_PATH_PROG(flex, flex, false) AC_PATH_PROG(bison, bison, false) AC_PATH_PROG(dot, dot) AC_PATH_PROG(lsof, lsof, lsof) -NEED_PROG(jq, jq) AC_SUBST(coreutils, [$(dirname $(type -p cat))]) @@ -133,6 +132,48 @@ AC_ARG_WITH(store-dir, AS_HELP_STRING([--with-store-dir=PATH],[path of the Nix s AC_SUBST(storedir) +# Running the functional tests without building Nix is useful for testing +# different pre-built versions of Nix against each other. +AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), + ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) +AC_SUBST(ENABLE_BUILD) + +# Building without unit tests is useful for bootstrapping with a smaller footprint +# or running the tests in a separate derivation. Otherwise, we do compile and +# run them. + +AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the tests]), + ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD) +AC_SUBST(ENABLE_UNIT_TESTS) + +AS_IF( + [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"], + [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled.
Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])]) + +AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the tests]), + ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes) +AC_SUBST(ENABLE_FUNCTIONAL_TESTS) + +# documentation generation switch +AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]), + ENABLE_DOC_GEN=$enableval, ENABLE_DOC_GEN=$ENABLE_BUILD) +AC_SUBST(ENABLE_DOC_GEN) + +AS_IF( + [test "$ENABLE_BUILD" == "no" && test "$ENABLE_DOC_GEN" == "yes"], + [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])]) + +# Building without API docs is the default as Nix' C++ interfaces are internal and unstable. +AC_ARG_ENABLE(internal-api-docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), + ENABLE_INTERNAL_API_DOCS=$enableval, ENABLE_INTERNAL_API_DOCS=no) +AC_SUBST(ENABLE_INTERNAL_API_DOCS) + +AS_IF( + [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"], + [NEED_PROG(jq, jq)]) + +AS_IF([test "$ENABLE_BUILD" == "yes"],[ + # Look for boost, a required dependency. # Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags, # and CPPFLAGS are not passed to the C++ compiler automatically. @@ -155,22 +196,17 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then LDFLAGS="-latomic $LDFLAGS" fi -# Running the functional tests without building Nix is useful for testing -# different pre-built versions of Nix against each other. -AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), - ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) -AC_SUBST(ENABLE_BUILD) -# Building without tests is useful for bootstrapping with a smaller footprint -# or running the tests in a separate derivation. Otherwise, we do compile and -# run them. -AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]), - ENABLE_TESTS=$enableval, ENABLE_TESTS=yes) -AC_SUBST(ENABLE_TESTS) +AC_ARG_ENABLE(install-unit-tests, AS_HELP_STRING([--enable-install-unit-tests],[Install the unit tests for running later (default no)]), + INSTALL_UNIT_TESTS=$enableval, INSTALL_UNIT_TESTS=no) +AC_SUBST(INSTALL_UNIT_TESTS) -# Building without API docs is the default as Nix' C++ interfaces are internal and unstable. -AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), - internal_api_docs=$enableval, internal_api_docs=no) -AC_SUBST(internal_api_docs) +AC_ARG_WITH(check-bin-dir, AS_HELP_STRING([--with-check-bin-dir=PATH],[path to install unit tests for running later (defaults to $libexecdir/nix)]), + checkbindir=$withval, checkbindir=$libexecdir/nix) +AC_SUBST(checkbindir) + +AC_ARG_WITH(check-lib-dir, AS_HELP_STRING([--with-check-lib-dir=PATH],[path to install unit tests for running later (defaults to $libdir)]), + checklibdir=$withval, checklibdir=$libdir) +AC_SUBST(checklibdir) # LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]), @@ -215,17 +251,25 @@ PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CX # Look for libcurl, a required dependency. 
PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"]) -# Look for editline, a required dependency. +# Look for editline or readline, a required dependency. # The the libeditline.pc file was added only in libeditline >= 1.15.2, # see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607, -# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for -# editline.h when the pkg-config approach fails. -PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [ - AC_CHECK_HEADERS([editline.h], [true], - [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])]) - AC_SEARCH_LIBS([readline read_history], [editline], [], - [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])]) -]) +# Older versions are no longer supported. +AC_ARG_WITH( + [readline-flavor], + AS_HELP_STRING([--with-readline-flavor],[Which library to use for nice line editing with the Nix language REPL [default=editline]]), + [readline_flavor=$withval], + [readline_flavor=editline]) +AS_CASE(["$readline_flavor"], + [editline], [ + readline_flavor_pc=libeditline + ], + [readline], [ + readline_flavor_pc=readline + AC_DEFINE([USE_READLINE], [1], [Use readline instead of editline]) + ], + [AC_MSG_ERROR([bad value "$readline_flavor" for --with-readline-flavor, must be one of: editline, readline])]) +PKG_CHECK_MODULES([EDITLINE], [$readline_flavor_pc], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"]) # Look for libsodium. PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"]) @@ -270,6 +314,14 @@ case "$host_os" in esac AC_SUBST(HAVE_SECCOMP, [$have_seccomp]) +# Optional dependencies for better normalizing file system data +AC_CHECK_HEADERS([sys/xattr.h]) +AS_IF([test "$ac_cv_header_sys_xattr_h" = "yes"],[ + AC_CHECK_FUNCS([llistxattr lremovexattr]) + AS_IF([test "$ac_cv_func_llistxattr" = "yes" && test "$ac_cv_func_lremovexattr" = "yes"],[ + AC_DEFINE([HAVE_ACL_SUPPORT], [1], [Define if we can manipulate file system Access Control Lists]) + ]) +]) # Look for aws-cpp-sdk-s3. AC_LANG_PUSH(C++) @@ -296,8 +348,7 @@ if test "$gc" = yes; then AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.]) fi - -if test "$ENABLE_TESTS" = yes; then +AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ # Look for gtest. PKG_CHECK_MODULES([GTEST], [gtest_main]) @@ -324,19 +375,32 @@ AC_LINK_IFELSE([ [AC_MSG_ERROR([librapidcheck is not found.])]) AC_LANG_POP(C++) -fi +]) # Look for nlohmann/json. PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9]) -# documentation generation switch -AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]), - doc_generate=$enableval, doc_generate=yes) -AC_SUBST(doc_generate) - # Look for lowdown library.
-PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"]) +AC_ARG_ENABLE([markdown], AS_HELP_STRING([--enable-markdown], [Enable Markdown rendering in the Nix binary (requires lowdown) [default=auto]]), + enable_markdown=$enableval, enable_markdown=auto) +AS_CASE(["$enable_markdown"], + [yes | auto], [ + PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [ + CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS" + have_lowdown=1 + AC_DEFINE(HAVE_LOWDOWN, 1, [Whether lowdown is available and should be used for Markdown rendering.]) + ], [ + AS_IF([test "x$enable_markdown" == "xyes"], [AC_MSG_ERROR([--enable-markdown was specified, but lowdown was not found.])]) + ]) + ], + [no], [have_lowdown=], + [AC_MSG_ERROR([bad value "$enable_markdown" for --enable-markdown, must be one of: yes, no, auto])]) + + +# Look for libgit2. +PKG_CHECK_MODULES([LIBGIT2], [libgit2]) + # Setuid installations. AC_CHECK_FUNCS([setresuid setreuid lchown]) @@ -368,6 +432,8 @@ if test "$embedded_sandbox_shell" = yes; then AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.]) fi +]) + # Expand all variables in config.status. test "$prefix" = NONE && prefix=$ac_default_prefix diff --git a/doc/internal-api/doxygen.cfg.in b/doc/internal-api/doxygen.cfg.in index 599be2470..ad5af97e6 100644 --- a/doc/internal-api/doxygen.cfg.in +++ b/doc/internal-api/doxygen.cfg.in @@ -39,17 +39,21 @@ INPUT = \ src/libcmd \ src/libexpr \ src/libexpr/flake \ - src/libexpr/tests \ - src/libexpr/tests/value \ + tests/unit/libexpr \ + tests/unit/libexpr/value \ + tests/unit/libexpr/test \ + tests/unit/libexpr/test/value \ src/libexpr/value \ src/libfetchers \ src/libmain \ src/libstore \ src/libstore/build \ src/libstore/builtins \ - src/libstore/tests \ + tests/unit/libstore \ + tests/unit/libstore/test \ src/libutil \ - src/libutil/tests \ + tests/unit/libutil \ + tests/unit/libutil/test \ src/nix \ src/nix-env \ src/nix-store diff --git a/doc/internal-api/local.mk b/doc/internal-api/local.mk index 890f341b7..bf2c4dede 100644 --- a/doc/internal-api/local.mk +++ b/doc/internal-api/local.mk @@ -1,19 +1,7 @@ -.PHONY: internal-api-html - -ifeq ($(internal_api_docs), yes) - $(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg mkdir -p $(docdir)/internal-api { cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen - # Generate the HTML API docs for Nix's unstable internal interfaces. +.PHONY: internal-api-html internal-api-html: $(docdir)/internal-api/html/index.html - -else - -# Make a nicer error message -internal-api-html: - @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." - @exit 1 - -endif diff --git a/doc/manual/_redirects b/doc/manual/_redirects new file mode 100644 index 000000000..62c693c97 --- /dev/null +++ b/doc/manual/_redirects @@ -0,0 +1,40 @@ +# redirect rules for paths (server-side) to prevent link rot. +# see ./redirects.js for redirects based on URL fragments (client-side) +# +# concrete user story this supports: +# - user finds URL to the manual for Nix x.y +# - Nix x.z (z > y) is the most recent release +# - updating the version in the URL will show the right thing +# +# format documentation: +# - https://docs.netlify.com/routing/redirects/#syntax-for-the-redirects-file +# - https://docs.netlify.com/routing/redirects/redirect-options/ +# +# conventions: +# - always force (!) 
since this allows re-using file names # - group related paths to ease readability # - keep in alphabetical/wildcards-last order, which will reduce version control conflicts # - redirects that should have been there but are missing can be inserted where they belong + +/advanced-topics/advanced-topics /advanced-topics 301! + +/command-ref/command-ref /command-ref 301! + +/contributing/contributing /contributing 301! + +/expressions/expression-language /language/ 301! +/expressions/language-constructs /language/constructs 301! +/expressions/language-operators /language/operators 301! +/expressions/language-values /language/values 301! +/expressions/* /language/:splat 301! + +/installation/installation /installation 301! + +/package-management/basic-package-mgmt /command-ref/nix-env 301! +/package-management/channels /command-ref/nix-channel 301! +/package-management/package-management /package-management 301! +/package-management/s3-substituter /store/types/s3-binary-cache-store 301! + +/protocols/protocols /protocols 301! + +/release-notes/release-notes /release-notes 301! diff --git a/doc/manual/generate-builtin-constants.nix b/doc/manual/generate-builtin-constants.nix index 8af80a02c..cccd1e279 100644 --- a/doc/manual/generate-builtin-constants.nix +++ b/doc/manual/generate-builtin-constants.nix @@ -1,6 +1,6 @@ let inherit (builtins) concatStringsSep attrValues mapAttrs; - inherit (import ./utils.nix) optionalString squash; + inherit (import <nix/utils.nix>) optionalString squash; in builtinsInfo: diff --git a/doc/manual/generate-builtins.nix b/doc/manual/generate-builtins.nix index 813a287f5..007b698f1 100644 --- a/doc/manual/generate-builtins.nix +++ b/doc/manual/generate-builtins.nix @@ -1,6 +1,6 @@ let inherit (builtins) concatStringsSep attrValues mapAttrs; - inherit (import ./utils.nix) optionalString squash; + inherit (import <nix/utils.nix>) optionalString squash; in builtinsInfo: @@ -8,7 +8,15 @@ let showBuiltin = name: { doc, args, arity, experimental-feature }: let experimentalNotice = optionalString (experimental-feature != null) '' - This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled. + > **Note** + > + > This function is only available if the [`${experimental-feature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) is enabled. + > + > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): + > + > ``` + > extra-experimental-features = ${experimental-feature} + > ``` ''; in squash ''
- ${doc} - ${experimentalNotice} + ${doc}
''; listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args); diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index 14136016d..ae31b2a1f 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -1,9 +1,29 @@ let inherit (builtins) - attrNames attrValues fromJSON listToAttrs mapAttrs groupBy - concatStringsSep concatMap length lessThan replaceStrings sort; - inherit (import <nix/utils.nix>) attrsToList concatStrings optionalString filterAttrs trim squash unique; - showStoreDocs = import ./generate-store-info.nix; + attrNames + attrValues + concatMap + concatStringsSep + fromJSON + groupBy + length + lessThan + listToAttrs + mapAttrs + match + replaceStrings + sort + ; + inherit (import <nix/utils.nix>) + attrsToList + concatStrings + filterAttrs + optionalString + squash + trim + unique + ; + showStoreDocs = import <nix/generate-store-info.nix>; in inlineHTML: commandDump: @@ -31,7 +51,7 @@ let ${maybeSubcommands} - ${maybeStoreDocs} + ${maybeProse} ${maybeOptions} ''; @@ -71,25 +91,56 @@ let * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description} ''; - # FIXME: this is a hack. - # store parameters should not be part of command documentation to begin - # with, but instead be rendered on separate pages. - maybeStoreDocs = optionalString (details ? doc) - (replaceStrings [ "@stores@" ] [ (showStoreDocs inlineHTML commandInfo.stores) ] details.doc); + maybeProse = + # FIXME: this is a horrible hack to keep `nix help-stores` working. + # the correct answer to this is to remove that command and replace it + # by statically generated manpages or the output of something like `nix + # store info <store type>`. + let + help-stores = '' + ${index} - maybeOptions = let - allVisibleOptions = filterAttrs - (_: o: ! o.hiddenCategory) - (details.flags // toplevel.flags); - in optionalString (allVisibleOptions != {}) '' - # Options + ${allStores} + ''; + index = replaceStrings + [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ] + [ storesOverview "#local-store" "#local-daemon-store" ] + details.doc; + storesOverview = + let + showEntry = store: + "- [${store.name}](#${store.slug})"; + in + concatStringsSep "\n" (map showEntry storesList) + "\n"; + allStores = concatStringsSep "\n" (attrValues storePages); + storePages = listToAttrs + (map (s: { name = s.filename; value = s.page; }) storesList); + storesList = showStoreDocs { + storeInfo = commandInfo.stores; + inherit inlineHTML; + }; + in + optionalString (details ? doc) ( + if match "@store-types@" details.doc != [ ] + then help-stores + else details.doc + ); - ${showOptions inlineHTML allVisibleOptions} + maybeOptions = + let + allVisibleOptions = filterAttrs + (_: o: ! o.hiddenCategory) + (details.flags // toplevel.flags); + in + optionalString (allVisibleOptions != { }) '' + # Options - > **Note** - > - > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags. - ''; + ${showOptions inlineHTML allVisibleOptions} + + > **Note** + > + > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
+ ''; showOptions = inlineHTML: allOptions: let @@ -97,7 +148,7 @@ let ${optionalString (cat != "") "## ${cat}"} ${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))} - ''; + ''; showOption = name: option: let result = trim '' diff --git a/doc/manual/generate-settings.nix b/doc/manual/generate-settings.nix index 8736bb793..504cda362 100644 --- a/doc/manual/generate-settings.nix +++ b/doc/manual/generate-settings.nix @@ -1,6 +1,6 @@ let inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs; - inherit (import ./utils.nix) concatStrings indent optionalString squash; + inherit (import <nix/utils.nix>) concatStrings indent optionalString squash; in # `inlineHTML` is a hack to accommodate inconsistent output from `lowdown` @@ -20,10 +20,10 @@ let else "`${setting}`"; # separate body to cleanly handle indentation body = '' - ${description} - ${experimentalFeatureNote} + ${description} + **Default:** ${showDefault documentDefault defaultValue} ${showAliases aliases} @@ -31,18 +31,19 @@ let experimentalFeatureNote = optionalString (experimentalFeature != null) '' > **Warning** + > > This setting is part of an > [experimental feature](@docroot@/contributing/experimental-features.md). - - To change this setting, you need to make sure the corresponding experimental feature, - [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}), - is enabled. - For example, include the following in [`nix.conf`](#): - - ``` - extra-experimental-features = ${experimentalFeature} - ${setting} = ... - ``` + > + > To change this setting, make sure the + > [`${experimentalFeature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}) + > is enabled. + > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): + > + > ``` + > extra-experimental-features = ${experimentalFeature} + > ${setting} = ... + > ``` ''; showDefault = documentDefault: defaultValue: diff --git a/doc/manual/generate-store-info.nix b/doc/manual/generate-store-info.nix index 36215aadf..c311c3c39 100644 --- a/doc/manual/generate-store-info.nix +++ b/doc/manual/generate-store-info.nix @@ -1,45 +1,57 @@ let - inherit (builtins) attrValues mapAttrs; - inherit (import ./utils.nix) concatStrings optionalString; - showSettings = import ./generate-settings.nix; + inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings; + inherit (import <nix/utils.nix>) optionalString filterAttrs trim squash toLower unique indent; + showSettings = import <nix/generate-settings.nix>; in -inlineHTML: storesInfo: +{ + # data structure describing all stores and their parameters + storeInfo, + # whether to add inline HTML tags + # `lowdown` does not eat those for one of the output modes + inlineHTML, +}: let - showStore = name: { settings, doc, experimentalFeature }: + showStore = { name, slug }: { settings, doc, experimentalFeature }: let + result = squash '' + # ${name} - result = '' - ## ${name} + ${experimentalFeatureNote} - ${doc} + ${doc} - ${experimentalFeatureNote} + ## Settings - ### Settings + ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings} + ''; - ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings} + experimentalFeatureNote = optionalString (experimentalFeature != null) '' + > **Warning** + > + > This store is part of an + > [experimental feature](@docroot@/contributing/experimental-features.md).
+ > + > To use this store, make sure the + > [`${experimentalFeature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}) + > is enabled. + > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): + > + > ``` + > extra-experimental-features = ${experimentalFeature} + > ``` + ''; + in result; - # markdown doesn't like spaces in URLs - slug = builtins.replaceStrings [ " " ] [ "-" ] name; + storesList = map + (name: rec { + inherit name; + slug = replaceStrings [ " " ] [ "-" ] (toLower name); + filename = "${slug}.md"; + page = showStore { inherit name slug; } storeInfo.${name}; + }) + (attrNames storeInfo); - experimentalFeatureNote = optionalString (experimentalFeature != null) '' - > **Warning** - > This store is part of an - > [experimental feature](@docroot@/contributing/experimental-features.md). - - To use this store, you need to make sure the corresponding experimental feature, - [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}), - is enabled. - For example, include the following in [`nix.conf`](#): - - ``` - extra-experimental-features = ${experimentalFeature} - ``` - ''; - in result; - -in concatStrings (attrValues (mapAttrs showStore storesInfo)) +in storesList diff --git a/doc/manual/generate-store-types.nix b/doc/manual/generate-store-types.nix new file mode 100644 index 000000000..3b78a0e1b --- /dev/null +++ b/doc/manual/generate-store-types.nix @@ -0,0 +1,39 @@ +let + inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings; + showSettings = import <nix/generate-settings.nix>; + showStoreDocs = import <nix/generate-store-info.nix>; +in + +storeInfo: + +let + storesList = showStoreDocs { + inherit storeInfo; + inlineHTML = true; + }; + + index = + let + showEntry = store: + "- [${store.name}](./${store.filename})"; + in + concatStringsSep "\n" (map showEntry storesList); + + "index.md" = replaceStrings + [ "@store-types@" ] [ index ] + (readFile ./src/store/types/index.md.in); + + tableOfContents = + let + showEntry = store: + " - [${store.name}](store/types/${store.filename})"; + in + concatStringsSep "\n" (map showEntry storesList) + "\n"; + + "SUMMARY.md" = tableOfContents; + + storePages = listToAttrs + (map (s: { name = s.filename; value = s.page; }) storesList); + +in +storePages // { inherit "index.md" "SUMMARY.md"; } diff --git a/doc/manual/generate-xp-features-shortlist.nix b/doc/manual/generate-xp-features-shortlist.nix index 30e211c96..ec09f4b75 100644 --- a/doc/manual/generate-xp-features-shortlist.nix +++ b/doc/manual/generate-xp-features-shortlist.nix @@ -1,5 +1,5 @@ with builtins; -with import ./utils.nix; +with import <nix/utils.nix>; let showExperimentalFeature = name: doc: diff --git a/doc/manual/generate-xp-features.nix b/doc/manual/generate-xp-features.nix index adb94355c..0eec0e1da 100644 --- a/doc/manual/generate-xp-features.nix +++ b/doc/manual/generate-xp-features.nix @@ -1,5 +1,5 @@ with builtins; -with import ./utils.nix; +with import <nix/utils.nix>; let showExperimentalFeature = name: doc: @@ -8,4 +8,6 @@ let ${doc} ''; -in xps: (concatStringsSep "\n" (attrValues (mapAttrs showExperimentalFeature xps))) +in + +xps: (concatStringsSep "\n" (attrValues (mapAttrs showExperimentalFeature xps))) diff --git a/doc/manual/local.mk b/doc/manual/local.mk index db3daf252..b77168885 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -1,4 +1,7 @@ -ifeq ($(doc_generate),yes) +# The version of Nix used to generate the doc.
Can also be +# `$(nix_INSTALL_PATH)` or just `nix` (to grab the ambient one from the `PATH`), +# if one prefers. +doc_nix = $(nix_PATH) MANUAL_SRCS := \ $(call rwildcard, $(d)/src, *.md) \ @@ -24,7 +27,7 @@ man-pages += $(foreach subcommand, \ clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8 # Provide a dummy environment for nix, so that it will not access files outside the macOS sandbox. -# Set cores to 0 because otherwise nix show-config resolves the cores based on the current machine +# Set cores to 0 because otherwise `nix config show` resolves the cores based on the current machine dummy-env = env -i \ HOME=/dummy \ NIX_CONF_DIR=/dummy \ NIX_STATE_DIR=/dummy \ NIX_CONFIG='cores = 0' -nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw +nix-eval = $(dummy-env) $(doc_nix) eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw # re-implement mdBook's include directive to make it usable for terminal output and for proper @docroot@ substitution define process-includes @@ -92,64 +95,91 @@ $(d)/nix-profiles.5: $(d)/src/command-ref/files/profiles.md $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@ @rm $^.tmp -$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md +$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/SUMMARY-rl-next.md $(d)/src/store/types $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md @cp $< $@ @$(call process-includes,$@,$@) -$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(bindir)/nix +$(d)/src/store/types: $(d)/nix.json $(d)/utils.nix $(d)/generate-store-info.nix $(d)/generate-store-types.nix $(d)/src/store/types/index.md.in $(doc_nix) + @# FIXME: build out of tree!
+ @rm -rf $@.tmp + $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-store-types.nix (builtins.fromJSON (builtins.readFile $<)).stores' + @# do not destroy existing contents + @mv $@.tmp/* $@/ + +$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(doc_nix) @rm -rf $@ $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)' @mv $@.tmp $@ -$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix +$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(doc_nix) @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "conf"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; @mv $@.tmp $@ -$(d)/nix.json: $(bindir)/nix - $(trace-gen) $(dummy-env) $(bindir)/nix __dump-cli > $@.tmp +$(d)/nix.json: $(doc_nix) + $(trace-gen) $(dummy-env) $(doc_nix) __dump-cli > $@.tmp @mv $@.tmp $@ -$(d)/conf-file.json: $(bindir)/nix - $(trace-gen) $(dummy-env) $(bindir)/nix show-config --json --experimental-features nix-command > $@.tmp +$(d)/conf-file.json: $(doc_nix) + $(trace-gen) $(dummy-env) $(doc_nix) config show --json --experimental-features nix-command > $@.tmp @mv $@.tmp $@ -$(d)/src/contributing/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(bindir)/nix +$(d)/src/contributing/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix) @rm -rf $@ $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features.nix (builtins.fromJSON (builtins.readFile $<))' @mv $@.tmp $@ -$(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(bindir)/nix +$(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(doc_nix) @rm -rf $@ $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features-shortlist.nix (builtins.fromJSON (builtins.readFile $<))' @mv $@.tmp $@ -$(d)/xp-features.json: $(bindir)/nix - $(trace-gen) $(dummy-env) $(bindir)/nix __dump-xp-features > $@.tmp +$(d)/xp-features.json: $(doc_nix) + $(trace-gen) $(dummy-env) $(doc_nix) __dump-xp-features > $@.tmp @mv $@.tmp $@ -$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix +$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(doc_nix) @cat doc/manual/src/language/builtins-prefix.md > $@.tmp $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<)).builtins' >> $@.tmp; @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp @mv $@.tmp $@ -$(d)/src/language/builtin-constants.md: $(d)/language.json $(d)/generate-builtin-constants.nix $(d)/src/language/builtin-constants-prefix.md $(bindir)/nix +$(d)/src/language/builtin-constants.md: $(d)/language.json 
$(d)/generate-builtin-constants.nix $(d)/src/language/builtin-constants-prefix.md $(doc_nix) @cat doc/manual/src/language/builtin-constants-prefix.md > $@.tmp $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtin-constants.nix (builtins.fromJSON (builtins.readFile $<)).constants' >> $@.tmp; @cat doc/manual/src/language/builtin-constants-suffix.md >> $@.tmp @mv $@.tmp $@ -$(d)/language.json: $(bindir)/nix - $(trace-gen) $(dummy-env) $(bindir)/nix __dump-language > $@.tmp +$(d)/language.json: $(doc_nix) + $(trace-gen) $(dummy-env) $(doc_nix) __dump-language > $@.tmp @mv $@.tmp $@ +# Generate "Upcoming release" notes (or clear it and remove from menu) +$(d)/src/release-notes/rl-next.md: $(d)/rl-next $(d)/rl-next/* + @if type -p changelog-d > /dev/null; then \ + echo " GEN " $@; \ + changelog-d doc/manual/rl-next > $@; \ + else \ + echo " NULL " $@; \ + true > $@; \ + fi + +$(d)/src/SUMMARY-rl-next.md: $(d)/src/release-notes/rl-next.md + $(trace-gen) true + @if [ -s $< ]; then \ + echo ' - [Upcoming release](release-notes/rl-next.md)' > $@; \ + else \ + true > $@; \ + fi + # Generate the HTML manual. .PHONY: manual-html manual-html: $(docdir)/manual/index.html install: $(docdir)/manual/index.html # Generate 'nix' manpages. +.PHONY: manpages +manpages: $(mandir)/man1/nix3-manpages install: $(mandir)/man1/nix3-manpages man: doc/manual/generated/man1/nix3-manpages all: doc/manual/generated/man1/nix3-manpages @@ -177,7 +207,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli # `@docroot@` is to be preserved for documenting the mechanism # FIXME: maybe contributing guides should live right next to the code # instead of in the manual -$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md +$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/store/types $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md $(d)/src/release-notes/rl-next.md $(trace-gen) \ tmp="$$(mktemp -d)"; \ cp -r doc/manual "$$tmp"; \ @@ -195,5 +225,3 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/ @rm -rf $(DESTDIR)$(docdir)/manual @mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual @rm -rf $(DESTDIR)$(docdir)/manual.tmp - -endif diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index d1b10109d..d04f32b49 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -1,7 +1,9 @@ -// redirect rules for anchors ensure backwards compatibility of URLs. -// this must be done on the client side, as web servers do not see the anchor part of the URL. +// redirect rules for URL fragments (client-side) to prevent link rot. +// this must be done on the client side, as web servers do not see the fragment part of the URL. +// it will only work with JavaScript enabled in the browser, but this is the best we can do here. +// see ./_redirects for path redirects (server-side) -// redirections are declared as follows: +// redirects are declared as follows: // each entry has as its key a path matching the requested URL path, relative to the mdBook document root.
// // IMPORTANT: it must specify the full path with file name and suffix @@ -19,6 +21,7 @@ const redirects = { "chap-distributed-builds": "advanced-topics/distributed-builds.html", "chap-post-build-hook": "advanced-topics/post-build-hook.html", "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", + "chap-writing-nix-expressions": "language/index.html", "part-command-ref": "command-ref/command-ref.html", "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges", diff --git a/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md b/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md new file mode 100644 index 000000000..3cf75a612 --- /dev/null +++ b/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md @@ -0,0 +1,7 @@ +--- +synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes +prs: 9547 +--- + +If a scheme, such as `github:`, is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. +Previously this only worked for schemes whose URIs used the `://` syntax. diff --git a/doc/manual/rl-next/cgroup-stats.md b/doc/manual/rl-next/cgroup-stats.md new file mode 100644 index 000000000..00853a0f8 --- /dev/null +++ b/doc/manual/rl-next/cgroup-stats.md @@ -0,0 +1,8 @@ +--- +synopsis: Include cgroup stats when building through the daemon +prs: 9598 +--- + +Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, +if both sides of the connection are this version of Nix or newer. + diff --git a/doc/manual/rl-next/config b/doc/manual/rl-next/config new file mode 100644 index 000000000..b3c2e868f --- /dev/null +++ b/doc/manual/rl-next/config @@ -0,0 +1,2 @@ +organization: NixOS +repository: nix diff --git a/doc/manual/rl-next/drv-string-parse-hang.md b/doc/manual/rl-next/drv-string-parse-hang.md new file mode 100644 index 000000000..1e041d3e9 --- /dev/null +++ b/doc/manual/rl-next/drv-string-parse-hang.md @@ -0,0 +1,6 @@ +--- +synopsis: Fix handling of truncated `.drv` files. +prs: 9673 +--- + +Previously a `.drv` that was truncated in the middle of a string would cause Nix to enter an infinite loop, eventually exhausting all memory and crashing. diff --git a/doc/manual/rl-next/env-size-reduction.md b/doc/manual/rl-next/env-size-reduction.md new file mode 100644 index 000000000..40a58bc28 --- /dev/null +++ b/doc/manual/rl-next/env-size-reduction.md @@ -0,0 +1,7 @@ +--- +synopsis: Reduce eval memory usage and wall time +prs: 9658 +--- + +Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. +This reduces memory usage during eval by around 2% and wall time by around 3%. diff --git a/doc/manual/rl-next/eval-system.md b/doc/manual/rl-next/eval-system.md new file mode 100644 index 000000000..a4696a56c --- /dev/null +++ b/doc/manual/rl-next/eval-system.md @@ -0,0 +1,12 @@ +--- +synopsis: Add new `eval-system` setting +prs: 4093 +--- + +Add a new `eval-system` option. +Unlike `system`, it just overrides the value of `builtins.currentSystem`. +This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. +In contrast, `system` also affects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense.
+ +`eval-system` only takes effect if it is non-empty. +If empty (the default), `system` is used as before, so there is no breakage. diff --git a/doc/manual/rl-next/hash-format-nix32.md b/doc/manual/rl-next/hash-format-nix32.md new file mode 100644 index 000000000..73e6fbb24 --- /dev/null +++ b/doc/manual/rl-next/hash-format-nix32.md @@ -0,0 +1,23 @@ +--- +synopsis: Rename hash format `base32` to `nix32` +prs: 9452 +--- + +Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for +[Base32](https://en.wikipedia.org/wiki/Base32). + +## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` + +For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` +parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value +remains as a deprecated alias for `"nix32"`. Please convert your code from: + +```nix +builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} +``` + +to + +```nix +builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} +``` \ No newline at end of file diff --git a/doc/manual/rl-next/ifd-eval-store.md b/doc/manual/rl-next/ifd-eval-store.md new file mode 100644 index 000000000..835e7e7a3 --- /dev/null +++ b/doc/manual/rl-next/ifd-eval-store.md @@ -0,0 +1,8 @@ +--- +synopsis: import-from-derivation builds the derivation in the build store +prs: 9661 +--- + +When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. + +Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. diff --git a/doc/manual/rl-next/mounted-ssh-store.md b/doc/manual/rl-next/mounted-ssh-store.md new file mode 100644 index 000000000..6df44dbb6 --- /dev/null +++ b/doc/manual/rl-next/mounted-ssh-store.md @@ -0,0 +1,8 @@ +--- +synopsis: Mounted SSH Store +issues: 7890 +prs: 7912 +--- + +Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). +This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. diff --git a/doc/manual/rl-next/nix-config-show.md b/doc/manual/rl-next/nix-config-show.md new file mode 100644 index 000000000..26b961b76 --- /dev/null +++ b/doc/manual/rl-next/nix-config-show.md @@ -0,0 +1,7 @@ +--- +synopsis: Rename to `nix config show` +issues: 7672 +prs: 9477 +--- + +`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. diff --git a/doc/manual/rl-next/nix-env-json-drv-path.md b/doc/manual/rl-next/nix-env-json-drv-path.md new file mode 100644 index 000000000..734cefd1b --- /dev/null +++ b/doc/manual/rl-next/nix-env-json-drv-path.md @@ -0,0 +1,6 @@ +--- +synopsis: Fix `nix-env --query --drv-path --json` +prs: 9257 +--- + +Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.
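To see the `nix-env` fix just above in action, one can compare the JSON output with and without the flag (a sketch; the exact set of fields in the output may vary):

```console
$ nix-env --query --installed --drv-path --json
```

With the fix, each entry in the resulting JSON is expected to carry its derivation path; previously `--drv-path` was silently ignored whenever `--json` was given.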
diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md new file mode 100644 index 000000000..2b718a66b --- /dev/null +++ b/doc/manual/rl-next/nix-hash-convert.md @@ -0,0 +1,47 @@ +--- +synopsis: Add `nix hash convert` +prs: 9452 +--- + +New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) subcommand with a fast track +to stabilization! Examples: + +- Convert the hash to `nix32`. + + ```bash + $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" + vw46m23bizj4n8afrc0fj19wrp7mj3c0 + ``` + `nix32` is a base32 encoding with a nix-specific character set. + Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input + hash. +- Convert the hash to the `sri` format that includes an algorithm specification: + ```bash + nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + or with an explicit `--to` format: + ```bash + nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` +- Assert the input format of the hash: + ```bash + nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + ``` + +The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion. + +## Related Deprecations + +The following commands are still available but will emit a deprecation warning. Please convert your code to +`nix hash convert`: + +- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. +- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. +- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. +- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2` + or even just `nix hash convert $hash1 $hash2` instead. diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md new file mode 100644 index 000000000..f5953bd72 --- /dev/null +++ b/doc/manual/rl-next/nix-profile-names.md @@ -0,0 +1,6 @@ +--- +synopsis: "`nix profile` now allows referring to elements by human-readable name" +prs: 8678 +--- + +[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version. diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md new file mode 100644 index 000000000..5b210289d --- /dev/null +++ b/doc/manual/rl-next/source-positions-in-errors.md @@ -0,0 +1,42 @@ +--- +synopsis: Source locations are printed more consistently in errors +issues: 561 +prs: 9555 +--- + +Source location information is now included in error messages more +consistently.
Given this code: + +```nix +let + attr = {foo = "bar";}; + key = {}; +in + attr.${key} +``` + +Previously, Nix would show this unhelpful message when attempting to evaluate +it: + +``` +error: + … while evaluating an attribute name + + error: value is a set while a string was expected +``` + +Now, the error message displays where the problematic value was found: + +``` +error: + … while evaluating an attribute name + + at bad.nix:4:11: + + 3| key = {}; + 4| in attr.${key} + | ^ + 5| + + error: value is a set while a string was expected +``` diff --git a/doc/manual/rl-next/stack-overflow-segfaults.md b/doc/manual/rl-next/stack-overflow-segfaults.md new file mode 100644 index 000000000..3d9753248 --- /dev/null +++ b/doc/manual/rl-next/stack-overflow-segfaults.md @@ -0,0 +1,32 @@ +--- +synopsis: Some stack overflow segfaults are fixed +issues: 9616 +prs: 9617 +--- + +The number of nested function calls has been restricted, to detect and report +infinite function call recursions. The default maximum call depth is 10,000 and +can be set with [the `max-call-depth` +option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). + +This fixes segfaults or the following unhelpful error message in many cases: + + error: stack overflow (possible infinite recursion) + +Before: + +``` +$ nix-instantiate --eval --expr '(x: x x) (x: x x)' +Segmentation fault: 11 +``` + +After: + +``` +$ nix-instantiate --eval --expr '(x: x x) (x: x x)' +error: stack overflow + + at «string»:1:14: + 1| (x: x x) (x: x x) + | ^ +``` diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md new file mode 100644 index 000000000..10b020956 --- /dev/null +++ b/doc/manual/rl-next/with-error-reporting.md @@ -0,0 +1,31 @@ +--- +synopsis: Better error reporting for `with` expressions +prs: 9658 +--- + +`with` expressions using non-attrset values to resolve variables are now reported with proper positions. 
+ +Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: + +``` +nix-repl> with 1; a +error: + … + + at «none»:0: (source not available) + + error: value is an integer while a set was expected +``` + +Now position information is preserved and reported as with most other errors: + +``` +nix-repl> with 1; a +error: + … while evaluating the first subexpression of a with expression + at «string»:1:1: + 1| with 1; a + | ^ + + error: value is an integer while a set was expected +``` diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 794f78a07..c67ddc6cb 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -2,7 +2,7 @@ - [Introduction](introduction.md) - [Quick Start](quick-start.md) -- [Installation](installation/installation.md) +- [Installation](installation/index.md) - [Supported Platforms](installation/supported-platforms.md) - [Installing a Binary Distribution](installation/installing-binary.md) - [Installing Nix from Source](installation/installing-source.md) @@ -20,6 +20,8 @@ - [File System Object](store/file-system-object.md) - [Store Object](store/store-object.md) - [Store Path](store/store-path.md) + - [Store Types](store/types/index.md) +{{#include ./store/types/SUMMARY.md}} - [Nix Language](language/index.md) - [Data Types](language/values.md) - [Language Constructs](language/constructs.md) @@ -31,11 +33,11 @@ - [Import From Derivation](language/import-from-derivation.md) - [Built-in Constants](language/builtin-constants.md) - [Built-in Functions](language/builtins.md) -- [Package Management](package-management/package-management.md) +- [Package Management](package-management/index.md) - [Profiles](package-management/profiles.md) - [Garbage Collection](package-management/garbage-collection.md) - [Garbage Collector Roots](package-management/garbage-collector-roots.md) -- [Advanced Topics](advanced-topics/advanced-topics.md) +- [Advanced Topics](advanced-topics/index.md) - [Sharing Packages Between Machines](package-management/sharing-packages.md) - [Serving a Nix store via HTTP](package-management/binary-cache-substituter.md) - [Copying Closures via SSH](package-management/copy-closure.md) @@ -45,7 +47,7 @@ - [Tuning Cores and Jobs](advanced-topics/cores-vs-jobs.md) - [Verifying Build Reproducibility](advanced-topics/diff-hook.md) - [Using the `post-build-hook`](advanced-topics/post-build-hook.md) -- [Command Reference](command-ref/command-ref.md) +- [Command Reference](command-ref/index.md) - [Common Options](command-ref/opt-common.md) - [Common Environment Variables](command-ref/env-common.md) - [Main Commands](command-ref/main-commands.md) @@ -102,19 +104,20 @@ - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) -- [Protocols](protocols/protocols.md) +- [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) -- [Contributing](contributing/contributing.md) +- [Contributing](contributing/index.md) - [Hacking](contributing/hacking.md) - [Testing](contributing/testing.md) - [Documentation](contributing/documentation.md) - [Experimental Features](contributing/experimental-features.md) - [CLI guideline](contributing/cli-guideline.md) - [C++ style guide](contributing/cxx.md) -- [Release 
Notes](release-notes/release-notes.md) - - [Release X.Y (202?-??-??)](release-notes/rl-next.md) +- [Release Notes](release-notes/index.md) +{{#include ./SUMMARY-rl-next.md}} + - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md) - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md) - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md) - [Release 2.16 (2023-05-31)](release-notes/rl-2.16.md) diff --git a/doc/manual/src/advanced-topics/advanced-topics.md b/doc/manual/src/advanced-topics/index.md similarity index 100% rename from doc/manual/src/advanced-topics/advanced-topics.md rename to doc/manual/src/advanced-topics/index.md diff --git a/doc/manual/src/architecture/architecture.md b/doc/manual/src/architecture/architecture.md index 79429508f..2fec4ed20 100644 --- a/doc/manual/src/architecture/architecture.md +++ b/doc/manual/src/architecture/architecture.md @@ -52,7 +52,7 @@ The following [concept map] shows its main components (rectangles), the objects '---------------' ``` -At the top is the [command line interface](../command-ref/command-ref.md) that drives the underlying layers. +At the top is the [command line interface](../command-ref/index.md) that drives the underlying layers. The [Nix language](../language/index.md) evaluator transforms Nix expressions into self-contained *build plans*, which are used to derive *build results* from referenced *build inputs*. diff --git a/doc/manual/src/command-ref/command-ref.md b/doc/manual/src/command-ref/index.md similarity index 100% rename from doc/manual/src/command-ref/command-ref.md rename to doc/manual/src/command-ref/index.md diff --git a/doc/manual/src/contributing/cli-guideline.md b/doc/manual/src/contributing/cli-guideline.md index e53d2d178..e90d6de8d 100644 --- a/doc/manual/src/contributing/cli-guideline.md +++ b/doc/manual/src/contributing/cli-guideline.md @@ -87,7 +87,7 @@ impacted the most by bad user experience. and [aligning of text](#text-alignment). - [Autocomplete](#shell-completion) of options. - Examples of such commands: `nix doctor`, `nix edit`, `nix eval`, ... + Examples of such commands: `nix edit`, `nix eval`, ... - **Utility and scripting commands** @@ -426,7 +426,7 @@ This leads to the following guidelines: ### Examples -This is bad, because all keys must be assumed to be store implementations: +This is bad, because all keys must be assumed to be store types: ```json { diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index fe08ceb94..9a03ac9b6 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -10,7 +10,7 @@ $ cd nix The following instructions assume you already have some version of Nix installed locally, so that you can use it to set up the development environment. If you don't have it installed, follow the [installation instructions]. -[installation instructions]: ../installation/installation.md +[installation instructions]: ../installation/index.md ## Building Nix with flakes @@ -31,7 +31,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im To get a shell with one of the other [supported compilation environments](#compilation-environments): ```console -$ nix develop .#native-clang11StdenvPackages +$ nix develop .#native-clangStdenvPackages ``` > **Note** @@ -64,6 +64,27 @@ $ nix build You can also build Nix for one of the [supported platforms](#platforms). +## Makefile variables + +You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run +`make install`. 
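+
+For example, a hypothetical invocation from inside the development shell might look as follows; this is only a
+sketch, assuming your shell defines `$out` (as `nix develop` does with a dummy path) and `$NIX_BUILD_CORES`:
+
+```console
+$ make install -j $NIX_BUILD_CORES profiledir=$out/etc/profile.d sysconfdir=$out/etc
+```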
+
+You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment
+variables to override `Makefile` variables.
+
+- `ENABLE_BUILD=yes` to enable building the C++ code.
+- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
+
+  The docs can take a while to build, so you may want to disable this for local development.
+- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
+- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
+- `OPTIMIZE=1` to enable optimizations.
+- `libraries=libutil programs=` to only build a specific library (this will
+  fail in the linking phase if you don't have the other libraries built, but is
+  useful for checking types).
+- `libraries= programs=nix` to only build a specific program (this will not, in
+  general, work, because the programs need the libraries).
+
## Building Nix

To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
@@ -75,7 +96,7 @@ $ nix-shell

To get a shell with one of the other [supported compilation environments](#compilation-environments):

```console
-$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages
+$ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages
```

> **Note**
@@ -146,6 +167,31 @@ $ nix build .#packages.aarch64-linux.default

Cross-compiled builds are available for ARMv6 (`armv6l-linux`) and ARMv7 (`armv7l-linux`).
Add more [system types](#system-type) to `crossSystems` in `flake.nix` to bootstrap Nix on unsupported platforms.

+### Building for multiple platforms at once
+
+It is useful to perform multiple cross and native builds on the same source tree,
+for example to ensure that better support for one platform doesn't break the build for another.
+In order to facilitate this, Nix has some support for being built out of tree – that is, placing build artefacts in a different directory than the source code:
+
+1. Create a directory for the build, e.g.
+
+   ```bash
+   mkdir build
+   ```
+
+2. Run the configure script from that directory, e.g.
+
+   ```bash
+   cd build
+   ../configure
+   ```
+
+3. Run make from the source directory, but with the build directory specified, e.g.
+
+   ```bash
+   make builddir=build
+   ```
+
## System type

Nix uses a string with the following format to identify the *system type* or *platform* it runs on:
@@ -220,3 +266,82 @@ Configure your editor to use the `clangd` from the shell, either by running it i
> For some editors (e.g. Visual Studio Code), you may need to install a [special extension](https://open-vsx.org/extension/llvm-vs-code-extensions/vscode-clangd) for the editor to interact with `clangd`.
> Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim).
> Editor-specific setup is typically opinionated, so we will not cover it here in more detail.
+
+## Add a release note
+
+`doc/manual/rl-next` contains release note entries for all unreleased changes.
+
+User-visible changes should come with a release note.
+
+### Add an entry
+
+Here's what a complete entry looks like. The file name is not incorporated in the document.
+
+```
+---
+synopsis: Basically a title
+issues: 1234
+prs: 1238
+---
+
+Here's one or more paragraphs that describe the change.
+
+- It's markdown
+- Add references to the manual using @docroot@
+```
+
+Significant changes should add the following header, which moves them to the top.
+
+```
+significance: significant
+```
+
+
+See also the [format documentation](https://github.com/haskell/cabal/blob/master/CONTRIBUTING.md#changelog).
+
+### Build process
+
+Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
+Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
+
+## Branches
+
+- [`master`](https://github.com/NixOS/nix/commits/master)
+
+  The main development branch. All changes are approved and merged here.
+  When developing a change, create a branch based on the latest `master`.
+
+  Maintainers try to [keep it in a release-worthy state](#reverting).
+
+- [`maintenance-*.*`](https://github.com/NixOS/nix/branches/all?query=maintenance)
+
+  These branches are the subject of backports only, and are
+  also [kept](#reverting) in a release-worthy state.
+
+  See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
+
+- [`latest-release`](https://github.com/NixOS/nix/tree/latest-release)
+
+  The latest patch release of the latest minor version.
+
+  See [`maintainers/release-process.md`](https://github.com/NixOS/nix/blob/master/maintainers/release-process.md)
+
+- [`backport-*-to-*`](https://github.com/NixOS/nix/branches/all?query=backport)
+
+  Generally branches created by the backport action.
+
+  See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
+
+- [_other_](https://github.com/NixOS/nix/branches/all)
+
+  Branches that do not conform to the above patterns should be feature branches.
+
+## Reverting
+
+If a change turns out to have been merged by mistake, or to contain a regression, it may be reverted.
+A revert is not a rejection of the contribution, but merely part of an effective development process.
+It keeps development running smoothly, with minimal uncertainty and less overhead.
+If maintainers had to worry too much about avoiding reverts, they would not be able to merge as much.
+By embracing reverts as a good part of the development process, everyone wins.
+
+However, taking a step back may be frustrating, so maintainers will be extra supportive on the next try.
diff --git a/doc/manual/src/contributing/contributing.md b/doc/manual/src/contributing/index.md
similarity index 100%
rename from doc/manual/src/contributing/contributing.md
rename to doc/manual/src/contributing/index.md
diff --git a/doc/manual/src/contributing/testing.md b/doc/manual/src/contributing/testing.md
index 0b45b88a3..d8d162379 100644
--- a/doc/manual/src/contributing/testing.md
+++ b/doc/manual/src/contributing/testing.md
@@ -20,6 +20,7 @@ The unit tests are defined using the [googletest] and [rapidcheck] frameworks.

[googletest]: https://google.github.io/googletest/
[rapidcheck]: https://github.com/emil-e/rapidcheck
+[property testing]: https://en.wikipedia.org/wiki/Property_testing

### Source and header layout

@@ -28,34 +29,50 @@
> ```
> src
> ├── libexpr
+> │   ├── local.mk
> │   ├── value/context.hh
> │   ├── value/context.cc
+> │   …
+> │
+> ├── tests
> │   │
> │   …
-> └── tests
-> │   ├── value/context.hh
-> │   ├── value/context.cc
+> │   └── unit
+> │       ├── libutil
+> │       │   ├── local.mk
+> │       │   …
+> │       │   └── data
+> │       │       ├── git/tree.txt
+> │       │       …
> │       │
-> │   …
-> │
-> ├── unit-test-data
-> │   ├── libstore
-> │   │   ├── worker-protocol/content-address.bin
-> │   │   …
-> │   …
+> │       ├── libexpr-support
+> │       │   ├── local.mk
+> │       │   └── tests
+> │       │       ├── value/context.hh
+> │       │       ├── value/context.cc
+> │       │       …
+> │       │
+> │       ├── libexpr
+> │       …   ├── local.mk
+> │           ├── value/context.cc
+> │           …
> …
> ```

-The unit tests for each Nix library (`libnixexpr`, `libnixstore`, etc..) live inside a directory `src/${library_shortname}/tests` within the directory for the library (`src/${library_shortname}`).
+The tests for each Nix library (`libnixexpr`, `libnixstore`, etc.) live inside a directory `tests/unit/${library_name_without-nix}`.
+Given an interface (header) and implementation pair in the original library, say, `src/libexpr/value/context.{hh,cc}`, we write tests for it in `tests/unit/libexpr/value/context.cc`, and (possibly) declare/define additional interfaces for testing purposes in `tests/unit/libexpr-support/tests/value/context.{hh,cc}`.

-The data is in `unit-test-data`, with one subdir per library, with the same name as where the code goes.
-For example, `libnixstore` code is in `src/libstore`, and its test data is in `unit-test-data/libstore`.
-The path to the `unit-test-data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`.
+Data for unit tests is stored in a `data` subdir of the directory for each unit test executable.
+For example, `libnixstore` code is in `src/libstore`, and its test data is in `tests/unit/libstore/data`.
+The path to that `data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`.
+Note that each executable only gets the data for its tests.

-> **Note**
-> Due to the way googletest works, downstream unit test executables will actually include and re-run upstream library tests.
-> Therefore it is important that the same value for `_NIX_TEST_UNIT_DATA` be used with the tests for each library.
-> That is why we have the test data nested within a single `unit-test-data` directory.
+The unit test support libraries are in `tests/unit/${library_name_without-nix}-support`.
+All headers are in a `tests` subdirectory so they are included with `#include "tests/..."`.
+
+The use of all these separate directories for the unit tests might seem inconvenient, as for example the tests are not "right next to" the part of the code they are testing.
+But organizing the tests this way has one big benefit:
+there is no risk of any build-system wildcards for the library accidentally picking up test code that should not be built and installed as part of the library.

### Running tests

@@ -69,7 +86,7 @@ See [functional characterisation testing](#characterisation-testing-functional)
Like with the functional characterisation, `_NIX_TEST_ACCEPT=1` is also used.
For example:

```shell-session
-$ _NIX_TEST_ACCEPT=1 make libstore-tests-exe_RUN
+$ _NIX_TEST_ACCEPT=1 make libstore-tests_RUN
...
[  SKIPPED ] WorkerProtoTest.string_read
[  SKIPPED ] WorkerProtoTest.string_write
@@ -80,6 +97,18 @@ $ _NIX_TEST_ACCEPT=1 make libstore-tests_RUN

will regenerate the "golden master" expected result for the `libnixstore` characterisation tests.
The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything.

+### Unit test support libraries
+
+There are headers and code which are not just used to test the library in question, but also downstream libraries.
+For example, we do [property testing] with the [rapidcheck] library.
+This requires writing `Arbitrary` "instances", which are used to describe how to generate values of a given type for the sake of running property tests.
+Because types contain other types, `Arbitrary` "instances" for some type are not just useful for testing that type, but also any other type that contains it.
+Downstream types frequently contain upstream types, so it is very important that we share `Arbitrary` instances so that downstream libraries' property tests can also use them.
+
+It is important that these testing libraries don't contain any actual tests themselves.
+On some platforms they would be run as part of every test executable that uses them, which is redundant.
+On other platforms they wouldn't be run at all.
+
## Functional tests

The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`.
diff --git a/doc/manual/src/installation/installation.md b/doc/manual/src/installation/index.md
similarity index 100%
rename from doc/manual/src/installation/installation.md
rename to doc/manual/src/installation/index.md
diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md
index ffabb250a..0dc989159 100644
--- a/doc/manual/src/installation/installing-binary.md
+++ b/doc/manual/src/installation/installing-binary.md
@@ -1,26 +1,60 @@
# Installing a Binary Distribution

-The easiest way to install Nix is to run the following command:
+To install the latest version of Nix, run the following command:

```console
$ curl -L https://nixos.org/nix/install | sh
```

-This will run the installer interactively (causing it to explain what
-it is doing more explicitly), and perform the default "type" of install
-for your platform:
-- single-user on Linux
-- multi-user on macOS
+This performs the default type of installation for your platform:

-  > **Notes on read-only filesystem root in macOS 10.15 Catalina +**
-  >
-  > - It took some time to support this cleanly. You may see posts,
-  >   examples, and tutorials using obsolete workarounds.
-  > - Supporting it cleanly made macOS installs too complex to qualify
-  >   as single-user, so this type is no longer supported on macOS.
+- [Multi-user](#multi-user-installation):
+  - Linux with systemd and without SELinux
+  - macOS
+- [Single-user](#single-user-installation):
+  - Linux without systemd
+  - Linux with SELinux

-We recommend the multi-user install if it supports your platform and
-you can authenticate with `sudo`.
+We recommend the multi-user installation if it supports your platform and you can authenticate with `sudo`.
+
+The installer can be configured with various command line arguments and environment variables.
+To show available command line flags:
+
+```console
+$ curl -L https://nixos.org/nix/install | sh -s -- --help
+```
+
+To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball).
+
+# Installing a pinned Nix version from a URL
+
+Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
+The directory for each version contains the corresponding SHA-256 hash.
+
+All installation scripts are invoked the same way:
+
+```console
+$ export VERSION=2.19.2
+$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh
+```
+
+# Multi User Installation
+
+The multi-user Nix installation creates system users and a system service for the Nix daemon.
+
+Supported systems:
+
+- Linux running systemd, with SELinux disabled
+- macOS
+
+To explicitly instruct the installer to perform a multi-user installation on your system:
+
+```console
+$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
+```
+
+You can run this under your usual user account or `root`.
+The script will invoke `sudo` as needed.

# Single User Installation

To explicitly select a single-user installation on your system:

```console
$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon
```

-This will perform a single-user installation of Nix, meaning that `/nix`
-is owned by the invoking user. You can run this under your usual user
-account or root. The script will invoke `sudo` to create `/nix`
-if it doesn’t already exist. If you don’t have `sudo`, you should
-manually create `/nix` first as root, e.g.:
+In a single-user installation, `/nix` is owned by the invoking user.
+The script will invoke `sudo` to create `/nix` if it doesn’t already exist.
+If you don’t have `sudo`, manually create `/nix` as `root`:

```console
-$ mkdir /nix
-$ chown alice /nix
+$ su root
+# mkdir /nix
+# chown alice /nix
```

-The install script will modify the first writable file from amongst
-`.bash_profile`, `.bash_login` and `.profile` to source
-`~/.nix-profile/etc/profile.d/nix.sh`. You can set the
-`NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing
-the install script to disable this behaviour.
+# Installing from a binary tarball

-# Multi User Installation
+You can also download a binary tarball that contains Nix and all its dependencies:
+- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../contributing/hacking.md#platforms)
+- Download and unpack the tarball
+- Run the installer

-The multi-user Nix installation creates system users, and a system
-service for the Nix daemon.
-
-**Supported Systems**
-- Linux running systemd, with SELinux disabled
-- macOS
-
-You can instruct the installer to perform a multi-user installation on
-your system:
-
-```console
-$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
-```
-
-The multi-user installation of Nix will create build users between the
-user IDs 30001 and 30032, and a group with the group ID 30000. You
-can run this under your usual user account or root. The script
-will invoke `sudo` as needed.
-
-> **Note**
+> **Example**
>
-> If you need Nix to use a different group ID or user ID set, you will
-> have to download the tarball manually and [edit the install
-> script](#installing-from-a-binary-tarball).
+> ```console
+> $ pushd $(mktemp -d)
+> $ export VERSION=2.19.2
+> $ export SYSTEM=x86_64-linux
+> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz
+> $ tar xf nix-$VERSION-$SYSTEM.tar.xz
+> $ cd nix-$VERSION-$SYSTEM
+> $ ./install
+> $ popd
+> ```

-The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist.
-The installer will first back up these files with a `.backup-before-nix`
-extension. The installer will also create `/etc/profile.d/nix.sh`.
+The installer can be customised with the environment variables declared in the file named `install-multi-user`.
+
+## Native packages for Linux distributions
+
+The Nix community maintains installers for some Linux distributions in their [native packaging format](https://nix-community.github.io/nix-installers/).

# macOS Installation
+[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
-
-We believe we have ironed out how to cleanly support the read-only root
+We believe we have ironed out how to cleanly support the read-only root
file system on modern macOS. New installs will do this automatically.

This section previously detailed the situation, options, and trade-offs,
@@ -126,33 +148,3 @@ this to run the installer, but it may help if you run into trouble:
boot process to avoid problems loading or restoring any programs that
need access to your Nix store

-# Installing a pinned Nix version from a URL
-
-Version-specific installation URLs for all Nix versions
-since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
-The corresponding SHA-256 hash can be found in the directory for the given version.
-
-These install scripts can be used the same as usual:
-
-```console
-$ curl -L https://releases.nixos.org/nix/nix-<version>/install | sh
-```
-
-# Installing from a binary tarball
-
-You can also download a binary tarball that contains Nix and all its
-dependencies. (This is what the install script at
-<https://nixos.org/nix/install> does automatically.) You should unpack
-it somewhere (e.g. in `/tmp`), and then run the script named `install`
-inside the binary tarball:
-
-```console
-$ cd /tmp
-$ tar xfj nix-1.8-x86_64-darwin.tar.bz2
-$ cd nix-1.8-x86_64-darwin
-$ ./install
-```
-
-If you need to edit the multi-user installation script to use different
-group ID or a different user ID range, modify the variables set in the
-file named `install-multi-user`.
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md
index d4babf1ea..807e82517 100644
--- a/doc/manual/src/installation/prerequisites-source.md
+++ b/doc/manual/src/installation/prerequisites-source.md
@@ -72,7 +72,7 @@
    This is an optional dependency and can be disabled
    by providing a `--disable-cpuid` to the `configure` script.

-  - Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and
+  - Unless `./configure --disable-unit-tests` is specified, GoogleTest (GTest) and
    RapidCheck are required, which are available at
    <https://google.github.io/googletest/> and <https://github.com/emil-e/rapidcheck> respectively.
diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md
index 6d09f54d8..d1b64b80b 100644
--- a/doc/manual/src/installation/upgrading.md
+++ b/doc/manual/src/installation/upgrading.md
@@ -1,5 +1,40 @@
# Upgrading Nix

+> **Note**
+>
+> These upgrade instructions apply to regular Linux distributions where Nix was installed following the [installation instructions in this manual](./index.md).
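+
+Before upgrading, it can help to record the version you are currently running; the output below is only an example:
+
+```console
+$ nix --version
+nix (Nix) 2.18.1
+```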
+ +First, find the name of the current [channel](@docroot@/command-ref/nix-channel.md) through which Nix is distributed: + +```console +$ nix-channel --list +``` + +By default this should return an entry for Nixpkgs: + +```console +nixpkgs https://nixos.org/channels/nixpkgs-23.05 +``` + +Check which Nix version will be installed: + +```console +$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-23.11 --run "nix --version" +nix (Nix) 2.18.1 +``` + +> **Warning** +> +> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with `nix-build` or `nix-store --realise`, may change the database schema! +> Reverting to an older version of Nix may therefore require purging the store database before it can be used. + +Update the channel entry: + +```console +$ nix-channel --remove nixpkgs +$ nix-channel --add https://nixos.org/channels/nixpkgs-23.11 nixpkgs +``` + Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c 'nix-channel --update && nix-env --install --attr nixpkgs.nix && diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md index 282b75af2..5a6c00cd4 100644 --- a/doc/manual/src/language/advanced-attributes.md +++ b/doc/manual/src/language/advanced-attributes.md @@ -257,29 +257,18 @@ Derivations can declare some infrequently used optional attributes. of the environment (typically, a few hundred kilobyte). - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\ - If this attribute is set to `true` and [distributed building is - enabled](../advanced-topics/distributed-builds.md), then, if - possible, the derivation will be built locally instead of forwarded - to a remote machine. This is appropriate for trivial builders - where the cost of doing a download or remote build would exceed - the cost of building locally. + If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine. + This is useful for derivations that are cheapest to build locally. - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\ - If this attribute is set to `false`, then Nix will always build this - derivation; it will not try to substitute its outputs. This is - useful for very trivial derivations (such as `writeText` in Nixpkgs) - that are cheaper to build than to substitute from a binary cache. + If this attribute is set to `false`, then Nix will always build this derivation (locally or remotely); it will not try to substitute its outputs. + This is useful for derivations that are cheaper to build than to substitute. - You may disable the effects of this attibute by enabling the - `always-allow-substitutes` configuration option in Nix. + This attribute can be ignored by setting [`always-allow-substitutes`](@docroot@/command-ref/conf-file.md#conf-always-allow-substitutes) to `true`. > **Note** > - > You need to have a builder configured which satisfies the - > derivation’s `system` attribute, since the derivation cannot be - > substituted. Thus it is usually a good idea to align `system` with - > `builtins.currentSystem` when setting `allowSubstitutes` to - > `false`. For most trivial derivations this should be the case. 
+ > If set to `false`, the [`builder`](./derivations.md#attr-builder) should be able to run on the system type specified in the [`system` attribute](./derivations.md#attr-system), since the derivation cannot be substituted. - [`__structuredAttrs`]{#adv-attr-structuredAttrs}\ If the special attribute `__structuredAttrs` is set to `true`, the other derivation diff --git a/doc/manual/src/language/constructs.md b/doc/manual/src/language/constructs.md index a3590f55d..a82ec5960 100644 --- a/doc/manual/src/language/constructs.md +++ b/doc/manual/src/language/constructs.md @@ -132,6 +132,32 @@ a = src-set.a; b = src-set.b; c = src-set.c; when used while defining local variables in a let-expression or while defining a set. +In a `let` expression, `inherit` can be used to selectively bring specific attributes of a set into scope. For example + + +```nix +let + x = { a = 1; b = 2; }; + inherit (builtins) attrNames; +in +{ + names = attrNames x; +} +``` + +is equivalent to + +```nix +let + x = { a = 1; b = 2; }; +in +{ + names = builtins.attrNames x; +} +``` + +both evaluate to `{ names = [ "a" "b" ]; }`. + ## Functions Functions have the following form: @@ -146,65 +172,65 @@ three kinds of patterns: - If a pattern is a single identifier, then the function matches any argument. Example: - + ```nix let negate = x: !x; concat = x: y: x + y; in if negate true then concat "foo" "bar" else "" ``` - + Note that `concat` is a function that takes one argument and returns a function that takes another argument. This allows partial parameterisation (i.e., only filling some of the arguments of a function); e.g., - + ```nix map (concat "foo") [ "bar" "bla" "abc" ] ``` - + evaluates to `[ "foobar" "foobla" "fooabc" ]`. - A *set pattern* of the form `{ name1, name2, …, nameN }` matches a set containing the listed attributes, and binds the values of those attributes to variables in the function body. For example, the function - + ```nix { x, y, z }: z + y + x ``` - + can only be called with a set containing exactly the attributes `x`, `y` and `z`. No other attributes are allowed. If you want to allow additional arguments, you can use an ellipsis (`...`): - + ```nix { x, y, z, ... }: z + y + x ``` - + This works on any set that contains at least the three named attributes. - + It is possible to provide *default values* for attributes, in which case they are allowed to be missing. A default value is specified by writing `name ? e`, where *e* is an arbitrary expression. For example, - + ```nix { x, y ? "foo", z ? "bar" }: z + y + x ``` - + specifies a function that only requires an attribute named `x`, but optionally accepts `y` and `z`. - An `@`-pattern provides a means of referring to the whole value being matched: - + ```nix args@{ x, y, z, ... }: z + y + x + args.a ``` - + but can also be written as: - + ```nix { x, y, z, ... } @ args: z + y + x + args.a ``` diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md index 2aded5527..cbb30d074 100644 --- a/doc/manual/src/language/derivations.md +++ b/doc/manual/src/language/derivations.md @@ -274,7 +274,7 @@ The [`builder`](#attr-builder) is executed as follows: directory (typically, `/nix/store`). - `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs` - is set to `true` for the dervation. A detailed explanation of this + is set to `true` for the derivation. A detailed explanation of this behavior can be found in the [section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs). 
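+
+As a quick check of the `inherit` example added to `constructs.md` above, both spellings can be evaluated
+from the command line. This is a sketch using `nix-instantiate`, which ships with every Nix installation:
+
+```console
+$ nix-instantiate --eval --strict --expr \
+    'let x = { a = 1; b = 2; }; inherit (builtins) attrNames; in { names = attrNames x; }'
+{ names = [ "a" "b" ]; }
+```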
diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index 0bb656746..aea68a441 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -116,6 +116,7 @@ [store path]: ../glossary.md#gloss-store-path Paths can include [string interpolation] and can themselves be [interpolated in other expressions]. + [interpolated in other expressions]: ./string-interpolation.md#interpolated-expressions At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path. diff --git a/doc/manual/src/package-management/package-management.md b/doc/manual/src/package-management/index.md similarity index 100% rename from doc/manual/src/package-management/package-management.md rename to doc/manual/src/package-management/index.md diff --git a/doc/manual/src/protocols/protocols.md b/doc/manual/src/protocols/index.md similarity index 100% rename from doc/manual/src/protocols/protocols.md rename to doc/manual/src/protocols/index.md diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md index 1d2688ede..75853ced7 100644 --- a/doc/manual/src/quick-start.md +++ b/doc/manual/src/quick-start.md @@ -1,99 +1,43 @@ # Quick Start -This chapter is for impatient people who don't like reading -documentation. For more in-depth information you are kindly referred -to subsequent chapters. +This chapter is for impatient people who don't like reading documentation. +For more in-depth information you are kindly referred to subsequent chapters. -1. Install Nix by running the following: +1. Install Nix: ```console $ curl -L https://nixos.org/nix/install | sh ``` The install script will use `sudo`, so make sure you have sufficient rights. - On Linux, `--daemon` can be omitted for a single-user install. - For other installation methods, see [here](installation/installation.md). + For other installation methods, see the detailed [installation instructions](installation/index.md). -1. See what installable packages are currently available in the - channel: +1. Run software without installing it permanently: ```console - $ nix-env --query --available --attr-path - nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3 - nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5 - nixpkgs.firefox firefox-33.0.2 - nixpkgs.hello hello-2.9 - nixpkgs.libxslt libxslt-1.1.28 - … + $ nix-shell --packages cowsay lolcat ``` -1. Install some packages from the channel: + This downloads the specified packages with all their dependencies, and drops you into a Bash shell where the commands provided by those packages are present. + This will not affect your normal environment: ```console - $ nix-env --install --attr nixpkgs.hello + [nix-shell:~]$ cowsay Hello, Nix! | lolcat ``` - This should download pre-built packages; it should not build them - locally (if it does, something went wrong). - -1. Test that they work: + Exiting the shell will make the programs disappear again: ```console - $ which hello - /home/eelco/.nix-profile/bin/hello - $ hello - Hello, world! - ``` - -1. Uninstall a package: - - ```console - $ nix-env --uninstall hello - ``` - -1. You can also test a package without installing it: - - ```console - $ nix-shell --packages hello - ``` - - This builds or downloads GNU Hello and its dependencies, then drops - you into a Bash shell where the `hello` command is present, all - without affecting your normal environment: - - ```console - [nix-shell:~]$ hello - Hello, world! 
-   [nix-shell:~]$ exit
-
-   $ hello
-   hello: command not found
+   $ lolcat
+   lolcat: command not found
   ```

-1. To keep up-to-date with the channel, do:
+1. Search for more packages on <https://search.nixos.org/packages> to try them out.
+
+1. Free up storage space:

   ```console
-   $ nix-channel --update nixpkgs
-   $ nix-env --upgrade '*'
-   ```
-
-   The latter command will upgrade each installed package for which
-   there is a “newer” version (as determined by comparing the version
-   numbers).
-
-1. If you're unhappy with the result of a `nix-env` action (e.g., an
-   upgraded package turned out not to work properly), you can go back:
-
-   ```console
-   $ nix-env --rollback
-   ```
-
-1. You should periodically run the Nix garbage collector to get rid of
-   unused packages, since uninstalls or upgrades don't actually delete
-   them:
-
-   ```console
-   $ nix-collect-garbage --delete-old
+   $ nix-collect-garbage
   ```
diff --git a/doc/manual/src/release-notes/release-notes.md b/doc/manual/src/release-notes/index.md
similarity index 100%
rename from doc/manual/src/release-notes/release-notes.md
rename to doc/manual/src/release-notes/index.md
diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-2.19.md
similarity index 82%
rename from doc/manual/src/release-notes/rl-next.md
rename to doc/manual/src/release-notes/rl-2.19.md
index 422f1fce8..ba6eb9c64 100644
--- a/doc/manual/src/release-notes/rl-next.md
+++ b/doc/manual/src/release-notes/rl-2.19.md
@@ -1,7 +1,7 @@
-# Release X.Y (202?-??-??)
+# Release 2.19 (2023-11-17)

-- The experimental nix command can now act as a [shebang interpreter](@docroot@/command-ref/new-cli/nix.md#shebang-interpreter)
-  by appending the contents of any `#! nix` lines and the script's location to a single call.
+- The experimental `nix` command can now act as a [shebang interpreter](@docroot@/command-ref/new-cli/nix.md#shebang-interpreter)
+  by appending the contents of any `#! nix` lines and the script's location into a single call.

- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters.

@@ -9,16 +9,16 @@

- The experimental feature `repl-flake` is no longer needed, as its functionality is now part of the `flakes` experimental feature. To get the previous behavior, use the `--file/--expr` flags accordingly.

-- Introduce new flake installable syntax `flakeref#.attrPath` where the "." prefix denotes no searching of default attribute prefixes like `packages.` or `legacyPackages.`.
+- There is a new flake installable syntax `flakeref#.attrPath` where the "." prefix specifies that `attrPath` is interpreted from the root of the flake outputs, with no searching of default attribute prefixes like `packages.` or `legacyPackages.`.

- Nix adds `apple-virt` to the default system features on macOS systems that support virtualization. This is similar to what's done for the `kvm` system feature on Linux hosts.

-- Introduce a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash).
+- Add a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash).

- `nix-shell` shebang lines now support single-quoted arguments.

- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
-  As described in the document for that feature, this is because we anticipate polishing it and then stabilizing it before the rest of Flakes.
+  This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/contributing/experimental-features.md#xp-flakes).

- The interface for creating and updating lock files has been overhauled:

@@ -37,8 +37,8 @@

- [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md)
  (experimental) now returns a JSON map rather than JSON list.
-  The `path` field of each object has instead become the key in th outer map, since it is unique.
-  The `valid` field also goes away because we just use null instead.
+  The `path` field of each object has instead become the key in the outer map, since it is unique.
+  The `valid` field also goes away because we just use `null` instead.

- Old way:
diff --git a/doc/manual/src/store/index.md b/doc/manual/src/store/index.md
index 8a5305062..f1e8f1402 100644
--- a/doc/manual/src/store/index.md
+++ b/doc/manual/src/store/index.md
@@ -2,4 +2,4 @@

The *Nix store* is an abstraction to store immutable file system data (such as software packages) that can have dependencies on other such data.

-There are multiple implementations of Nix stores with different capabilities, such as the actual filesystem (`/nix/store`) or binary caches.
+There are [multiple types of Nix stores](./types/index.md) with different capabilities, such as the default one on the [local filesystem](./types/local-store.md) (`/nix/store`) or [binary caches](./types/http-binary-cache-store.md).
diff --git a/src/nix/help-stores.md b/doc/manual/src/store/types/index.md.in
similarity index 74%
rename from src/nix/help-stores.md
rename to doc/manual/src/store/types/index.md.in
index 47ba9b94d..a35161ce8 100644
--- a/src/nix/help-stores.md
+++ b/doc/manual/src/store/types/index.md.in
@@ -1,6 +1,6 @@
-R"(
+Nix supports different types of stores:

-Nix supports different types of stores. These are described below.
+@store-types@

## Store URL format

@@ -29,18 +29,15 @@ supported settings for each store type are documented below.

The special store URL `auto` causes Nix to automatically select a
store as follows:

-* Use the [local store](#local-store) `/nix/store` if `/nix/var/nix`
+* Use the [local store](./local-store.md) `/nix/store` if `/nix/var/nix`
  is writable by the current user.

* Otherwise, if `/nix/var/nix/daemon-socket/socket` exists, [connect
-  to the Nix daemon listening on that socket](#local-daemon-store).
+  to the Nix daemon listening on that socket](./local-daemon-store.md).

-* Otherwise, on Linux only, use the [local chroot store](#local-store)
+* Otherwise, on Linux only, use the [local chroot store](./local-store.md)
  `~/.local/share/nix/root`, which will be created automatically if it
  does not exist.

-* Otherwise, use the [local store](#local-store) `/nix/store`.
+* Otherwise, use the [local store](./local-store.md) `/nix/store`.
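+
+For example, a given store URL can be checked with `nix store ping`. This is a sketch, assuming the
+`nix-command` experimental feature is enabled and a Nix daemon is running (output abbreviated):
+
+```console
+$ nix --extra-experimental-features nix-command store ping --store daemon
+Store URL: daemon
+```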
-@stores@ - -)" diff --git a/doc/manual/utils.nix b/doc/manual/utils.nix index 849832b2c..19ff49b64 100644 --- a/doc/manual/utils.nix +++ b/doc/manual/utils.nix @@ -1,5 +1,11 @@ with builtins; +let + lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz"; + upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; + stringToCharacters = s: genList (p: substring p 1 s) (stringLength s); +in + rec { splitLines = s: filter (x: !isList x) (split "\n" s); @@ -18,6 +24,8 @@ rec { in if replaced == string then string else replaceStringsRec from to replaced; + toLower = replaceStrings upperChars lowerChars; + squash = replaceStringsRec "\n\n\n" "\n\n"; trim = string: diff --git a/flake.lock b/flake.lock index 991cef1ee..ae98d789a 100644 --- a/flake.lock +++ b/flake.lock @@ -16,34 +16,34 @@ "type": "github" } }, - "lowdown-src": { + "libgit2": { "flake": false, "locked": { - "lastModified": 1633514407, - "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", - "owner": "kristapsdz", - "repo": "lowdown", - "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", + "lastModified": 1697646580, + "narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=", + "owner": "libgit2", + "repo": "libgit2", + "rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5", "type": "github" }, "original": { - "owner": "kristapsdz", - "repo": "lowdown", + "owner": "libgit2", + "repo": "libgit2", "type": "github" } }, "nixpkgs": { "locked": { - "lastModified": 1698876495, - "narHash": "sha256-nsQo2/mkDUFeAjuu92p0dEqhRvHHiENhkKVIV1y0/Oo=", + "lastModified": 1704018918, + "narHash": "sha256-erjg/HrpC9liEfm7oLqb8GXCqsxaFwIIPqCsknW5aFY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "9eb24edd6a0027fed010ccfe300a9734d029983c", + "rev": "2c9c58e98243930f8cb70387934daa4bc8b00373", "type": "github" }, "original": { "owner": "NixOS", - "ref": "release-23.05", + "ref": "nixos-23.05-small", "repo": "nixpkgs", "type": "github" } @@ -67,7 +67,7 @@ "root": { "inputs": { "flake-compat": "flake-compat", - "lowdown-src": "lowdown-src", + "libgit2": "libgit2", "nixpkgs": "nixpkgs", "nixpkgs-regression": "nixpkgs-regression" } diff --git a/flake.nix b/flake.nix index 51d818423..49f214e72 100644 --- a/flake.nix +++ b/flake.nix @@ -1,20 +1,29 @@ { description = "The purely functional package manager"; - # FIXME go back to nixos-23.05-small once - # https://github.com/NixOS/nixpkgs/pull/264875 is included. - inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.05"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; - inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; + inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; - outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat }: + outputs = { self, nixpkgs, nixpkgs-regression, libgit2, ... 
}: let inherit (nixpkgs) lib; + # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981 + # Not an "idiomatic" flake input because: + # - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730 + # - Subflake would download redundant and huge parent flake + # - No git tree hash support: https://github.com/NixOS/nix/issues/6044 + inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; })) + fileset; + officialRelease = false; + # Set to true to build the release notes for the next release. + buildUnreleasedNotes = false; + version = lib.fileContents ./.version + versionSuffix; versionSuffix = if officialRelease @@ -28,11 +37,26 @@ systems = linuxSystems ++ darwinSystems; crossSystems = [ - "armv6l-linux" "armv7l-linux" - "x86_64-freebsd13" "x86_64-netbsd" + "armv6l-unknown-linux-gnueabihf" + "armv7l-unknown-linux-gnueabihf" + "x86_64-unknown-freebsd13" + "x86_64-unknown-netbsd" ]; - stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; + # Nix doesn't yet build on this platform, so we put it in a + # separate list. We just use this for `devShells` and + # `nixpkgsFor`, which this depends on. + shellCrossSystems = crossSystems ++ [ + "x86_64-w64-mingw32" + ]; + + stdenvs = [ + "ccacheStdenv" + "clangStdenv" + "gccStdenv" + "libcxxStdenv" + "stdenv" + ]; forAllSystems = lib.genAttrs systems; @@ -47,57 +71,6 @@ }) stdenvs); - # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981 - # Not an "idiomatic" flake input because: - # - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730 - # - Subflake would download redundant and huge parent flake - # - No git tree hash support: https://github.com/NixOS/nix/issues/6044 - inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; })) - fileset; - - baseFiles = - # .gitignore has already been processed, so any changes in it are irrelevant - # at this point. It is not represented verbatim for test purposes because - # that would interfere with repo semantics. - fileset.fileFilter (f: f.name != ".gitignore") ./.; - - configureFiles = fileset.unions [ - ./.version - ./configure.ac - ./m4 - # TODO: do we really need README.md? It doesn't seem used in the build. - ./README.md - ]; - - topLevelBuildFiles = fileset.unions [ - ./local.mk - ./Makefile - ./Makefile.config.in - ./mk - ]; - - functionalTestFiles = fileset.unions [ - ./tests/functional - (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) - ]; - - nixSrc = fileset.toSource { - root = ./.; - fileset = fileset.intersect baseFiles (fileset.unions [ - configureFiles - topLevelBuildFiles - ./boehmgc-coroutine-sp-fallback.diff - ./doc - ./misc - ./precompiled-headers.h - ./src - ./unit-test-data - ./COPYING - ./scripts/local.mk - functionalTestFiles - ]); - }; - # Memoize nixpkgs for different platforms for efficiency. 
nixpkgsFor = forAllSystems (system: let @@ -106,8 +79,8 @@ inherit system; }; crossSystem = if crossSystem == null then null else { - system = crossSystem; - } // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") { + config = crossSystem; + } // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") { useLLVM = true; }; overlays = [ @@ -119,390 +92,116 @@ in { inherit stdenvs native; static = native.pkgsStatic; - cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); + cross = lib.genAttrs shellCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); }); - commonDeps = - { pkgs - , isStatic ? pkgs.stdenv.hostPlatform.isStatic - }: - with pkgs; rec { - # Use "busybox-sandbox-shell" if present, - # if not (legacy) fallback and hope it's sufficient. - sh = pkgs.busybox-sandbox-shell or (busybox.override { - useMusl = true; - enableStatic = true; - enableMinimal = true; - extraConfig = '' - CONFIG_FEATURE_FANCY_ECHO y - CONFIG_FEATURE_SH_MATH y - CONFIG_FEATURE_SH_MATH_64 y - - CONFIG_ASH y - CONFIG_ASH_OPTIMIZE_FOR_SIZE y - - CONFIG_ASH_ALIAS y - CONFIG_ASH_BASH_COMPAT y - CONFIG_ASH_CMDCMD y - CONFIG_ASH_ECHO y - CONFIG_ASH_GETOPTS y - CONFIG_ASH_INTERNAL_GLOB y - CONFIG_ASH_JOB_CONTROL y - CONFIG_ASH_PRINTF y - CONFIG_ASH_TEST y - ''; - }); - - configureFlags = - lib.optionals stdenv.isLinux [ - "--with-boost=${boost}/lib" - "--with-sandbox-shell=${sh}/bin/busybox" - ] - ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [ - "LDFLAGS=-fuse-ld=gold" - ]; - - testConfigureFlags = [ - "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" - ]; - - internalApiDocsConfigureFlags = [ - "--enable-internal-api-docs" - ]; - - nativeBuildDeps = - [ - buildPackages.bison - buildPackages.flex - (lib.getBin buildPackages.lowdown-nix) - buildPackages.mdbook - buildPackages.mdbook-linkcheck - buildPackages.autoconf-archive - buildPackages.autoreconfHook - buildPackages.pkg-config - - # Tests - buildPackages.git - buildPackages.mercurial # FIXME: remove? only needed for tests - buildPackages.jq # Also for custom mdBook preprocessor. - buildPackages.openssh # only needed for tests (ssh-keygen) - ] - ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)]; - - buildDeps = - [ curl - bzip2 xz brotli editline - openssl sqlite - libarchive - boost - lowdown-nix - libsodium - ] - ++ lib.optionals stdenv.isLinux [libseccomp] - ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; - - checkDeps = [ - gtest - rapidcheck - ]; - - internalApiDocsDeps = [ - buildPackages.doxygen - ]; - - awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin) - (aws-sdk-cpp.override { - apis = ["s3" "transfer"]; - customMemoryManagement = false; - }); - - propagatedDeps = - [ ((boehmgc.override { - enableLargeConfig = true; - }).overrideAttrs(o: { - patches = (o.patches or []) ++ [ - ./boehmgc-coroutine-sp-fallback.diff - ]; - }) - ) - nlohmann_json - ]; - }; - - installScriptFor = systems: - with nixpkgsFor.x86_64-linux.native; - runCommand "installer-script" - { buildInputs = [ nix ]; - } - '' - mkdir -p $out/nix-support - - # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. 
- tarballPath() { - # Remove the store prefix - local path=''${1#${builtins.storeDir}/} - # Get the path relative to the derivation root - local rest=''${path#*/} - # Get the derivation hash - local drvHash=''${path%%-*} - echo "$drvHash/$rest" - } - - substitute ${./scripts/install.in} $out/install \ - ${pkgs.lib.concatMapStrings - (system: let - tarball = if builtins.elem system crossSystems then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} else self.hydraJobs.binaryTarball.${system}; - in '' \ - --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ - --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ - '' - ) - systems - } --replace '@nixVersion@' ${version} - - echo "file installer $out/install" >> $out/nix-support/hydra-build-products - ''; - - testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation { - NIX_DAEMON_PACKAGE = daemon; - NIX_CLIENT_PACKAGE = client; - name = - "nix-tests" - + optionalString - (versionAtLeast daemon.version "2.4pre20211005" && - versionAtLeast client.version "2.4pre20211005") - "-${client.version}-against-${daemon.version}"; - inherit version; - - src = fileset.toSource { - root = ./.; - fileset = fileset.intersect baseFiles (fileset.unions [ - configureFiles - topLevelBuildFiles - functionalTestFiles - ]); + installScriptFor = tarballs: + nixpkgsFor.x86_64-linux.native.callPackage ./scripts/installer.nix { + inherit tarballs; }; - VERSION_SUFFIX = versionSuffix; + testNixVersions = pkgs: client: daemon: + pkgs.callPackage ./package.nix { + pname = + "nix-tests" + + lib.optionalString + (lib.versionAtLeast daemon.version "2.4pre20211005" && + lib.versionAtLeast client.version "2.4pre20211005") + "-${client.version}-against-${daemon.version}"; - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ awsDeps ++ checkDeps; - propagatedBuildInputs = propagatedDeps; + inherit fileset; - enableParallelBuilding = true; + test-client = client; + test-daemon = daemon; - configureFlags = - testConfigureFlags # otherwise configure fails - ++ [ "--disable-build" ]; - dontBuild = true; - doInstallCheck = true; + doBuild = false; + }; - installPhase = '' - mkdir -p $out - ''; - - installCheckPhase = '' - mkdir -p src/nix-channel - make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES - ''; + binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix { + inherit nix; }; - binaryTarball = nix: pkgs: - let - inherit (pkgs) buildPackages; - inherit (pkgs) cacert; - installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; }; - in - - buildPackages.runCommand "nix-binary-tarball-${version}" - { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; - meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}"; - } - '' - cp ${installerClosureInfo}/registration $TMPDIR/reginfo - cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh - substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - - substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - substitute 
${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - - if type -p shellcheck; then - # SC1090: Don't worry about not being able to find - # $nix/etc/profile.d/nix.sh - shellcheck --exclude SC1090 $TMPDIR/install - shellcheck $TMPDIR/create-darwin-volume.sh - shellcheck $TMPDIR/install-darwin-multi-user.sh - shellcheck $TMPDIR/install-systemd-multi-user.sh - - # SC1091: Don't panic about not being able to source - # /etc/profile - # SC2002: Ignore "useless cat" "error", when loading - # .reginfo, as the cat is a much cleaner - # implementation, even though it is "useless" - # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving - # root's home directory - shellcheck --external-sources \ - --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user - fi - - chmod +x $TMPDIR/install - chmod +x $TMPDIR/create-darwin-volume.sh - chmod +x $TMPDIR/install-darwin-multi-user.sh - chmod +x $TMPDIR/install-systemd-multi-user.sh - chmod +x $TMPDIR/install-multi-user - dir=nix-${version}-${pkgs.system} - fn=$out/$dir.tar.xz - mkdir -p $out/nix-support - echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products - tar cvfJ $fn \ - --owner=0 --group=0 --mode=u+rw,uga+r \ - --mtime='1970-01-01' \ - --absolute-names \ - --hard-dereference \ - --transform "s,$TMPDIR/install,$dir/install," \ - --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \ - --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \ - --transform "s,$NIX_STORE,$dir/store,S" \ - $TMPDIR/install \ - $TMPDIR/create-darwin-volume.sh \ - $TMPDIR/install-darwin-multi-user.sh \ - $TMPDIR/install-systemd-multi-user.sh \ - $TMPDIR/install-multi-user \ - $TMPDIR/reginfo \ - $(cat ${installerClosureInfo}/store-paths) - ''; - overlayFor = getStdenv: final: prev: - let currentStdenv = getStdenv final; in + let + stdenv = getStdenv final; + in { nixStable = prev.nix; - # Forward from the previous stage as we don’t want it to pick the lowdown override - nixUnstable = prev.nixUnstable; + default-busybox-sandbox-shell = final.busybox.override { + useMusl = true; + enableStatic = true; + enableMinimal = true; + extraConfig = '' + CONFIG_FEATURE_FANCY_ECHO y + CONFIG_FEATURE_SH_MATH y + CONFIG_FEATURE_SH_MATH_64 y - nix = - with final; - with commonDeps { - inherit pkgs; - inherit (currentStdenv.hostPlatform) isStatic; - }; - let - canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform; - in currentStdenv.mkDerivation (finalAttrs: { - name = "nix-${version}"; - inherit version; + CONFIG_ASH y + CONFIG_ASH_OPTIMIZE_FOR_SIZE y - src = nixSrc; - VERSION_SUFFIX = versionSuffix; - - outputs = [ "out" "dev" "doc" ]; - - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps - # There have been issues building these dependencies - ++ lib.optionals (currentStdenv.hostPlatform == currentStdenv.buildPlatform) awsDeps - ++ lib.optionals finalAttrs.doCheck checkDeps; - - propagatedBuildInputs = propagatedDeps; - - disallowedReferences = [ boost ]; - - preConfigure = lib.optionalString (! currentStdenv.hostPlatform.isStatic) - '' - # Copy libboost_context so we don't get all of Boost in our closure. 
- # https://github.com/NixOS/nixpkgs/issues/45462 - mkdir -p $out/lib - cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib - rm -f $out/lib/*.a - ${lib.optionalString currentStdenv.hostPlatform.isLinux '' - chmod u+w $out/lib/*.so.* - patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* - ''} - ${lib.optionalString currentStdenv.hostPlatform.isDarwin '' - for LIB in $out/lib/*.dylib; do - chmod u+w $LIB - install_name_tool -id $LIB $LIB - install_name_tool -delete_rpath ${boost}/lib/ $LIB || true - done - install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib - ''} - ''; - - configureFlags = configureFlags ++ - [ "--sysconfdir=/etc" ] ++ - lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" ++ - [ (lib.enableFeature finalAttrs.doCheck "tests") ] ++ - lib.optionals finalAttrs.doCheck testConfigureFlags ++ - lib.optional (!canRunInstalled) "--disable-doc-gen"; - - enableParallelBuilding = true; - - makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1"; - - doCheck = true; - - installFlags = "sysconfdir=$(out)/etc"; - - postInstall = '' - mkdir -p $doc/nix-support - echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products - ${lib.optionalString currentStdenv.hostPlatform.isStatic '' - mkdir -p $out/nix-support - echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products - ''} - ${lib.optionalString currentStdenv.isDarwin '' - install_name_tool \ - -change ${boost}/lib/libboost_context.dylib \ - $out/lib/libboost_context.dylib \ - $out/lib/libnixutil.dylib - ''} + CONFIG_ASH_ALIAS y + CONFIG_ASH_BASH_COMPAT y + CONFIG_ASH_CMDCMD y + CONFIG_ASH_ECHO y + CONFIG_ASH_GETOPTS y + CONFIG_ASH_INTERNAL_GLOB y + CONFIG_ASH_JOB_CONTROL y + CONFIG_ASH_PRINTF y + CONFIG_ASH_TEST y ''; + }; - doInstallCheck = finalAttrs.doCheck; - installCheckFlags = "sysconfdir=$(out)/etc"; - installCheckTarget = "installcheck"; # work around buggy detection in stdenv - - separateDebugInfo = !currentStdenv.hostPlatform.isStatic; - - strictDeps = true; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - - passthru.perl-bindings = final.callPackage ./perl { - inherit fileset; - stdenv = currentStdenv; - }; - - meta.platforms = lib.platforms.unix; - meta.mainProgram = "nix"; + libgit2-nix = final.libgit2.overrideAttrs (attrs: { + src = libgit2; + version = libgit2.lastModifiedDate; + cmakeFlags = attrs.cmakeFlags or [] + ++ [ "-DUSE_SSH=exec" ]; }); - lowdown-nix = with final; currentStdenv.mkDerivation rec { - name = "lowdown-0.9.0"; + boehmgc-nix = (final.boehmgc.override { + enableLargeConfig = true; + }).overrideAttrs(o: { + patches = (o.patches or []) ++ [ + ./boehmgc-coroutine-sp-fallback.diff - src = lowdown-src; + # https://github.com/ivmai/bdwgc/pull/586 + ./boehmgc-traceable_allocator-public.diff + ]; + }); - outputs = [ "out" "bin" "dev" ]; + changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { }; - nativeBuildInputs = [ buildPackages.which ]; + nix = + let + officialRelease = false; + versionSuffix = + if officialRelease + then "" + else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; + + in final.callPackage ./package.nix { + inherit + fileset + stdenv + versionSuffix + ; + officialRelease = false; + boehmgc = final.boehmgc-nix; + libgit2 = final.libgit2-nix; + 
busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
+        changelog-d = final.changelog-d-nix;
+      } // {
+        # This is a proper separate downstream package, but it is also
+        # exposed here for backwards-compatibility reasons.
+        perl-bindings = final.nix-perl-bindings;
+      };
+
+      nix-perl-bindings = final.callPackage ./perl {
+        inherit fileset stdenv;
+      };

-      configurePhase = ''
-        ${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
-        ./configure \
-          PREFIX=${placeholder "dev"} \
-          BINDIR=${placeholder "bin"}/bin
-      '';
     };
-  };
 in {
   # A Nixpkgs overlay that overrides the 'nix' and
@@ -514,19 +213,32 @@
     # Binary package for various platforms.
     build = forAllSystems (system: self.packages.${system}.nix);

+    shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation);
+
     buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);

     buildCross = forAllCrossSystems (crossSystem:
       lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));

-    buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));
+    buildNoGc = forAllSystems (system:
+      self.packages.${system}.nix.override { enableGC = false; }
+    );

     buildNoTests = forAllSystems (system:
-      self.packages.${system}.nix.overrideAttrs (a: {
-        doCheck =
-          assert ! a?dontCheck;
-          false;
-      })
+      self.packages.${system}.nix.override {
+        doCheck = false;
+        doInstallCheck = false;
+        installUnitTests = false;
+      }
+    );
+
+    # Toggles some settings for better coverage. Windows needs these
+    # library combinations, and Debian builds Nix with GNU readline too.
+    buildReadlineNoMarkdown = forAllSystems (system:
+      self.packages.${system}.nix.override {
+        enableMarkdown = false;
+        readlineFlavor = "readline";
+      }
     );

     # Perl bindings for various platforms.
@@ -547,67 +259,41 @@
     # to https://nixos.org/nix/install. It downloads the binary
     # tarball for the user's system and calls the second half of the
     # installation script.
-    installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
-    installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
+    installerScript = installScriptFor [
+      # Native
+      self.hydraJobs.binaryTarball."x86_64-linux"
+      self.hydraJobs.binaryTarball."i686-linux"
+      self.hydraJobs.binaryTarball."aarch64-linux"
+      self.hydraJobs.binaryTarball."x86_64-darwin"
+      self.hydraJobs.binaryTarball."aarch64-darwin"
+      # Cross
+      self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
+      self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
+    ];
+    installerScriptForGHA = installScriptFor [
+      # Native
+      self.hydraJobs.binaryTarball."x86_64-linux"
+      self.hydraJobs.binaryTarball."x86_64-darwin"
+      # Cross
+      self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
+      self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
+    ];

     # docker image with Nix inside
     dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);

     # Line coverage analysis.
- coverage = - with nixpkgsFor.x86_64-linux.native; - with commonDeps { inherit pkgs; }; - - releaseTools.coverageAnalysis { - name = "nix-coverage-${version}"; - - src = nixSrc; - - configureFlags = testConfigureFlags; - - enableParallelBuilding = true; - - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ checkDeps; - - dontInstall = false; - - doInstallCheck = true; - installCheckTarget = "installcheck"; # work around buggy detection in stdenv - - lcovFilter = [ "*/boost/*" "*-tab.*" ]; - - hardeningDisable = ["fortify"]; - - NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; - }; + coverage = nixpkgsFor.x86_64-linux.native.nix.override { + pname = "nix-coverage"; + withCoverageChecks = true; + }; # API docs for Nix's unstable internal C++ interfaces. - internal-api-docs = - with nixpkgsFor.x86_64-linux.native; - with commonDeps { inherit pkgs; }; - - stdenv.mkDerivation { - pname = "nix-internal-api-docs"; - inherit version; - - src = nixSrc; - - configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags; - - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ propagatedDeps - ++ awsDeps ++ checkDeps ++ internalApiDocsDeps; - - dontBuild = true; - - installTargets = [ "internal-api-html" ]; - - postInstall = '' - mkdir -p $out/nix-support - echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products - ''; - }; + internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./package.nix { + inherit fileset; + doBuild = false; + enableInternalAPIDocs = true; + }; # System tests. tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // { @@ -615,7 +301,9 @@ # Make sure that nix-env still produces the exact same result # on a particular version of Nixpkgs. 
evalNixpkgs = - with nixpkgsFor.x86_64-linux.native; + let + inherit (nixpkgsFor.x86_64-linux.native) runCommand nix; + in runCommand "eval-nixos" { buildInputs = [ nix ]; } '' type -p nix-env @@ -665,6 +353,11 @@ perlBindings = self.hydraJobs.perlBindings.${system}; installTests = self.hydraJobs.installTests.${system}; nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; + rl-next = + let pkgs = nixpkgsFor.${system}.native; + in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' + LANG=C.UTF-8 ${pkgs.changelog-d-nix}/bin/changelog-d ${./doc/manual/rl-next} >$out + ''; } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { dockerImage = self.hydraJobs.dockerImage.${system}; }); @@ -702,44 +395,20 @@ stdenvs))); devShells = let - makeShell = pkgs: stdenv: - let - canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; - in - with commonDeps { inherit pkgs; }; - stdenv.mkDerivation { - name = "nix"; + makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: { + installFlags = "sysconfdir=$(out)/etc"; + shellHook = '' + PATH=$prefix/bin:$PATH + unset PYTHONPATH + export MANPATH=$out/share/man:$MANPATH - outputs = [ "out" "dev" "doc" ]; - - nativeBuildInputs = nativeBuildDeps - ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear - ++ lib.optional - (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) - pkgs.buildPackages.clang-tools - ; - - buildInputs = buildDeps ++ propagatedDeps - ++ awsDeps ++ checkDeps ++ internalApiDocsDeps; - - configureFlags = configureFlags - ++ testConfigureFlags ++ internalApiDocsConfigureFlags - ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; - - enableParallelBuilding = true; - - installFlags = "sysconfdir=$(out)/etc"; - - shellHook = - '' - PATH=$prefix/bin:$PATH - unset PYTHONPATH - export MANPATH=$out/share/man:$MANPATH - - # Make bash completion work. - XDG_DATA_DIRS+=:$out/share - ''; - }; + # Make bash completion work. + XDG_DATA_DIRS+=:$out/share + ''; + nativeBuildInputs = attrs.nativeBuildInputs or [] + ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear + ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools; + }); in forAllSystems (system: let @@ -750,7 +419,7 @@ in (makeShells "native" nixpkgsFor.${system}.native) // (makeShells "static" nixpkgsFor.${system}.static) // - (forAllCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) // + (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) // { default = self.devShells.${system}.native-stdenvPackages; } diff --git a/maintainers/release-notes b/maintainers/release-notes new file mode 100755 index 000000000..34cd85a56 --- /dev/null +++ b/maintainers/release-notes @@ -0,0 +1,179 @@ +#!/usr/bin/env nix-shell +#!nix-shell -i bash ../shell.nix -I nixpkgs=channel:nixos-unstable-small +# ^^^^^^^ +# Only used for bash. shell.nix goes to the flake. 
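+#
+# Usage sketch (added for illustration; version values are hypothetical,
+# see the VERSION checks further down for the exact format):
+#
+#   VERSION=2.20 ./maintainers/release-notes      # new minor release
+#   VERSION=2.20.1 ./maintainers/release-notes    # patch release
+#
+# DATE=YYYY-MM-DD may also be set to override today's date.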
+ +# --- CONFIGURATION --- + +# This does double duty for +# - including rl-next +# - marking where to insert new links (right after) +SUMMARY_MARKER_LINE='{{#include ./SUMMARY-rl-next.md}}' + +# --- LIB --- + +log() { + echo 1>&2 "release-notes:" "$@" +} +logcmd() { + local cmd="$1" + shift + logcmd2 "$cmd" "${*@Q}" "$cmd" "$@" +} +logcmd2() { + local fakecmd="$1" + local fakeargs="$2" + shift + shift + printf 1>&2 "release-notes: \033[34;1m$fakecmd\033[0m " + echo "$fakeargs" 1>&2 + "$@" +} +die() { + # ANSI red + printf 1>&2 "release-notes: \033[31;1merror:\033[0m" + echo 1>&2 "" "$@" + exit 1 +} +confirm() { + local answer + echo 1>&2 "$@" "[y/n]" + read -r answer + case "$answer" in + y|Y|yes|Yes|YES) + return 0 + ;; + n|N|no|No|NO) + return 1 + ;; + *) + echo 1>&2 "please answer y or n" + confirm "$@" + ;; + esac +} +report_done() { + logcmd2 "git" "show" git -c pager.show=false show + printf 1>&2 "release-notes: \033[32;1mdone\033[0m\n" +} + +# --- PARSE ARGS --- + +if [[ $# -gt 0 ]]; then + die "Release notes takes no arguments, but make sure to set VERSION." +fi + +# --- CHECKS --- + +if [[ ! -e flake.nix ]] || [[ ! -e .git ]]; then + die "must run in repo root" + exit 1 +fi + +# repo must be clean +if ! git diff --quiet; then + die "repo is dirty, please commit or stash changes" +fi + +if ! git diff --quiet --cached; then + die "repo has staged changes, please commit or stash them" +fi + +if ! grep "$SUMMARY_MARKER_LINE" doc/manual/src/SUMMARY.md.in >/dev/null; then + # would have been nice to catch this early, but won't be worth the extra infra + die "SUMMARY.md.in is missing the marker line '$SUMMARY_MARKER_LINE', which would be used for inserting a new release notes page. Please fix the script." +fi + +if [[ ! -n "${VERSION:-}" ]]; then + die "please set the VERSION environment variable before invoking this script" + exit 1 +fi + +# version_major_minor: MAJOR.MINOR +# version_full: MAJOR.MINOR.PATCH +# IS_PATCH: true if this is a patch release; append instead of create +if grep -E '^[0-9]+\.[0-9]+$' <<< "$VERSION" >/dev/null; then + log 'is minor' + IS_PATCH=false + version_full="$VERSION.0" + version_major_minor="$VERSION" +elif grep -E '^[0-9]+\.[0-9]+\.0$' <<< "$VERSION" >/dev/null; then + log 'is minor (.0)' + IS_PATCH=false + version_full="$VERSION" + version_major_minor="$(echo "$VERSION" | sed -e 's/\.0$//')" +elif grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' <<< "$VERSION" >/dev/null; then + log 'is patch' + IS_PATCH=true + version_full="$VERSION" + version_major_minor="$(echo "$VERSION" | sed -e 's/\.[0-9]*$//')" +else + die "VERSION must be MAJOR.MINOR[.PATCH], where each is a number, e.g. 2.20 or 2.20.1 (VERSION was set to $VERSION)" +fi + +unset VERSION + +log "version_major_minor=$version_major_minor" +log "version_full=$version_full" +log "IS_PATCH=$IS_PATCH" + +basename=rl-${version_major_minor}.md +file=doc/manual/src/release-notes/$basename + +if ! $IS_PATCH; then + if [[ -e $file ]]; then + die "release notes file $file already exists. If you'd like to make a minor release, pass a patch version, e.g. 2.20.1" + fi +fi + +# --- DEFAULTS --- + +if [[ ! -n "${DATE:-}" ]]; then + DATE="$(date +%Y-%m-%d)" + log "DATE not set, using $DATE" +fi + +case "$DATE" in + [0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]) + ;; + *) + die "DATE must be YYYY-MM-DD, e.g. 
2021-12-31 (DATE was set to $DATE)"
+        ;;
+esac
+
+# --- DO THE WORK ---
+
+# menu
+title="Release $version_major_minor ($DATE)"
+# section on page
+section_title="Release $version_full ($DATE)"
+
+(
+  # TODO add minor number, and append?
+  echo "# $section_title"
+  echo
+  changelog-d doc/manual/rl-next | sed -e 's/ *$//'
+) | tee -a $file
+
+log "Wrote $file"
+
+if ! $IS_PATCH; then
+  NEW_SUMMARY_LINE="  - [$title](release-notes/$basename)"
+
+  # find the marker line, insert new link after it
+  escaped_marker="$(echo "$SUMMARY_MARKER_LINE" | sed -e 's/\//\\\//g' -e 's/ /\\ /g')"
+  escaped_line="$(echo "$NEW_SUMMARY_LINE" | sed -e 's/\//\\\//g' -e 's/ /\\ /g')"
+  logcmd sed -i -e "/$escaped_marker/a $escaped_line" doc/manual/src/SUMMARY.md.in
+fi
+
+for f in doc/manual/rl-next/*.md; do
+  if [[ config != "$(basename $f)" ]]; then
+    logcmd git rm $f
+  fi
+done
+
+logcmd git add $file doc/manual/src/SUMMARY.md.in
+logcmd git status
+logcmd git commit -m "release notes: $version_full"
+
+report_done
diff --git a/maintainers/release-process.md b/maintainers/release-process.md
index d85266b81..db8b064a5 100644
--- a/maintainers/release-process.md
+++ b/maintainers/release-process.md
@@ -24,34 +24,23 @@ release:
 * In a checkout of the Nix repo, make sure you're on `master` and run
   `git pull`.

-* Move the contents of `doc/manual/src/release-notes/rl-next.md`
-  (except the first line) to
-  `doc/manual/src/release-notes/rl-$VERSION.md` (where `$VERSION` is
-  the contents of `.version` *without* the patch level, e.g. `2.12`
-  rather than `2.12.0`).
-
-* Add a header to `doc/manual/src/release-notes/rl-$VERSION.md` like
-
-  ```
-  # Release 2.12 (2022-12-06)
-  ```
-
-* Proof-read / edit / rearrange the release notes. Breaking changes
-  and highlights should go to the top.
-
-* Add a link to the release notes to `doc/manual/src/SUMMARY.md.in`
-  (*not* `SUMMARY.md`), e.g.
-
-  ```
-  - [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
-  ```
-
-* Run
+* Compile the release notes by running

   ```console
   $ git checkout -b release-notes
-  $ git add doc/manual/src/release-notes/rl-$VERSION.md
-  $ git commit -a -m 'Release notes'
+  $ VERSION=X.YY ./maintainers/release-notes
+  ```
+
+  where `X.YY` is the version *without* the patch level, e.g. `2.12` rather than `2.12.0`.
+
+  A commit is created.
+
+* Proof-read / edit / rearrange the release notes if needed. Breaking changes
+  and highlights should go to the top.
+
+* Push.
+
+  ```console
   $ git push --set-upstream $REMOTE release-notes
   ```
@@ -67,15 +56,17 @@ release:
   $ git checkout -b $VERSION-maintenance
   ```

-* Mark the release as stable:
+* Mark the release as official:

   ```console
-  $ git cherry-pick f673551e71942a52b6d7ae66af8b67140904a76a
+  $ sed -e 's/officialRelease = false;/officialRelease = true;/' -i flake.nix
   ```

   This removes the link to `rl-next.md` from the manual and sets
   `officialRelease = true` in `flake.nix`.

+* Commit this change.
+
 * Push the release branch:

   ```console
@@ -159,6 +150,30 @@ release:

 ## Creating a point release

+* Checkout.
+
+  ```console
+  $ git checkout XX.YY-maintenance
+  ```
+
+* Determine the next patch version.
+
+  ```console
+  $ export VERSION=XX.YY.ZZ
+  ```
+
+* Update release notes.
+
+  ```console
+  $ ./maintainers/release-notes
+  ```
+
+* Push.
+
+  ```console
+  $ git push
+  ```
+
 * Wait for the desired evaluation of the maintenance jobset to finish
   building.
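To make the new workflow concrete, here is a hypothetical minor-release run of the `maintainers/release-notes` script above (version and date invented for illustration):

```console
$ VERSION=2.20 DATE=2024-01-29 ./maintainers/release-notes
```

Per the script's logic, this would collect the `doc/manual/rl-next` entries into `doc/manual/src/release-notes/rl-2.20.md` under the heading `# Release 2.20.0 (2024-01-29)`, insert `  - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md)` after the marker line in `doc/manual/src/SUMMARY.md.in`, `git rm` the consumed `rl-next` entries, and commit the result as `release notes: 2.20.0`.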
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index ebc536f12..4e2c379f0 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -154,8 +154,8 @@ downloadFile("binaryTarball.x86_64-linux", "1"); downloadFile("binaryTarball.aarch64-linux", "1"); downloadFile("binaryTarball.x86_64-darwin", "1"); downloadFile("binaryTarball.aarch64-darwin", "1"); -downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1"); -downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1"); +downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1"); +downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1"); downloadFile("installerScript", "1"); # Upload docker images to dockerhub. diff --git a/misc/changelog-d.cabal.nix b/misc/changelog-d.cabal.nix new file mode 100644 index 000000000..76f9353cd --- /dev/null +++ b/misc/changelog-d.cabal.nix @@ -0,0 +1,31 @@ +{ mkDerivation, aeson, base, bytestring, cabal-install-parsers +, Cabal-syntax, containers, directory, filepath, frontmatter +, generic-lens-lite, lib, mtl, optparse-applicative, parsec, pretty +, regex-applicative, text, pkgs +}: +let rev = "f30f6969e9cd8b56242309639d58acea21c99d06"; +in +mkDerivation { + pname = "changelog-d"; + version = "0.1"; + src = pkgs.fetchurl { + name = "changelog-d-${rev}.tar.gz"; + url = "https://codeberg.org/roberth/changelog-d/archive/${rev}.tar.gz"; + hash = "sha256-8a2+i5u7YoszAgd5OIEW0eYUcP8yfhtoOIhLJkylYJ4="; + } // { inherit rev; }; + isLibrary = false; + isExecutable = true; + libraryHaskellDepends = [ + aeson base bytestring cabal-install-parsers Cabal-syntax containers + directory filepath frontmatter generic-lens-lite mtl parsec pretty + regex-applicative text + ]; + executableHaskellDepends = [ + base bytestring Cabal-syntax directory filepath + optparse-applicative + ]; + doHaddock = false; + description = "Concatenate changelog entries into a single one"; + license = lib.licenses.gpl3Plus; + mainProgram = "changelog-d"; +} diff --git a/misc/changelog-d.nix b/misc/changelog-d.nix new file mode 100644 index 000000000..1b20f4596 --- /dev/null +++ b/misc/changelog-d.nix @@ -0,0 +1,31 @@ +# Taken temporarily from +{ + callPackage, + lib, + haskell, + haskellPackages, +}: + +let + hsPkg = haskellPackages.callPackage ./changelog-d.cabal.nix { }; + + addCompletions = haskellPackages.generateOptparseApplicativeCompletions ["changelog-d"]; + + haskellModifications = + lib.flip lib.pipe [ + addCompletions + haskell.lib.justStaticExecutables + ]; + + mkDerivationOverrides = finalAttrs: oldAttrs: { + + version = oldAttrs.version + "-git-${lib.strings.substring 0 7 oldAttrs.src.rev}"; + + meta = oldAttrs.meta // { + homepage = "https://codeberg.org/roberth/changelog-d"; + maintainers = [ lib.maintainers.roberth ]; + }; + + }; +in + (haskellModifications hsPkg).overrideAttrs mkDerivationOverrides diff --git a/mk/build-dir.mk b/mk/build-dir.mk new file mode 100644 index 000000000..02f4cae60 --- /dev/null +++ b/mk/build-dir.mk @@ -0,0 +1,10 @@ +# Initialise support for build directories. +builddir ?= + +ifdef builddir + buildprefix = $(builddir)/ + buildprefixrel = $(builddir) +else + buildprefix = + buildprefixrel = . +endif diff --git a/mk/common-test.sh b/mk/common-test.sh index 00ccd1584..2783d293b 100644 --- a/mk/common-test.sh +++ b/mk/common-test.sh @@ -1,7 +1,7 @@ # Remove overall test dir (at most one of the two should match) and # remove file extension. 
test_name=$(echo -n "$test" | sed \ - -e "s|^unit-test-data/||" \ + -e "s|^tests/unit/[^/]*/data/||" \ -e "s|^tests/functional/||" \ -e "s|\.sh$||" \ ) diff --git a/mk/disable-tests.mk b/mk/disable-tests.mk deleted file mode 100644 index f72f84412..000000000 --- a/mk/disable-tests.mk +++ /dev/null @@ -1,12 +0,0 @@ -# This file is only active for `./configure --disable-tests`. -# Running `make check` or `make installcheck` would indicate a mistake in the -# caller. - -installcheck: - @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'." - @exit 1 - -# This currently has little effect. -check: - @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'." - @exit 1 diff --git a/mk/install-dirs.mk b/mk/install-dirs.mk new file mode 100644 index 000000000..732b0d6fc --- /dev/null +++ b/mk/install-dirs.mk @@ -0,0 +1,11 @@ +# Default installation paths. +prefix ?= /usr/local +libdir ?= $(prefix)/lib +bindir ?= $(prefix)/bin +libexecdir ?= $(prefix)/libexec +datadir ?= $(prefix)/share +localstatedir ?= $(prefix)/var +sysconfdir ?= $(prefix)/etc +mandir ?= $(prefix)/share/man + +DESTDIR ?= diff --git a/mk/lib.mk b/mk/lib.mk index 49abe9862..10ce8d436 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -12,24 +12,7 @@ man-pages := install-tests := install-tests-groups := -ifdef HOST_OS - HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) - ifeq ($(HOST_KERNEL), cygwin) - HOST_CYGWIN = 1 - endif - ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) - HOST_DARWIN = 1 - endif - ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) - HOST_FREEBSD = 1 - endif - ifeq ($(HOST_KERNEL), linux) - HOST_LINUX = 1 - endif - ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) - HOST_SOLARIS = 1 - endif -endif +include mk/platform.mk # Hack to define a literal space. space := @@ -43,27 +26,6 @@ define newline endef -# Default installation paths. -prefix ?= /usr/local -libdir ?= $(prefix)/lib -bindir ?= $(prefix)/bin -libexecdir ?= $(prefix)/libexec -datadir ?= $(prefix)/share -localstatedir ?= $(prefix)/var -sysconfdir ?= $(prefix)/etc -mandir ?= $(prefix)/share/man - - -# Initialise support for build directories. -builddir ?= - -ifdef builddir - buildprefix = $(builddir)/ -else - buildprefix = -endif - - # Pass -fPIC if we're building dynamic libraries. BUILD_SHARED_LIBS ?= 1 @@ -94,6 +56,8 @@ ifeq ($(BUILD_DEBUG), 1) endif +include mk/build-dir.mk +include mk/install-dirs.mk include mk/functions.mk include mk/tracing.mk include mk/clean.mk @@ -112,7 +76,7 @@ define include-sub-makefile include $(1) endef -$(foreach mf, $(makefiles), $(eval $(call include-sub-makefile, $(mf)))) +$(foreach mf, $(makefiles), $(eval $(call include-sub-makefile,$(mf)))) # Instantiate stuff. diff --git a/mk/libraries.mk b/mk/libraries.mk index 1bc73d7f7..b99ba2782 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -3,13 +3,19 @@ libs-list := ifdef HOST_DARWIN SO_EXT = dylib else - ifdef HOST_CYGWIN + ifdef HOST_WINDOWS SO_EXT = dll else SO_EXT = so endif endif +ifdef HOST_UNIX + THREAD_LDFLAGS = -pthread +else + THREAD_LDFLAGS = +endif + # Build a library with symbolic name $(1). 
The library is defined by # various variables prefixed by ‘$(1)_’: # @@ -59,7 +65,7 @@ define build-library $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) _libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH)) - ifdef HOST_CYGWIN + ifdef HOST_WINDOWS $(1)_INSTALL_DIR ?= $$(bindir) else $(1)_INSTALL_DIR ?= $$(libdir) @@ -79,7 +85,7 @@ define build-library endif else ifndef HOST_DARWIN - ifndef HOST_CYGWIN + ifndef HOST_WINDOWS $(1)_LDFLAGS += -Wl,-z,defs endif endif diff --git a/mk/platform.mk b/mk/platform.mk new file mode 100644 index 000000000..fe960dedf --- /dev/null +++ b/mk/platform.mk @@ -0,0 +1,32 @@ +ifdef HOST_OS + HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) + ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) + HOST_MINGW = 1 + HOST_WINDOWS = 1 + endif + ifeq ($(HOST_KERNEL), cygwin) + HOST_CYGWIN = 1 + HOST_WINDOWS = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) + HOST_DARWIN = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) + HOST_FREEBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) + HOST_NETBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(HOST_KERNEL), linux) + HOST_LINUX = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) + HOST_SOLARIS = 1 + HOST_UNIX = 1 + endif +endif diff --git a/mk/programs.mk b/mk/programs.mk index a88d9d949..623caaf55 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -1,5 +1,11 @@ programs-list := +ifdef HOST_WINDOWS + EXE_EXT = .exe +else + EXE_EXT = +endif + # Build a program with symbolic name $(1). The program is defined by # various variables prefixed by ‘$(1)_’: # @@ -31,7 +37,7 @@ define build-program _srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src))) $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) _libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH))) - $(1)_PATH := $$(_d)/$$($(1)_NAME) + $(1)_PATH := $$(_d)/$$($(1)_NAME)$(EXE_EXT) $$(eval $$(call create-dir, $$(_d))) @@ -42,7 +48,7 @@ define build-program ifdef $(1)_INSTALL_DIR - $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME) + $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)$(EXE_EXT) $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) @@ -87,6 +93,6 @@ define build-program # Phony target to run this program (typically as a dependency of 'check'). 
.PHONY: $(1)_RUN
 $(1)_RUN: $$($(1)_PATH)
-	$(trace-test) $$(UNIT_TEST_ENV) $$($(1)_PATH)
+	$(trace-test) $$($(1)_ENV) $$($(1)_PATH)

 endef
diff --git a/mk/templates.mk b/mk/templates.mk
index c7ac7afbf..866bdc17f 100644
--- a/mk/templates.mk
+++ b/mk/templates.mk
@@ -10,10 +10,10 @@ endef

 ifneq ($(MAKECMDGOALS), clean)

-%.h: %.h.in
-	$(trace-gen) rm -f $@ && ./config.status --quiet --header=$@
+$(buildprefix)%.h: %.h.in
+	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)

-%: %.in
-	$(trace-gen) rm -f $@ && ./config.status --quiet --file=$@
+$(buildprefix)%: %.in
+	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)

 endif
diff --git a/package.nix b/package.nix
new file mode 100644
index 000000000..37410dc2f
--- /dev/null
+++ b/package.nix
@@ -0,0 +1,404 @@
+{ lib
+, stdenv
+, releaseTools
+, autoconf-archive
+, autoreconfHook
+, aws-sdk-cpp
+, boehmgc
+, nlohmann_json
+, bison
+, boost
+, brotli
+, bzip2
+, changelog-d
+, curl
+, editline
+, readline
+, fileset
+, flex
+, git
+, gtest
+, jq
+, doxygen
+, libarchive
+, libcpuid
+, libgit2
+, libseccomp
+, libsodium
+, lowdown
+, mdbook
+, mdbook-linkcheck
+, mercurial
+, openssh
+, openssl
+, pkg-config
+, rapidcheck
+, sqlite
+, util-linux
+, xz
+
+, busybox-sandbox-shell ? null
+
+# Configuration Options
+#
+# This probably seems like too many degrees of freedom, but it
+# faithfully reflects how the underlying configure + make build system
+# works. The top-level flake.nix will choose useful combinations of these
+# options to CI.
+
+, pname ? "nix"
+
+, versionSuffix ? ""
+, officialRelease ? false
+
+# Whether to build Nix. Useful to skip for tasks like (a) just
+# generating API docs or (b) testing existing pre-built versions of Nix.
+, doBuild ? true
+
+# Run the unit tests as part of the build. See `installUnitTests` for an
+# alternative to this.
+, doCheck ? __forDefaults.canRunInstalled
+
+# Run the functional tests as part of the build.
+, doInstallCheck ? test-client != null || __forDefaults.canRunInstalled
+
+# Check test coverage of Nix. Probably want to use with at least
+# one of `doCheck` or `doInstallCheck` enabled.
+, withCoverageChecks ? false
+
+# Whether to build the regular manual.
+, enableManual ? __forDefaults.canRunInstalled
+
+# Whether to use garbage collection for the Nix language evaluator.
+#
+# If it is disabled, we just leak memory, but this is not as bad as it
+# sounds so long as evaluation just takes place within short-lived
+# processes. (When the process exits, the memory is reclaimed; it is
+# only leaked *within* the process.)
+, enableGC ? true
+
+# Whether to enable Markdown rendering in the Nix binary.
+, enableMarkdown ? !stdenv.hostPlatform.isWindows
+
+# Which interactive line editor library to use for Nix's repl.
+#
+# Currently supported choices are:
+#
+# - editline (default)
+# - readline
+, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
+
+# Whether to compile `rl-next.md`, the release notes for the next
+# not-yet-released version of Nix in the manual, from the individual
+# change log entries in the directory.
+, buildUnreleasedNotes ? false
+
+# Whether to build the internal API docs; this can be done separately
+# from everything else.
+, enableInternalAPIDocs ? false
+
+# Whether to install unit tests. This is useful when cross compiling
+# since we cannot run them natively during the build, but can do so
+# later.
+, installUnitTests ? 
doBuild && !__forDefaults.canExecuteHost
+
+# For running the functional tests against a pre-built Nix. Probably
+# want to use in conjunction with `doBuild = false;`.
+, test-daemon ? null
+, test-client ? null
+
+# Avoid setting things that would interfere with a functioning devShell.
+, forDevShell ? false
+
+# Not a real argument, just the only way to approximate let-binding some
+# stuff for argument defaults.
+, __forDefaults ? {
+    canExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
+    canRunInstalled = doBuild && __forDefaults.canExecuteHost;
+  }
+}:
+
+let
+  version = lib.fileContents ./.version + versionSuffix;
+
+  # Selected attributes with defaults; these will be used to define some
+  # things which should instead be gotten via `finalAttrs` in order to
+  # work with overriding.
+  attrs = {
+    inherit doBuild doCheck doInstallCheck;
+  };
+
+  mkDerivation =
+    if withCoverageChecks
+    then
+      # TODO support `finalAttrs` args function in
+      # `releaseTools.coverageAnalysis`.
+      argsFun:
+        releaseTools.coverageAnalysis (let args = argsFun args; in args)
+    else stdenv.mkDerivation;
+in

+mkDerivation (finalAttrs: let
+
+  inherit (finalAttrs)
+    doCheck
+    doInstallCheck
+    ;
+
+  doBuild = !finalAttrs.dontBuild;
+
+  # Either running the unit tests during the build, or installing them
+  # to be run later, requires the unit tests to be built.
+  buildUnitTests = doCheck || installUnitTests;
+
+in {
+  inherit pname version;
+
+  src =
+    let
+      baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
+    in
+      fileset.toSource {
+        root = ./.;
+        fileset = fileset.intersect baseFiles (fileset.unions ([
+          # For configure
+          ./.version
+          ./configure.ac
+          ./m4
+          # TODO: do we really need README.md? It doesn't seem to be used in the build.
+          ./README.md
+          # For make, regardless of what we are building
+          ./local.mk
+          ./Makefile
+          ./Makefile.config.in
+          ./mk
+          (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
+        ] ++ lib.optionals doBuild [
+          ./boehmgc-coroutine-sp-fallback.diff
+          ./doc
+          ./misc
+          ./precompiled-headers.h
+          ./src
+          ./COPYING
+          ./scripts/local.mk
+        ] ++ lib.optionals buildUnitTests [
+          ./doc/manual
+        ] ++ lib.optionals enableInternalAPIDocs [
+          ./doc/internal-api
+          # Source might not be compiled, but still must be available
+          # for Doxygen to gather comments.
+          ./src
+          ./tests/unit
+        ] ++ lib.optionals buildUnitTests [
+          ./tests/unit
+        ] ++ lib.optionals doInstallCheck [
+          ./tests/functional
+        ]));
+      };
+
+  VERSION_SUFFIX = versionSuffix;
+
+  outputs = [ "out" ]
+    ++ lib.optional doBuild "dev"
+    # If we are doing just the build or just the docs, that one thing will
+    # use "out". We only need additional outputs if we are doing both.
+    ++ lib.optional (doBuild && (enableManual || enableInternalAPIDocs)) "doc"
+    ++ lib.optional installUnitTests "check";
+
+  nativeBuildInputs = [
+    autoconf-archive
+    autoreconfHook
+    pkg-config
+  ] ++ lib.optionals doBuild [
+    bison
+    flex
+  ] ++ lib.optionals enableManual [
+    (lib.getBin lowdown)
+    mdbook
+    mdbook-linkcheck
+  ] ++ lib.optionals (doInstallCheck || enableManual) [
+    jq # Also for custom mdBook preprocessor.
+ ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux + # Official releases don't have rl-next, so we don't need to compile a + # changelog + ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d + ++ lib.optional enableInternalAPIDocs doxygen + ; + + buildInputs = lib.optionals doBuild [ + boost + brotli + bzip2 + curl + libarchive + libgit2 + libsodium + openssl + sqlite + xz + ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ + ({ inherit readline editline; }.${readlineFlavor}) + ] ++ lib.optionals enableMarkdown [ + lowdown + ] ++ lib.optionals buildUnitTests [ + gtest + rapidcheck + ] ++ lib.optional stdenv.isLinux libseccomp + ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid + # There have been issues building these dependencies + ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) + (aws-sdk-cpp.override { + apis = ["s3" "transfer"]; + customMemoryManagement = false; + }) + ; + + propagatedBuildInputs = [ + nlohmann_json + ] ++ lib.optional enableGC boehmgc; + + dontBuild = !attrs.doBuild; + doCheck = attrs.doCheck; + + nativeCheckInputs = [ + git + mercurial + openssh + ]; + + disallowedReferences = [ boost ]; + + preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) ( + '' + # Copy libboost_context so we don't get all of Boost in our closure. + # https://github.com/NixOS/nixpkgs/issues/45462 + mkdir -p $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib + rm -f $out/lib/*.a + '' + lib.optionalString stdenv.hostPlatform.isLinux '' + chmod u+w $out/lib/*.so.* + patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* + '' + lib.optionalString stdenv.hostPlatform.isDarwin '' + for LIB in $out/lib/*.dylib; do + chmod u+w $LIB + install_name_tool -id $LIB $LIB + install_name_tool -delete_rpath ${boost}/lib/ $LIB || true + done + install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib + '' + ); + + configureFlags = [ + (lib.enableFeature doBuild "build") + (lib.enableFeature buildUnitTests "unit-tests") + (lib.enableFeature doInstallCheck "functional-tests") + (lib.enableFeature enableInternalAPIDocs "internal-api-docs") + (lib.enableFeature enableManual "doc-gen") + (lib.enableFeature enableGC "gc") + (lib.enableFeature enableMarkdown "markdown") + (lib.enableFeature installUnitTests "install-unit-tests") + (lib.withFeatureAs true "readline-flavor" readlineFlavor) + ] ++ lib.optionals (!forDevShell) [ + "--sysconfdir=/etc" + ] ++ lib.optionals installUnitTests [ + "--with-check-bin-dir=${builtins.placeholder "check"}/bin" + "--with-check-lib-dir=${builtins.placeholder "check"}/lib" + ] ++ lib.optionals (doBuild) [ + "--with-boost=${boost}/lib" + ] ++ lib.optionals (doBuild && stdenv.isLinux) [ + "--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox" + ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) + "LDFLAGS=-fuse-ld=gold" + ++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell" + ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"; + + enableParallelBuilding = true; + + makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1"; + + installTargets = lib.optional doBuild "install" + ++ lib.optional enableInternalAPIDocs "internal-api-html"; + + installFlags = "sysconfdir=$(out)/etc"; + + 
# In this case we are probably just running tests, and so there isn't
+  # anything to install; we just make an empty directory to signify that
+  # the tests succeeded.
+  installPhase = if finalAttrs.installTargets != [] then null else ''
+    mkdir -p $out
+  '';
+
+  postInstall = lib.optionalString doBuild (
+    lib.optionalString stdenv.hostPlatform.isStatic ''
+      mkdir -p $out/nix-support
+      echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
+    '' + lib.optionalString stdenv.isDarwin ''
+      install_name_tool \
+        -change ${boost}/lib/libboost_context.dylib \
+        $out/lib/libboost_context.dylib \
+        $out/lib/libnixutil.dylib
+    ''
+  ) + lib.optionalString enableManual ''
+    mkdir -p ''${!outputDoc}/nix-support
+    echo "doc manual ''${!outputDoc}/share/doc/nix/manual" >> ''${!outputDoc}/nix-support/hydra-build-products
+  '' + lib.optionalString enableInternalAPIDocs ''
+    mkdir -p ''${!outputDoc}/nix-support
+    echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
+  '';
+
+  doInstallCheck = attrs.doInstallCheck;
+
+  installCheckFlags = "sysconfdir=$(out)/etc";
+  # Work around buggy detection in stdenv.
+  installCheckTarget = "installcheck";
+
+  # Work around a weird bug where it doesn't think there is a Makefile.
+  installCheckPhase = if (!doBuild && doInstallCheck) then ''
+    mkdir -p src/nix-channel
+    make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
+  '' else null;
+
+  # Needed for tests if we are not doing a build, but testing existing
+  # built Nix.
+  preInstallCheck = lib.optionalString (! doBuild) ''
+    mkdir -p src/nix-channel
+  '';
+
+  separateDebugInfo = !stdenv.hostPlatform.isStatic;
+
+  # TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
+  # to work with `strictDeps`.
+  strictDeps = !withCoverageChecks;
+
+  hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
+
+  meta = {
+    platforms = lib.platforms.unix ++ lib.platforms.windows;
+    mainProgram = "nix";
+    broken = !(lib.all (a: a) [
+      # We cannot run or install unit tests if we don't build them or
+      # Nix proper (which they depend on).
+      (installUnitTests -> doBuild)
+      (doCheck -> doBuild)
+      # We have to build the manual to build unreleased notes, as those
+      # are part of the manual
+      (buildUnreleasedNotes -> enableManual)
+      # The build process for the manual currently requires extracting
+      # data from the Nix executable we are trying to document.
+ (enableManual -> doBuild) + ]); + }; + +} // lib.optionalAttrs withCoverageChecks { + lcovFilter = [ "*/boost/*" "*-tab.*" ]; + + hardeningDisable = ["fortify"]; + + NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; + + dontInstall = false; +} // lib.optionalAttrs (test-daemon != null) { + NIX_DAEMON_PACKAGE = test-daemon; +} // lib.optionalAttrs (test-client != null) { + NIX_CLIENT_PACKAGE = test-client; +}) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index f89ac4077..423c01cf7 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -9,9 +9,10 @@ #undef do_close #include "derivations.hh" +#include "realisation.hh" #include "globals.hh" #include "store-api.hh" -#include "crypto.hh" +#include "posix-source-accessor.hh" #include #include @@ -77,7 +78,7 @@ SV * queryReferences(char * path) SV * queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true); + auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -103,7 +104,7 @@ SV * queryPathInfo(char * path, int base32) XPUSHs(&PL_sv_undef); else XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); - auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true); + auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); @@ -204,8 +205,11 @@ void importPaths(int fd, int dontCheckSigs) SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - Hash h = hashPath(parseHashType(algo), path).first; - auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); + PosixSourceAccessor accessor; + Hash h = hashPath( + accessor, CanonPath::fromCwd(path), + FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; + auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -215,8 +219,8 @@ SV * hashPath(char * algo, int base32, char * path) SV * hashFile(char * algo, int base32, char * path) PPCODE: try { - Hash h = hashFile(parseHashType(algo), path); - auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); + Hash h = hashFile(parseHashAlgo(algo), path); + auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -226,8 +230,8 @@ SV * hashFile(char * algo, int base32, char * path) SV * hashString(char * algo, int base32, char * s) PPCODE: try { - Hash h = hashString(parseHashType(algo), s); - auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); + Hash h = hashString(parseHashAlgo(algo), s); + auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -237,8 +241,8 @@ SV * hashString(char * algo, int base32, char * s) SV * convertHash(char * algo, char * s, int toBase32) PPCODE: try { - auto h = Hash::parseAny(s, parseHashType(algo)); - auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false); + auto h = Hash::parseAny(s, parseHashAlgo(algo)); + auto s = h.to_string(toBase32 ? 
HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -280,7 +284,11 @@ SV * addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo)); + PosixSourceAccessor accessor; + auto path = store()->addToStore( + std::string(baseNameOf(srcPath)), + accessor, CanonPath::fromCwd(srcPath), + method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -290,7 +298,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo) SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { - auto h = Hash::parseAny(hash, parseHashType(algo)); + auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; auto path = store()->makeFixedOutputPath(name, FixedOutputInfo { .method = method, diff --git a/scripts/binary-tarball.nix b/scripts/binary-tarball.nix new file mode 100644 index 000000000..104189b0c --- /dev/null +++ b/scripts/binary-tarball.nix @@ -0,0 +1,84 @@ +{ runCommand +, system +, buildPackages +, cacert +, nix +}: + +let + + installerClosureInfo = buildPackages.closureInfo { + rootPaths = [ nix cacert ]; + }; + + inherit (nix) version; + + env = { + #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; + meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; + }; + +in + +runCommand "nix-binary-tarball-${version}" env '' + cp ${installerClosureInfo}/registration $TMPDIR/reginfo + cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh + substitute ${./install-nix-from-closure.sh} $TMPDIR/install \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + + substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + + if type -p shellcheck; then + # SC1090: Don't worry about not being able to find + # $nix/etc/profile.d/nix.sh + shellcheck --exclude SC1090 $TMPDIR/install + shellcheck $TMPDIR/create-darwin-volume.sh + shellcheck $TMPDIR/install-darwin-multi-user.sh + shellcheck $TMPDIR/install-systemd-multi-user.sh + + # SC1091: Don't panic about not being able to source + # /etc/profile + # SC2002: Ignore "useless cat" "error", when loading + # .reginfo, as the cat is a much cleaner + # implementation, even though it is "useless" + # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving + # root's home directory + shellcheck --external-sources \ + --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user + fi + + chmod +x $TMPDIR/install + chmod +x $TMPDIR/create-darwin-volume.sh + chmod +x $TMPDIR/install-darwin-multi-user.sh + chmod +x $TMPDIR/install-systemd-multi-user.sh + chmod +x $TMPDIR/install-multi-user + dir=nix-${version}-${system} + fn=$out/$dir.tar.xz + mkdir -p $out/nix-support + echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products + tar cvfJ $fn \ + 
--owner=0 --group=0 --mode=u+rw,uga+r \ + --mtime='1970-01-01' \ + --absolute-names \ + --hard-dereference \ + --transform "s,$TMPDIR/install,$dir/install," \ + --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \ + --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \ + --transform "s,$NIX_STORE,$dir/store,S" \ + $TMPDIR/install \ + $TMPDIR/create-darwin-volume.sh \ + $TMPDIR/install-darwin-multi-user.sh \ + $TMPDIR/install-systemd-multi-user.sh \ + $TMPDIR/install-multi-user \ + $TMPDIR/reginfo \ + $(cat ${installerClosureInfo}/store-paths) +'' diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 0326d3415..766f81bde 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -3,11 +3,13 @@ set -eu set -o pipefail +# System specific settings +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}" +export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" + readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist # create by default; set 0 to DIY, use a symlink, etc. readonly NIX_VOLUME_CREATE=${NIX_VOLUME_CREATE:-1} # now default -NIX_FIRST_BUILD_UID="301" -NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" # caution: may update times on / if not run as normal non-root user read_only_root() { diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index a08f62333..ad3ee8881 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -25,9 +25,9 @@ readonly RED='\033[31m' readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32} readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" readonly NIX_BUILD_GROUP_NAME="nixbld" -# darwin installer needs to override these -NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" -NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" +# each system specific installer must set these: +# NIX_FIRST_BUILD_UID +# NIX_BUILD_USER_NAME_TEMPLATE # Please don't change this. We don't support it, because the # default shell profile that comes with Nix doesn't support it. readonly NIX_ROOT="/nix" @@ -707,6 +707,12 @@ EOF fi } +check_required_system_specific_settings() { + if [ -z "${NIX_FIRST_BUILD_UID+x}" ] || [ -z "${NIX_BUILD_USER_NAME_TEMPLATE+x}" ]; then + failure "Internal error: System specific installer for $(uname) ($1) does not export required settings." + fi +} + welcome_to_nix() { local -r NIX_UID_RANGES="${NIX_FIRST_BUILD_UID}..$((NIX_FIRST_BUILD_UID + NIX_USER_COUNT - 1))" local -r RANGE_TEXT=$(echo -ne "${BLUE}(uids [${NIX_UID_RANGES}])${ESC}") @@ -726,7 +732,9 @@ manager. This will happen in a few stages: if you are ready to continue. 3. Create the system users ${RANGE_TEXT} and groups ${GROUP_TEXT} - that the Nix daemon uses to run builds. + that the Nix daemon uses to run builds. To create system users + in a different range, exit and run this tool again with + NIX_FIRST_BUILD_UID set. 4. Perform the basic installation of the Nix files daemon. @@ -968,13 +976,16 @@ main() { if is_os_darwin; then # shellcheck source=./install-darwin-multi-user.sh . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh" + check_required_system_specific_settings "install-darwin-multi-user.sh" elif is_os_linux; then # shellcheck source=./install-systemd-multi-user.sh . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also + check_required_system_specific_settings "install-systemd-multi-user.sh" else failure "Sorry, I don't know what to do on $(uname)" fi + welcome_to_nix if ! 
is_root; then diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index 07b34033a..202a9bb54 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -3,6 +3,10 @@ set -eu set -o pipefail +# System specific settings +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" +export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" + readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service diff --git a/scripts/installer.nix b/scripts/installer.nix new file mode 100644 index 000000000..cc7759c2c --- /dev/null +++ b/scripts/installer.nix @@ -0,0 +1,36 @@ +{ lib +, runCommand +, nix +, tarballs +}: + +runCommand "installer-script" { + buildInputs = [ nix ]; +} '' + mkdir -p $out/nix-support + + # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. + tarballPath() { + # Remove the store prefix + local path=''${1#${builtins.storeDir}/} + # Get the path relative to the derivation root + local rest=''${path#*/} + # Get the derivation hash + local drvHash=''${path%%-*} + echo "$drvHash/$rest" + } + + substitute ${./install.in} $out/install \ + ${lib.concatMapStrings + (tarball: let + inherit (tarball.stdenv.hostPlatform) system; + in '' \ + --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ + --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ + '' + ) + tarballs + } --replace '@nixVersion@' ${nix.version} + + echo "file installer $out/install" >> $out/nix-support/hydra-build-products +'' diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in index c63db4648..d256b24ed 100644 --- a/scripts/nix-profile-daemon.sh.in +++ b/scripts/nix-profile-daemon.sh.in @@ -31,7 +31,7 @@ fi export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK" # Populate bash completions, .desktop files, etc -if [ -z "$XDG_DATA_DIRS" ]; then +if [ -z "${XDG_DATA_DIRS-}" ]; then # According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default export XDG_DATA_DIRS="/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share" else diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index 56e070ae1..44bc96e89 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -33,7 +33,7 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK" # Populate bash completions, .desktop files, etc - if [ -z "$XDG_DATA_DIRS" ]; then + if [ -z "${XDG_DATA_DIRS-}" ]; then # According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default export XDG_DATA_DIRS="/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share" else diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index 9a2dce806..c5eb93c5d 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -12,9 +12,9 @@ namespace nix { bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ { \ const MY_TYPE* me = this; \ - auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields1 = std::tie(*me->drvPath, me->FIELD); \ me = &other; \ - auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields2 = std::tie(*me->drvPath, me->FIELD); \ return fields1 COMPARATOR fields2; \ } #define CMP(CHILD_TYPE, 
MY_TYPE, FIELD) \ @@ -80,7 +80,7 @@ SingleDerivedPath SingleBuiltPath::discardOutputPath() const ); } -nlohmann::json BuiltPath::Built::toJSON(const Store & store) const +nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const { nlohmann::json res; res["drvPath"] = drvPath->toJSON(store); @@ -90,7 +90,7 @@ nlohmann::json BuiltPath::Built::toJSON(const Store & store) const return res; } -nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const +nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) const { nlohmann::json res; res["drvPath"] = drvPath->toJSON(store); @@ -100,14 +100,14 @@ nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const return res; } -nlohmann::json SingleBuiltPath::toJSON(const Store & store) const +nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const { return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw()); } -nlohmann::json BuiltPath::toJSON(const Store & store) const +nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const { return std::visit([&](const auto & buildable) { return buildable.toJSON(store); diff --git a/src/libcmd/built-path.hh b/src/libcmd/built-path.hh index 7154cc504..99917e0ee 100644 --- a/src/libcmd/built-path.hh +++ b/src/libcmd/built-path.hh @@ -14,9 +14,9 @@ struct SingleBuiltPathBuilt { SingleDerivedPathBuilt discardOutputPath() const; - std::string to_string(const Store & store) const; - static SingleBuiltPathBuilt parse(const Store & store, std::string_view, std::string_view); - nlohmann::json toJSON(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; + static SingleBuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view); + nlohmann::json toJSON(const StoreDirConfig & store) const; DECLARE_CMP(SingleBuiltPathBuilt); }; @@ -41,8 +41,8 @@ struct SingleBuiltPath : _SingleBuiltPathRaw { SingleDerivedPath discardOutputPath() const; - static SingleBuiltPath parse(const Store & store, std::string_view); - nlohmann::json toJSON(const Store & store) const; + static SingleBuiltPath parse(const StoreDirConfig & store, std::string_view); + nlohmann::json toJSON(const StoreDirConfig & store) const; }; static inline ref staticDrv(StorePath drvPath) @@ -59,9 +59,9 @@ struct BuiltPathBuilt { ref drvPath; std::map outputs; - std::string to_string(const Store & store) const; - static BuiltPathBuilt parse(const Store & store, std::string_view, std::string_view); - nlohmann::json toJSON(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; + static BuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view); + nlohmann::json toJSON(const StoreDirConfig & store) const; DECLARE_CMP(BuiltPathBuilt); }; @@ -89,7 +89,7 @@ struct BuiltPath : _BuiltPathRaw { StorePathSet outPaths() const; RealisedPath::Set toRealisedPaths(Store & store) const; - nlohmann::json toJSON(const Store & store) const; + nlohmann::json toJSON(const StoreDirConfig & store) const; }; typedef std::vector BuiltPaths; diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index de9f546fc..369fa6004 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -1,4 +1,5 @@ #include "command.hh" +#include "markdown.hh" #include "store-api.hh" #include "local-fs-store.hh" #include "derivations.hh" @@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON() return MultiCommand::toJSON(); } +void NixMultiCommand::run() +{ + 
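A standalone sketch (not from the patch, standard library only) of why the CMP macro above switched from `std::make_tuple` to `std::tie`: `std::tie` builds a tuple of references, so the two field sets are compared lexicographically without first copying every field.

```cpp
#include <cassert>
#include <string>
#include <tuple>

struct Built
{
    std::string drvPath;
    std::string output;

    bool operator<(const Built & other) const
    {
        // Compares drvPath first, then output; no temporary copies are made.
        return std::tie(drvPath, output) < std::tie(other.drvPath, other.output);
    }
};

int main()
{
    assert((Built{"a.drv", "dev"} < Built{"a.drv", "out"}));
    assert(!(Built{"b.drv", "out"} < Built{"a.drv", "out"}));
}
```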
if (!command) { + std::set<std::string> subCommandTextLines; + for (auto & [name, _] : commands) + subCommandTextLines.insert(fmt("- `%s`", name)); + std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n", + commandName, concatStringsSep("\n", subCommandTextLines)); + throw UsageError(renderMarkdownToTerminal(markdownError)); + } + command->second->run(); +} + StoreCommand::StoreCommand() { } diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index 120c832ac..4a72627ed 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -26,9 +26,13 @@ static constexpr Command::Category catNixInstallation = 102; static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)"; -struct NixMultiCommand : virtual MultiCommand, virtual Command +struct NixMultiCommand : MultiCommand, virtual Command { nlohmann::json toJSON() override; + + using MultiCommand::MultiCommand; + + virtual void run() override; }; // For the overloaded run methods diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 401acc38e..193972272 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -141,7 +141,7 @@ MixEvalArgs::MixEvalArgs() .longName = "eval-store", .description = R"( - The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) + The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format) to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them. )", .category = category, diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index 619d3673f..67653d9c9 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -1,5 +1,6 @@ #include "editor-for.hh" #include "environment-variables.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libcmd/editor-for.hh b/src/libcmd/editor-for.hh index fbf4307c9..8acd7011e 100644 --- a/src/libcmd/editor-for.hh +++ b/src/libcmd/editor-for.hh @@ -2,7 +2,7 @@ ///@file #include "types.hh" -#include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 2f428cb7e..ddec7537b 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -52,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs")); assert(aOutputs); - state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); }); + state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos)); return aOutputs->value; } diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index 08ad35105..c8a3e1b21 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -1,5 +1,6 @@ #include "installable-value.hh" #include "eval-cache.hh" +#include "fetch-to-store.hh" namespace nix { @@ -44,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable) std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) { if (v.type() == nPath) { - auto storePath = v.path().fetchToStore(state->store); + auto storePath = fetchToStore(*state->store, v.path()); return {{ .path = DerivedPath::Opaque { .path = std::move(storePath), diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index
1c6103020..736c41a1e 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -47,6 +47,22 @@ MixFlakeOptions::MixFlakeOptions() { auto category = "Common flake-related options"; + addFlag({ + .longName = "recreate-lock-file", + .description = R"( + Recreate the flake's lock file from scratch. + + > **DEPRECATED** + > + > Use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) instead. + )", + .category = category, + .handler = {[&]() { + lockFlags.recreateLockFile = true; + warn("'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead."); + }} + }); + addFlag({ .longName = "no-update-lock-file", .description = "Do not allow any updates to the flake's lock file.", @@ -63,8 +79,13 @@ MixFlakeOptions::MixFlakeOptions() addFlag({ .longName = "no-registries", - .description = - "Don't allow lookups in the flake registries. This option is deprecated; use `--no-use-registries`.", + .description = R"( + Don't allow lookups in the flake registries. + + > **DEPRECATED** + > + > Use [`--no-use-registries`](#opt-no-use-registries) instead. + )", .category = category, .handler = {[&]() { lockFlags.useRegistries = false; @@ -79,6 +100,26 @@ MixFlakeOptions::MixFlakeOptions() .handler = {&lockFlags.commitLockFile, true} }); + addFlag({ + .longName = "update-input", + .description = R"( + Update a specific flake input (ignoring its previous entry in the lock file). + + > **DEPRECATED** + > + > Use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) instead. + )", + .category = category, + .labels = {"input-path"}, + .handler = {[&](std::string s) { + warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version."); + lockFlags.inputUpdates.insert(flake::parseInputPath(s)); + }}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); + }} + }); + addFlag({ .longName = "override-input", .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). 
This implies `--no-write-lock-file`.", @@ -219,9 +260,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s evalSettings.pureEval = false; auto state = getEvalState(); - Expr *e = state->parseExprFromFile( - resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file))) - ); + auto e = + state->parseExprFromFile( + resolveExprPath( + lookupFileArg(*state, *file))); Value root; state->eval(e, root); @@ -673,7 +715,7 @@ BuiltPaths Installable::toBuiltPaths( } } -StorePathSet Installable::toStorePaths( +StorePathSet Installable::toStorePathSet( ref evalStore, ref store, Realise mode, OperateOn operateOn, @@ -687,13 +729,27 @@ StorePathSet Installable::toStorePaths( return outPaths; } +StorePaths Installable::toStorePaths( + ref evalStore, + ref store, + Realise mode, OperateOn operateOn, + const Installables & installables) +{ + StorePaths outPaths; + for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) { + auto thisOutPaths = path.outPaths(); + outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end()); + } + return outPaths; +} + StorePath Installable::toStorePath( ref evalStore, ref store, Realise mode, OperateOn operateOn, ref installable) { - auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable}); + auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable}); if (paths.size() != 1) throw Error("argument '%s' should evaluate to one store path", installable->what()); diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index e087f935c..95e8841ca 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -165,7 +165,14 @@ struct Installable const Installables & installables, BuildMode bMode = bmNormal); - static std::set toStorePaths( + static std::set toStorePathSet( + ref evalStore, + ref store, + Realise mode, + OperateOn operateOn, + const Installables & installables); + + static std::vector toStorePaths( ref evalStore, ref store, Realise mode, diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk index 541a7d2ba..abb7459a7 100644 --- a/src/libcmd/local.mk +++ b/src/libcmd/local.mk @@ -8,8 +8,8 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc) libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread +libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS) libcmd_LIBS = libstore libutil libexpr libmain libfetchers -$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644)) +$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644)) diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc index 8b3bbc1b5..a4e3c5a77 100644 --- a/src/libcmd/markdown.cc +++ b/src/libcmd/markdown.cc @@ -4,12 +4,15 @@ #include "terminal.hh" #include +#if HAVE_LOWDOWN #include +#endif namespace nix { std::string renderMarkdownToTerminal(std::string_view markdown) { +#if HAVE_LOWDOWN int windowWidth = getWindowSize().second; struct lowdown_opts opts { @@ -48,6 +51,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown) throw Error("allocation error while rendering Markdown"); return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI()); +#else + return std::string(markdown); +#endif } } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index bf5643a5c..7a1df74ef 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -5,7 +5,7 @@ #include -#ifdef READLINE +#ifdef 
USE_READLINE #include #include #else @@ -43,7 +43,6 @@ extern "C" { #include "finally.hh" #include "markdown.hh" #include "local-fs-store.hh" -#include "progress-bar.hh" #include "print.hh" #if HAVE_BOEHMGC @@ -113,7 +112,7 @@ NixRepl::NixRepl(const SearchPath & searchPath, nix::ref store, refstaticBaseEnv.get())) + , staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get())) , historyFile(getDataDir() + "/nix/repl-history") { } @@ -222,7 +221,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi // prefer direct pos, but if noPos then try the expr. auto pos = dt.pos ? dt.pos - : static_cast>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]); + : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; if (pos) { out << pos; @@ -250,25 +249,23 @@ void NixRepl::mainLoop() } catch (SysError & e) { logWarning(e.info()); } -#ifndef READLINE +#ifndef USE_READLINE el_hist_size = 1000; #endif read_history(historyFile.c_str()); auto oldRepl = curRepl; curRepl = this; Finally restoreRepl([&] { curRepl = oldRepl; }); -#ifndef READLINE +#ifndef USE_READLINE rl_set_complete_func(completionCallback); rl_set_list_possib_func(listPossibleCallback); #endif - /* Stop the progress bar because it interferes with the display of - the repl. */ - stopProgressBar(); - std::string input; while (true) { + // Hide the progress bar while waiting for user input, so that it won't interfere. + logger->pause(); // When continuing input from previous lines, don't print a prompt, just align to the same // number of chars as the prompt. if (!getLine(input, input.empty() ? "nix-repl> " : " ")) { @@ -278,6 +275,7 @@ void NixRepl::mainLoop() logger->cout(""); break; } + logger->resume(); try { if (!removeWhitespace(input).empty() && !processLine(input)) return; } catch (ParseError & e) { @@ -890,7 +888,7 @@ void NixRepl::evalString(std::string s, Value & v) { Expr * e = parseString(s); e->eval(*state, *env, v); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); } @@ -909,7 +907,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str.flush(); checkInterrupt(); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); switch (v.type()) { diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 6c0e33709..5808d58b6 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -22,7 +22,7 @@ struct AttrDb { std::atomic_bool failed{false}; - const Store & cfg; + const StoreDirConfig & cfg; struct State { @@ -39,7 +39,7 @@ struct AttrDb SymbolTable & symbols; AttrDb( - const Store & cfg, + const StoreDirConfig & cfg, const Hash & fingerprint, SymbolTable & symbols) : cfg(cfg) @@ -323,7 +323,7 @@ struct AttrDb }; static std::shared_ptr makeAttrDb( - const Store & cfg, + const StoreDirConfig & cfg, const Hash & fingerprint, SymbolTable & symbols) { diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index a988fa40c..f7710f819 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -73,8 +73,6 @@ Env & EvalState::allocEnv(size_t size) #endif env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *)); - env->type = Env::Plain; - /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. 
*/ return *env; @@ -83,13 +81,6 @@ Env & EvalState::allocEnv(size_t size) [[gnu::always_inline]] void EvalState::forceValue(Value & v, const PosIdx pos) -{ - forceValue(v, [&]() { return pos; }); -} - - -template -void EvalState::forceValue(Value & v, Callable getPos) { if (v.isThunk()) { Env * env = v.thunk.env; @@ -100,13 +91,12 @@ void EvalState::forceValue(Value & v, Callable getPos) expr->eval(*this, *env, v); } catch (...) { v.mkThunk(env, expr); + tryFixupBlackHolePos(v, pos); throw; } } else if (v.isApp()) - callFunction(*v.app.left, *v.app.right, v, noPos); - else if (v.isBlackhole()) - error("infinite recursion encountered").atPos(getPos()).template debugThrow(); + callFunction(*v.app.left, *v.app.right, v, pos); } @@ -121,9 +111,9 @@ template [[gnu::always_inline]] inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx) { - forceValue(v, noPos); + PosIdx pos = getPos(); + forceValue(v, pos); if (v.type() != nAttrs) { - PosIdx pos = getPos(); error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); } } @@ -132,7 +122,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e [[gnu::always_inline]] inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx) { - forceValue(v, noPos); + forceValue(v, pos); if (!v.isList()) { error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); } diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 444a7d7d6..2ccbe327f 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -89,6 +89,12 @@ std::string EvalSettings::resolvePseudoUrl(std::string_view url) return std::string(url); } +const std::string & EvalSettings::getCurrentSystem() +{ + const auto & evalSystem = currentSystem.get(); + return evalSystem != "" ? evalSystem : settings.thisSystem.get(); +} + EvalSettings evalSettings; static GlobalConfig::Register rEvalSettings(&evalSettings); diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index db2971acb..2f6c12d45 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -27,6 +27,26 @@ struct EvalSettings : Config [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath). )"}; + Setting currentSystem{ + this, "", "eval-system", + R"( + This option defines + [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) + in the Nix language if it is set as a non-empty string. + Otherwise, if it is defined as the empty string (the default), the value of the + [`system` ](#conf-system) + configuration setting is used instead. + + Unlike `system`, this setting does not change what kind of derivations can be built locally. + This is useful for evaluating Nix code on one system to produce derivations to be built on another type of system. + )"}; + + /** + * Implements the `eval-system` vs `system` defaulting logic + * described for `eval-system`. + */ + const std::string & getCurrentSystem(); + Setting restrictEval{ this, false, "restrict-eval", R"( @@ -68,6 +88,11 @@ struct EvalSettings : Config evaluation mode. For example, when set to `https://github.com/NixOS`, builtin functions such as `fetchGit` are allowed to access `https://github.com/NixOS/patchelf.git`. 
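A minimal sketch, assuming only the standard library, of the defaulting logic that `getCurrentSystem()` above implements: a non-empty `eval-system` takes precedence, otherwise the ordinary `system` setting applies.

```cpp
#include <cassert>
#include <string>

// Hypothetical free-function rendition of the eval-system/system fallback.
const std::string & currentSystemFor(const std::string & evalSystem, const std::string & system)
{
    return evalSystem.empty() ? system : evalSystem;
}

int main()
{
    const std::string system = "x86_64-linux";
    assert(currentSystemFor("", system) == "x86_64-linux");               // default: follow `system`
    assert(currentSystemFor("aarch64-linux", system) == "aarch64-linux"); // explicit eval-system wins
}
```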
+ + Access is granted when + - the URI is equal to the prefix, + - or the URI is a subpath of the prefix, + - or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme. )"}; Setting traceFunctionCalls{this, false, "trace-function-calls", @@ -99,6 +124,9 @@ struct EvalSettings : Config Setting traceVerbose{this, false, "trace-verbose", "Whether `builtins.traceVerbose` should trace its first argument when evaluated."}; + + Setting maxCallDepth{this, 10000, "max-call-depth", + "The maximum function call depth to allow before erroring."}; }; extern EvalSettings evalSettings; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index dfe81cbf7..d408f1adc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1,6 +1,7 @@ #include "eval.hh" #include "eval-settings.hh" #include "hash.hh" +#include "primops.hh" #include "types.hh" #include "util.hh" #include "store-api.hh" @@ -13,8 +14,12 @@ #include "profiles.hh" #include "print.hh" #include "fs-input-accessor.hh" +#include "filtering-input-accessor.hh" #include "memory-input-accessor.hh" #include "signals.hh" +#include "gc-small-vector.hh" +#include "url.hh" +#include "fetch-to-store.hh" #include #include @@ -30,6 +35,7 @@ #include #include +#include #if HAVE_BOEHMGC @@ -158,7 +164,17 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, break; case tThunk: case tApp: - str << ""; + if (!isBlackhole()) { + str << ""; + } else { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. + str << "«potential infinite recursion»"; + } break; case tLambda: str << ""; @@ -175,15 +191,6 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, case tFloat: str << fpoint; break; - case tBlackhole: - // Although we know for sure that it's going to be an infinite recursion - // when this value is accessed _in the current context_, it's likely - // that the user will misinterpret a simpler «infinite recursion» output - // as a definitive statement about the value, while in fact it may be - // a valid value after `builtins.trace` and perhaps some other steps - // have completed. - str << "«potential infinite recursion»"; - break; default: printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType); abort(); @@ -251,9 +258,8 @@ std::string showType(const Value & v) case tPrimOpApp: return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->primOp->name)); case tExternal: return v.external->showType(); - case tThunk: return "a thunk"; + case tThunk: return v.isBlackhole() ? 
"a black hole" : "a thunk"; case tApp: return "a function application"; - case tBlackhole: return "a black hole"; default: return std::string(showType(v.type())); } @@ -341,7 +347,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env) } else { Value nameValue; name.expr->eval(state, env, nameValue); - state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name"); + state.forceStringNoCtx(nameValue, name.expr->getPos(), "while evaluating an attribute name"); return state.symbols.create(nameValue.string_view()); } } @@ -506,7 +512,16 @@ EvalState::EvalState( , sOutputSpecified(symbols.create("outputSpecified")) , repair(NoRepair) , emptyBindings(0) - , rootFS(makeFSInputAccessor(CanonPath::root)) + , rootFS( + evalSettings.restrictEval || evalSettings.pureEval + ? ref(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {}, + [](const CanonPath & path) -> RestrictedPathError { + auto modeInformation = evalSettings.pureEval + ? "in pure evaluation mode (use '--impure' to override)" + : "in restricted mode"; + throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); + })) + : makeFSInputAccessor(CanonPath::root)) , corepkgsFS(makeMemoryInputAccessor()) , internalFS(makeMemoryInputAccessor()) , derivationInternal{corepkgsFS->addFile( @@ -529,14 +544,19 @@ EvalState::EvalState( , env1AllocCache(std::allocate_shared(traceable_allocator(), nullptr)) #endif , baseEnv(allocEnv(128)) - , staticBaseEnv{std::make_shared(false, nullptr)} + , staticBaseEnv{std::make_shared(nullptr, nullptr)} { + corepkgsFS->setPathDisplay(""); + internalFS->setPathDisplay("«nix-internal»", ""); + countCalls = getEnv("NIX_COUNT_CALLS").value_or("0") != "0"; assert(gcInitialised); static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); + vEmptyList.mkList(0); + /* Initialise the Nix expression search path. */ if (!evalSettings.pureEval) { for (auto & i : _searchPath.elements) @@ -545,28 +565,10 @@ EvalState::EvalState( searchPath.elements.emplace_back(SearchPath::Elem::parse(i)); } - if (evalSettings.restrictEval || evalSettings.pureEval) { - allowedPaths = PathSet(); - - for (auto & i : searchPath.elements) { - auto r = resolveSearchPathPath(i.path); - if (!r) continue; - - auto path = std::move(*r); - - if (store->isInStore(path)) { - try { - StorePathSet closure; - store->computeFSClosure(store->toStorePath(path).first, closure); - for (auto & path : closure) - allowPath(path); - } catch (InvalidPath &) { - allowPath(path); - } - } else - allowPath(path); - } - } + /* Allow access to all paths in the search path. 
*/ + if (rootFS.dynamic_pointer_cast()) + for (auto & i : searchPath.elements) + resolveSearchPathPath(i.path, true); corepkgsFS->addFile( CanonPath("fetchurl.nix"), @@ -584,14 +586,14 @@ EvalState::~EvalState() void EvalState::allowPath(const Path & path) { - if (allowedPaths) - allowedPaths->insert(path); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->allowPath(CanonPath(path)); } void EvalState::allowPath(const StorePath & storePath) { - if (allowedPaths) - allowedPaths->insert(store->toRealPath(storePath)); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->allowPath(CanonPath(store->toRealPath(storePath))); } void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v) @@ -601,79 +603,57 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & mkStorePathString(storePath, v); } -SourcePath EvalState::checkSourcePath(const SourcePath & path_) +inline static bool isJustSchemePrefix(std::string_view prefix) { - // Don't check non-rootFS accessors, they're in a different namespace. - if (path_.accessor != ref(rootFS)) return path_; - - if (!allowedPaths) return path_; - - auto i = resolvedPaths.find(path_.path.abs()); - if (i != resolvedPaths.end()) - return i->second; - - bool found = false; - - /* First canonicalize the path without symlinks, so we make sure an - * attacker can't append ../../... to a path that would be in allowedPaths - * and thus leak symlink targets. - */ - Path abspath = canonPath(path_.path.abs()); - - for (auto & i : *allowedPaths) { - if (isDirOrInDir(abspath, i)) { - found = true; - break; - } - } - - if (!found) { - auto modeInformation = evalSettings.pureEval - ? "in pure eval mode (use '--impure' to override)" - : "in restricted mode"; - throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation); - } - - /* Resolve symlinks. */ - debug("checking access to '%s'", abspath); - SourcePath path = rootPath(CanonPath(canonPath(abspath, true))); - - for (auto & i : *allowedPaths) { - if (isDirOrInDir(path.path.abs(), i)) { - resolvedPaths.insert_or_assign(path_.path.abs(), path); - return path; - } - } - - throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path); + return + !prefix.empty() + && prefix[prefix.size() - 1] == ':' + && isValidSchemeName(prefix.substr(0, prefix.size() - 1)); } +bool isAllowedURI(std::string_view uri, const Strings & allowedUris) +{ + /* 'uri' should be equal to a prefix, or in a subdirectory of a + prefix. Thus, the prefix https://github.co does not permit + access to https://github.com. */ + for (auto & prefix : allowedUris) { + if (uri == prefix + // Allow access to subdirectories of the prefix. + || (uri.size() > prefix.size() + && prefix.size() > 0 + && hasPrefix(uri, prefix) + && ( + // Allow access to subdirectories of the prefix. + prefix[prefix.size() - 1] == '/' + || uri[prefix.size()] == '/' + + // Allow access to whole schemes + || isJustSchemePrefix(prefix) + ) + )) + return true; + } + + return false; +} void EvalState::checkURI(const std::string & uri) { if (!evalSettings.restrictEval) return; - /* 'uri' should be equal to a prefix, or in a subdirectory of a - prefix. Thus, the prefix https://github.co does not permit - access to https://github.com. Note: this allows 'http://' and - 'https://' as prefixes for any http/https URI. 
*/ - for (auto & prefix : evalSettings.allowedUris.get()) - if (uri == prefix || - (uri.size() > prefix.size() - && prefix.size() > 0 - && hasPrefix(uri, prefix) - && (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/'))) - return; + if (isAllowedURI(uri, evalSettings.allowedUris.get())) return; /* If the URI is a path, then check it against allowedPaths as well. */ if (hasPrefix(uri, "/")) { - checkSourcePath(rootPath(CanonPath(uri))); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->checkAccess(CanonPath(uri)); return; } if (hasPrefix(uri, "file://")) { - checkSourcePath(rootPath(CanonPath(std::string(uri, 7)))); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->checkAccess(CanonPath(uri.substr(7))); return; } @@ -722,6 +702,23 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info) } +void PrimOp::check() +{ + if (arity > maxPrimOpArity) { + throw Error("primop arity must not exceed %1%", maxPrimOpArity); + } +} + + +void Value::mkPrimOp(PrimOp * p) +{ + p->check(); + clearValue(); + internalType = tPrimOp; + primOp = p; +} + + Value * EvalState::addPrimOp(PrimOp && primOp) { /* Hack to make constants lazy: turn them into a application of @@ -787,7 +784,7 @@ void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se) // just for the current level of Env, not the whole chain. void printWithBindings(const SymbolTable & st, const Env & env) { - if (env.type == Env::HasWithAttrs) { + if (!env.values[0]->isThunk()) { std::cout << "with: "; std::cout << ANSI_MAGENTA; Bindings::iterator j = env.values[0]->attrs->begin(); @@ -841,7 +838,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En if (env.up && se.up) { mapStaticEnvBindings(st, *se.up, *env.up, vm); - if (env.type == Env::HasWithAttrs) { + if (!env.values[0]->isThunk()) { // add 'with' bindings. Bindings::iterator j = env.values[0]->attrs->begin(); while (j != env.values[0]->attrs->end()) { @@ -874,7 +871,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & ? std::make_unique( *this, DebugTrace { - .pos = error->info().errPos ? error->info().errPos : static_cast>(positions[expr.getPos()]), + .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()], .expr = expr, .env = env, .hint = error->info().msg, @@ -913,7 +910,7 @@ static std::unique_ptr makeDebugTraceStacker( EvalState & state, Expr & expr, Env & env, - std::shared_ptr && pos, + std::shared_ptr && pos, const char * s, const std::string & s2) { @@ -979,22 +976,23 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) if (!var.fromWith) return env->values[var.displ]; + // This early exit defeats the `maybeThunk` optimization for variables from `with`, + // The added complexity of handling this appears to be similarly in cost, or + // the cases where applicable were insignificant in the first place. 
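A test-style sketch of the three grant rules that `isAllowedURI()` above encodes; `allowed()` here is a simplified stand-in (the real code additionally validates the scheme name via `isValidSchemeName`).

```cpp
#include <cassert>
#include <string>
#include <vector>

static bool allowed(const std::string & uri, const std::vector<std::string> & prefixes)
{
    for (auto & p : prefixes) {
        if (uri == p) return true;                  // exact match
        if (!p.empty() && uri.size() > p.size()
            && uri.compare(0, p.size(), p) == 0
            && (p.back() == '/'                     // prefix ends at a path boundary
                || uri[p.size()] == '/'             // or the URI continues with one
                || p.back() == ':'))                // or the prefix is a whole scheme
            return true;
    }
    return false;
}

int main()
{
    const std::vector<std::string> prefixes{"https://github.com/NixOS", "github:"};
    assert(allowed("https://github.com/NixOS", prefixes));              // equal to a prefix
    assert(allowed("https://github.com/NixOS/patchelf.git", prefixes)); // subpath of a prefix
    assert(allowed("github:NixOS/nixpkgs", prefixes));                  // whole-scheme prefix
    assert(!allowed("https://github.com", {"https://github.co"}));      // no path boundary
}
```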
+ if (noEval) return nullptr; + + auto * fromWith = var.fromWith; while (1) { - if (env->type == Env::HasWithExpr) { - if (noEval) return 0; - Value * v = allocValue(); - evalAttrs(*env->up, (Expr *) env->values[0], *v, noPos, ""); - env->values[0] = v; - env->type = Env::HasWithAttrs; - } + forceAttrs(*env->values[0], fromWith->pos, "while evaluating the first subexpression of a with expression"); Bindings::iterator j = env->values[0]->attrs->find(var.name); if (j != env->values[0]->attrs->end()) { if (countCalls) attrSelects[j->pos]++; return j->value; } - if (!env->prevWith) + if (!fromWith->parentWith) error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); - for (size_t l = env->prevWith; l; --l, env = env->up) ; + for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; + fromWith = fromWith->parentWith; } } @@ -1158,10 +1156,8 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) } -void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial) +void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) { - auto path = checkSourcePath(path_); - FileEvalCache::iterator i; if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) { v = i->second; @@ -1182,7 +1178,7 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial e = j->second; if (!e) - e = parseExprFromFile(checkSourcePath(resolvedPath)); + e = parseExprFromFile(resolvedPath); fileParseCache[resolvedPath] = e; @@ -1192,7 +1188,7 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial *this, *e, this->baseEnv, - e->getPos() ? static_cast>(positions[e->getPos()]) : nullptr, + e->getPos() ? std::make_shared(positions[e->getPos()]) : nullptr, "while evaluating the file '%1%':", resolvedPath.to_string()) : nullptr; @@ -1392,6 +1388,15 @@ void ExprList::eval(EvalState & state, Env & env, Value & v) } +Value * ExprList::maybeThunk(EvalState & state, Env & env) +{ + if (elems.empty()) { + return &state.vEmptyList; + } + return Expr::maybeThunk(state, env); +} + + void ExprVar::eval(EvalState & state, Env & env, Value & v) { Value * v2 = state.lookupVar(&env, *this, false); @@ -1491,7 +1496,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v) e->eval(state, env, vTmp); for (auto & i : attrPath) { - state.forceValue(*vAttrs, noPos); + state.forceValue(*vAttrs, getPos()); Bindings::iterator j; auto name = getName(i, state, env); if (vAttrs->type() != nAttrs || @@ -1513,9 +1518,27 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v) v.mkLambda(&env, this); } +namespace { +/** Increments a count on construction and decrements on destruction. + */ +class CallDepth { + size_t & count; +public: + CallDepth(size_t & count) : count(count) { + ++count; + } + ~CallDepth() { + --count; + } +}; +}; void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { + if (callDepth > evalSettings.maxCallDepth) + error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow(); + CallDepth _level(callDepth); + auto trace = evalSettings.traceFunctionCalls ? std::make_unique(positions[pos]) : nullptr; @@ -1654,15 +1677,15 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & return; } else { /* We have all the arguments, so call the primop. 
*/ - auto name = vCur.primOp->name; + auto * fn = vCur.primOp; nrPrimOpCalls++; - if (countCalls) primOpCalls[name]++; + if (countCalls) primOpCalls[fn->name]++; try { - vCur.primOp->fun(*this, noPos, args, vCur); + fn->fun(*this, vCur.determinePos(noPos), args, vCur); } catch (Error & e) { - addErrorTrace(e, pos, "while calling the '%1%' builtin", name); + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -1691,7 +1714,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & /* We have all the arguments, so call the primop with the previous and new arguments. */ - Value * vArgs[arity]; + Value * vArgs[maxPrimOpArity]; auto n = argsDone; for (Value * arg = &vCur; arg->isPrimOpApp(); arg = arg->primOpApp.left) vArgs[--n] = arg->primOpApp.right; @@ -1699,18 +1722,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & for (size_t i = 0; i < argsLeft; ++i) vArgs[argsDone + i] = args[i]; - auto name = primOp->primOp->name; + auto fn = primOp->primOp; nrPrimOpCalls++; - if (countCalls) primOpCalls[name]++; + if (countCalls) primOpCalls[fn->name]++; try { // TODO: // 1. Unify this and above code. Heavily redundant. // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc) // so the debugger allows to inspect the wrong parameters passed to the builtin. - primOp->primOp->fun(*this, noPos, vArgs, vCur); + fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { - addErrorTrace(e, pos, "while calling the '%1%' builtin", name); + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -1748,11 +1771,17 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v) Value vFun; fun->eval(state, env, vFun); - Value * vArgs[args.size()]; + // Empirical arity of Nixpkgs lambdas by regex e.g. ([a-zA-Z]+:(\s|(/\*.*\/)|(#.*\n))*){5} + // 2: over 4000 + // 3: about 300 + // 4: about 60 + // 5: under 10 + // This excluded attrset lambdas (`{...}:`). Contributions of mixed lambdas appears insignificant at ~150 total. + SmallValueVector<4> vArgs(args.size()); for (size_t i = 0; i < args.size(); ++i) vArgs[i] = args[i]->maybeThunk(state, env); - state.callFunction(vFun, args.size(), vArgs, v, pos); + state.callFunction(vFun, args.size(), vArgs.data(), v, pos); } @@ -1810,7 +1839,7 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo } } - callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos); + callFunction(fun, allocValue()->mkAttrs(attrs), res, pos); } @@ -1818,9 +1847,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v) { Env & env2(state.allocEnv(1)); env2.up = &env; - env2.prevWith = prevWith; - env2.type = Env::HasWithExpr; - env2.values[0] = (Value *) attrs; + env2.values[0] = attrs->maybeThunk(state, env); body->eval(state, env2, v); } @@ -1846,7 +1873,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) void ExprOpNot::eval(EvalState & state, Env & env, Value & v) { - v.mkBool(!state.evalBool(env, e, noPos, "in the argument of the not operator")); // XXX: FIXME: ! + v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: ! } @@ -1991,8 +2018,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) return result; }; - Value values[es->size()]; - Value * vTmpP = values; + // List of returned strings. References to these Values must NOT be persisted. 
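A sketch of the small-buffer behaviour that motivates `SmallValueVector<4>` above: with the argument counts cited in the comment, almost all calls fit the inline buffer and never allocate. It uses the same Boost container that the new gc-small-vector.hh header (further down) wraps; Boost headers are assumed to be available.

```cpp
#include <boost/container/small_vector.hpp>
#include <cassert>

int main()
{
    boost::container::small_vector<int, 4> args;
    for (int i = 0; i < 3; ++i)
        args.push_back(i);   // 3 <= 4: stored in the in-object buffer, no heap allocation
    assert(args.size() == 3);

    for (int i = 3; i < 10; ++i)
        args.push_back(i);   // past the inline capacity: spills to the heap transparently
    assert(args.size() == 10 && args[9] == 9);
}
```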
+ SmallTemporaryValueVector values(es->size()); + Value * vTmpP = values.data(); for (auto & [i_pos, i] : *es) { Value & vTmp = *vTmpP++; @@ -2057,6 +2085,29 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v) } +void ExprBlackHole::eval(EvalState & state, Env & env, Value & v) +{ + state.error("infinite recursion encountered") + .debugThrow(); +} + +// always force this to be separate, otherwise forceValue may inline it and take +// a massive perf hit +[[gnu::noinline]] +void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) +{ + if (!v.isBlackhole()) + return; + auto e = std::current_exception(); + try { + std::rethrow_exception(e); + } catch (InfiniteRecursionError & e) { + e.err.errPos = positions[pos]; + } catch (...) { + } +} + + void EvalState::forceValueDeep(Value & v) { std::set seen; @@ -2066,7 +2117,7 @@ void EvalState::forceValueDeep(Value & v) recurse = [&](Value & v) { if (!seen.insert(&v).second) return; - forceValue(v, [&]() { return v.determinePos(noPos); }); + forceValue(v, v.determinePos(noPos)); if (v.type() == nAttrs) { for (auto & i : *v.attrs) @@ -2286,7 +2337,7 @@ BackedStringView EvalState::coerceToString( std::string result; for (auto [n, v2] : enumerate(v.listItems())) { try { - result += *coerceToString(noPos, *v2, context, + result += *coerceToString(pos, *v2, context, "while evaluating one element of the list", coerceMore, copyToStore, canonicalizePath); } catch (Error & e) { @@ -2318,7 +2369,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? i->second : [&]() { - auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); + auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); @@ -2433,8 +2484,8 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx) { - forceValue(v1, noPos); - forceValue(v2, noPos); + forceValue(v1, pos); + forceValue(v2, pos); /* !!! Hack to support some old broken code that relies on pointer equality tests between sets. (Specifically, builderDefs calls @@ -2458,7 +2509,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.boolean == v2.boolean; case nString: - return v1.string_view().compare(v2.string_view()) == 0; + return strcmp(v1.c_str(), v2.c_str()) == 0; case nPath: return diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 048dff42b..5e0f1886d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -18,13 +18,18 @@ namespace nix { +/** + * We put a limit on primop arity because it lets us use a fixed size array on + * the stack. 8 is already an impractical number of arguments. Use an attrset + * argument for such overly complicated functions. + */ +constexpr size_t maxPrimOpArity = 8; class Store; class EvalState; class StorePath; struct SingleDerivedPath; enum RepairFlag : bool; -struct FSInputAccessor; struct MemoryInputAccessor; @@ -71,6 +76,12 @@ struct PrimOp * Optional experimental for this to be gated on. */ std::optional experimentalFeature; + + /** + * Validity check to be performed by functions that introduce primops, + * such as RegisterPrimOp() and Value::mkPrimOp(). 
+ */ + void check(); }; /** @@ -105,11 +116,6 @@ struct Constant struct Env { Env * up; - /** - * Number of of levels up to next `with` environment - */ - unsigned short prevWith:14; - enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2; Value * values[0]; }; @@ -136,7 +142,7 @@ struct RegexCache; std::shared_ptr makeRegexCache(); struct DebugTrace { - std::shared_ptr pos; + std::shared_ptr pos; const Expr & expr; const Env & env; hintformat hint; @@ -205,18 +211,17 @@ public: */ RepairFlag repair; - /** - * The allowed filesystem paths in restricted or pure evaluation - * mode. - */ - std::optional allowedPaths; - Bindings emptyBindings; + /** + * Empty list constant. + */ + Value vEmptyList; + /** * The accessor for the root filesystem. */ - const ref rootFS; + const ref rootFS; /** * The in-memory filesystem for paths. @@ -330,11 +335,6 @@ private: std::map> searchPathResolved; - /** - * Cache used by checkSourcePath(). - */ - std::unordered_map resolvedPaths; - /** * Cache used by prim_match(). */ @@ -384,12 +384,6 @@ public: */ void allowAndSetStorePathString(const StorePath & storePath, Value & v); - /** - * Check whether access to a path is allowed and throw an error if - * not. Otherwise return the canonicalised path. - */ - SourcePath checkSourcePath(const SourcePath & path); - void checkURI(const std::string & uri); /** @@ -433,13 +427,15 @@ public: SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); /** - * Try to resolve a search path value (not the optional key part) + * Try to resolve a search path value (not the optional key part). * * If the specified search path element is a URI, download it. * * If it is not found, return `std::nullopt` */ - std::optional resolveSearchPathPath(const SearchPath::Path & path); + std::optional resolveSearchPathPath( + const SearchPath::Path & elem, + bool initAccessControl = false); /** * Evaluate an expression to normal form @@ -464,8 +460,7 @@ public: */ inline void forceValue(Value & v, const PosIdx pos); - template - inline void forceValue(Value & v, Callable getPos); + void tryFixupBlackHolePos(Value & v, PosIdx pos); /** * Force a value, then recursively force list elements and @@ -627,6 +622,11 @@ private: const SourcePath & basePath, std::shared_ptr & staticEnv); + /** + * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack. + */ + size_t callDepth = 0; + public: /** @@ -744,6 +744,13 @@ public: */ [[nodiscard]] StringMap realiseContext(const NixStringContext & context); + /* Call the binary path filter predicate used builtins.path etc. */ + bool callPathFilter( + Value * filterFun, + const SourcePath & path, + std::string_view pathArg, + PosIdx pos); + private: /** @@ -827,7 +834,12 @@ std::string showType(const Value & v); /** * If `path` refers to a directory, then append "/default.nix". 
*/ -SourcePath resolveExprPath(const SourcePath & path); +SourcePath resolveExprPath(SourcePath path); + +/** + * Whether a URI is allowed, assuming restrictEval is enabled + */ +bool isAllowedURI(std::string_view uri, const Strings & allowedPaths); struct InvalidPathError : EvalError { diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 54de53e0b..fee58792b 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -212,8 +212,16 @@ static Flake getFlake( auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree( state, originalRef, allowLookup, flakeCache); + // We need to guard against symlink attacks, but before we start doing + // filesystem operations we should make sure there's a flake.nix in the + // first place. + auto unsafeFlakeDir = state.store->toRealPath(storePath) + "/" + lockedRef.subdir; + auto unsafeFlakeFile = unsafeFlakeDir + "/flake.nix"; + if (!pathExists(unsafeFlakeFile)) + throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir); + // Guard against symlink attacks. - auto flakeDir = canonPath(state.store->toRealPath(storePath) + "/" + lockedRef.subdir, true); + auto flakeDir = canonPath(unsafeFlakeDir, true); auto flakeFile = canonPath(flakeDir + "/flake.nix", true); if (!isInDir(flakeFile, state.store->toRealPath(storePath))) throw Error("'flake.nix' file of flake '%s' escapes from '%s'", @@ -226,9 +234,6 @@ static Flake getFlake( .storePath = storePath, }; - if (!pathExists(flakeFile)) - throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir); - Value vInfo; state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack @@ -358,10 +363,13 @@ LockedFlake lockFlake( debug("old lock file: %s", oldLockFile); std::map overrides; + std::set explicitCliOverrides; std::set overridesUsed, updatesUsed; - for (auto & i : lockFlags.inputOverrides) + for (auto & i : lockFlags.inputOverrides) { overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second }); + explicitCliOverrides.insert(i.first); + } LockFile newLockFile; @@ -432,6 +440,7 @@ LockedFlake lockFlake( ancestors? */ auto i = overrides.find(inputPath); bool hasOverride = i != overrides.end(); + bool hasCliOverride = explicitCliOverrides.contains(inputPath); if (hasOverride) { overridesUsed.insert(inputPath); // Respect the “flakeness” of the input even if we @@ -467,7 +476,7 @@ LockedFlake lockFlake( if (oldLock && oldLock->originalRef == *input.ref - && !hasOverride) + && !hasCliOverride) { debug("keeping existing input '%s'", inputPathS); @@ -547,7 +556,7 @@ LockedFlake lockFlake( nuked the next time we update the lock file. That is, overrides are sticky unless you use --no-write-lock-file. */ - auto ref = input2.ref ? *input2.ref : *input.ref; + auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref; if (input.isFlake) { Path localPath = parentPath; @@ -895,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const // FIXME: as an optimization, if the flake contains a lock file // and we haven't changed it, then it's sufficient to use // flake.sourceInfo.storePath for the fingerprint. 
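The generic shape of the `getFlake()` reordering above, sketched with `std::filesystem` rather than Nix's own helpers: check that flake.nix exists before canonicalizing, so the user sees the likely error (no flake.nix) instead of a canonicalization failure on a path that never existed.

```cpp
#include <filesystem>
#include <iostream>
#include <stdexcept>

namespace fs = std::filesystem;

fs::path resolveFlakeFile(const fs::path & dir)
{
    auto unsafeFlakeFile = dir / "flake.nix";
    if (!fs::exists(unsafeFlakeFile))      // likeliest user error, reported first
        throw std::runtime_error("source tree does not contain a flake.nix");
    return fs::canonical(unsafeFlakeFile); // only then resolve symlinks
}

int main()
{
    try {
        resolveFlakeFile("/no/such/tree");
    } catch (const std::runtime_error & e) {
        std::cerr << e.what() << "\n";
    }
}
```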
- return hashString(htSHA256, + return hashString(HashAlgorithm::SHA256, fmt("%s;%s;%d;%d;%s", flake.storePath.to_string(), flake.lockedRef.subdir, diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index 16f45ace7..86a0982f3 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -90,7 +90,7 @@ std::pair parsePathFlakeRefWithFragment( fragment = percentDecode(url.substr(fragmentStart+1)); } if (pathEnd != std::string::npos && fragmentStart != std::string::npos) { - query = decodeQuery(url.substr(pathEnd+1, fragmentStart)); + query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1)); } if (baseDir) { @@ -190,7 +190,7 @@ std::optional> parseFlakeIdRef( static std::regex flakeRegex( "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)" - + "(?:#(" + queryRegex + "))?", + + "(?:#(" + fragmentRegex + "))?", std::regex::ECMAScript); if (std::regex_match(url, match, flakeRegex)) { diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc new file mode 100644 index 000000000..753f197d5 --- /dev/null +++ b/src/libexpr/flake/url-name.cc @@ -0,0 +1,48 @@ +#include "url-name.hh" +#include +#include + +namespace nix { + +static const std::string attributeNamePattern("[a-zA-Z0-9_-]+"); +static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?"); +static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+"); +static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")"); +static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?"); +static const std::regex gitProviderRegex("github|gitlab|sourcehut"); +static const std::regex gitSchemeRegex("git($|\\+.*)"); +static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)"); + +std::optional getNameFromURL(const ParsedURL & url) +{ + std::smatch match; + + /* If there is a dir= argument, use its value */ + if (url.query.count("dir") > 0) + return url.query.at("dir"); + + /* If the fragment isn't a "default" and contains two attribute elements, use the last one */ + if (std::regex_match(url.fragment, match, lastAttributeRegex)) + return match.str(1); + + /* If this is a github/gitlab/sourcehut flake, use the repo name */ + if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex)) + return match.str(1); + + /* If it is a regular git flake, use the directory name */ + if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex)) + return match.str(1); + + /* If everything failed but there is a non-default fragment, use it in full */ + if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex)) + return url.fragment; + + /* If there is no fragment, take the last element of the path */ + if (std::regex_match(url.path, match, lastPathSegmentRegex)) + return match.str(1); + + /* If even that didn't work, the URL does not contain enough info to determine a useful name */ + return {}; +} + +} diff --git a/src/libexpr/flake/url-name.hh b/src/libexpr/flake/url-name.hh new file mode 100644 index 000000000..6f32754d2 --- /dev/null +++ b/src/libexpr/flake/url-name.hh @@ -0,0 +1,20 @@ +#include "url.hh" +#include "url-parts.hh" +#include "util.hh" +#include "split.hh" + +namespace nix { + +/** + * Try to extract a reasonably unique and meaningful, human-readable + * name of a flake output from a parsed URL. 
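A test-style sketch of what the heuristics in `getNameFromURL()` above should produce for typical inputs. It assumes linking against the Nix libraries; `parseURL` and the headers are taken from the surrounding patch, and the exact include paths may differ.

```cpp
#include <cassert>
#include "url.hh"
#include "flake/url-name.hh"

int main()
{
    using namespace nix;
    // A dir= query parameter wins outright.
    assert(getNameFromURL(parseURL("git+https://example.com/repo?dir=subflake")) == "subflake");
    // Last attribute of a non-"default" fragment.
    assert(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")) == "hello");
    // github/gitlab/sourcehut: the repository name.
    assert(getNameFromURL(parseURL("github:NixOS/nixpkgs")) == "nixpkgs");
    // Plain URL, no fragment: the last path segment.
    assert(getNameFromURL(parseURL("https://example.com/repos/myproject")) == "myproject");
}
```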
+ * When nullopt is returned, the callsite should use information available + * to it outside of the URL to determine a useful name. + * This is a heuristic approach intended for user interfaces. + * @return nullopt if the extracted name is not useful to identify a + * flake output, for example because it is empty or "default". + * Otherwise returns the extracted name. + */ +std::optional getNameFromURL(const ParsedURL & url); + +} diff --git a/src/libexpr/gc-small-vector.hh b/src/libexpr/gc-small-vector.hh new file mode 100644 index 000000000..7f4f08fc7 --- /dev/null +++ b/src/libexpr/gc-small-vector.hh @@ -0,0 +1,42 @@ +#pragma once + +#include + +#if HAVE_BOEHMGC + +#include +#include +#include + +#endif + +namespace nix { + +struct Value; + +/** + * A GC compatible vector that may used a reserved portion of `nItems` on the stack instead of allocating on the heap. + */ +#if HAVE_BOEHMGC +template +using SmallVector = boost::container::small_vector>; +#else +template +using SmallVector = boost::container::small_vector; +#endif + +/** + * A vector of value pointers. See `SmallVector`. + */ +template +using SmallValueVector = SmallVector; + +/** + * A vector of values that must not be referenced after the vector is destroyed. + * + * See also `SmallValueVector`. + */ +template +using SmallTemporaryValueVector = SmallVector; + +} \ No newline at end of file diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index d4e946d81..a6441871c 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -198,7 +198,7 @@ StringSet DrvInfo::queryMetaNames() bool DrvInfo::checkMeta(Value & v) { - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); if (v.type() == nList) { for (auto elem : v.listItems()) if (!checkMeta(*elem)) return false; @@ -304,7 +304,7 @@ static bool getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { try { - state.forceValue(v, [&]() { return v.determinePos(noPos); }); + state.forceValue(v, v.determinePos(noPos)); if (!state.isDerivation(v)) return true; /* Remove spurious duplicates (e.g., a set like `rec { x = diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index a3a8608d9..df2cbd06f 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -1,4 +1,5 @@ %option reentrant bison-bridge bison-locations +%option align %option noyywrap %option never-interactive %option stack @@ -35,9 +36,6 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) #define CUR_POS makeCurPos(*yylloc, data) -// backup to recover from yyless(0) -thread_local YYLTYPE prev_yylloc; - static void initLoc(YYLTYPE * loc) { loc->first_line = loc->last_line = 1; @@ -46,7 +44,7 @@ static void initLoc(YYLTYPE * loc) static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) { - prev_yylloc = *loc; + loc->stash(); loc->first_line = loc->last_line; loc->first_column = loc->last_column; @@ -230,7 +228,7 @@ or { return OR_KW; } {HPATH_START}\$\{ { PUSH_STATE(PATH_START); yyless(0); - *yylloc = prev_yylloc; + yylloc->unstash(); } {PATH_SEG} { @@ -286,7 +284,7 @@ or { return OR_KW; } context (it may be ')', ';', or something of that sort) */ POP_STATE(); yyless(0); - *yylloc = prev_yylloc; + yylloc->unstash(); return PATH_END; } diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index d243b9cec..0c3e36750 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -16,9 +16,9 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib libexpr_LIBS = 
libutil libstore libfetchers -libexpr_LDFLAGS += -lboost_context -pthread +libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS) ifdef HOST_LINUX - libexpr_LDFLAGS += -ldl + libexpr_LDFLAGS += -ldl endif # The dependency on libgc must be propagated (i.e. meaning that @@ -36,15 +36,15 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh -$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644)) +$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644)) $(foreach i, $(wildcard src/libexpr/value/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644))) $(foreach i, $(wildcard src/libexpr/flake/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644))) -$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh +$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh -$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh +$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh -src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM = +$(buildprefix)src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM = diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 22be8e68c..964de6351 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -9,57 +9,7 @@ namespace nix { -struct PosAdapter : AbstractPos -{ - Pos::Origin origin; - - PosAdapter(Pos::Origin origin) - : origin(std::move(origin)) - { - } - - std::optional getSource() const override - { - return std::visit(overloaded { - [](const Pos::none_tag &) -> std::optional { - return std::nullopt; - }, - [](const Pos::Stdin & s) -> std::optional { - // Get rid of the null terminators added by the parser. - return std::string(s.source->c_str()); - }, - [](const Pos::String & s) -> std::optional { - // Get rid of the null terminators added by the parser. - return std::string(s.source->c_str()); - }, - [](const SourcePath & path) -> std::optional { - try { - return path.readFile(); - } catch (Error &) { - return std::nullopt; - } - } - }, origin); - } - - void print(std::ostream & out) const override - { - std::visit(overloaded { - [&](const Pos::none_tag &) { out << "«none»"; }, - [&](const Pos::Stdin &) { out << "«stdin»"; }, - [&](const Pos::String & s) { out << "«string»"; }, - [&](const SourcePath & path) { out << path; } - }, origin); - } -}; - -Pos::operator std::shared_ptr() const -{ - auto pos = std::make_shared(origin); - pos->line = line; - pos->column = column; - return pos; -} +ExprBlackHole eBlackHole; // FIXME: remove, because *symbols* are abstract and do not have a single // textual representation; see printIdentifier() @@ -266,17 +216,6 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const } -std::ostream & operator << (std::ostream & str, const Pos & pos) -{ - if (auto pos2 = (std::shared_ptr) pos) { - str << *pos2; - } else - str << "undefined position"; - - return str; -} - - std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) { std::ostringstream out; @@ -331,6 +270,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); + fromWith = nullptr; + /* Check whether the variable appears in the environment. If so, set its level and displacement. 
*/ const StaticEnv * curEnv; @@ -342,7 +283,6 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & } else { auto i = curEnv->find(name); if (i != curEnv->vars.end()) { - fromWith = false; this->level = level; displ = i->second; return; @@ -358,7 +298,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), .errPos = es.positions[pos] }); - fromWith = true; + for (auto * e = env.get(); e && !fromWith; e = e->up) + fromWith = e->isWith; this->level = withLevel; } @@ -391,7 +332,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - auto newEnv = std::make_shared<StaticEnv>(false, env.get(), recursive ? attrs.size() : 0); + auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), recursive ? attrs.size() : 0); Displacement displ = 0; for (auto & i : attrs) @@ -433,7 +374,7 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> es.exprEnvs.insert(std::make_pair(this, env)); auto newEnv = std::make_shared<StaticEnv>( - false, env.get(), + nullptr, env.get(), (hasFormals() ? formals->formals.size() : 0) + (!arg ? 0 : 1)); @@ -469,7 +410,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared<StaticEnv>(false, env.get(), attrs->attrs.size()); + auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size()); Displacement displ = 0; for (auto & i : attrs->attrs) @@ -488,6 +429,10 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); + parentWith = nullptr; + for (auto * e = env.get(); e && !parentWith; e = e->up) + parentWith = e->isWith; + /* Does this `with' have an enclosing `with'? If so, record its level so that `lookupVar' can look up variables in the previous `with' if this one doesn't contain the desired attribute. */ @@ -504,7 +449,7 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & es.exprEnvs.insert(std::make_pair(this, env)); attrs->bindVars(es, env); - auto newEnv = std::make_shared<StaticEnv>(true, env.get()); + auto newEnv = std::make_shared<StaticEnv>(this, env.get()); body->bindVars(es, newEnv); } diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 10099d49e..3cd46ca27 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -8,6 +8,7 @@ #include "symbol-table.hh" #include "error.hh" #include "chunked-vector.hh" +#include "position.hh" namespace nix { @@ -21,25 +22,11 @@ MakeError(TypeError, EvalError); MakeError(UndefinedVarError, Error); MakeError(MissingArgumentError, EvalError); -/** - * Position objects. - */ -struct Pos +class InfiniteRecursionError : public EvalError { - uint32_t line; - uint32_t column; - - struct none_tag { }; - struct Stdin { ref<std::string> source; }; - struct String { ref<std::string> source; }; - - typedef std::variant<none_tag, Stdin, String, SourcePath> Origin; - - Origin origin; - - explicit operator bool() const { return line > 0; } - - operator std::shared_ptr<AbstractPos>() const; + friend class EvalState; +public: + using EvalError::EvalError; }; class PosIdx { @@ -74,7 +61,7 @@ public: mutable uint32_t idx = std::numeric_limits<uint32_t>::max(); // Used for searching in PosTable::[]. 
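The `bindVars` hunks above drop the `fromWith`/`isWith` booleans in favour of pointers to the enclosing `ExprWith`, so static scopes now record which `with` introduced them. A minimal sketch of the outward scan this enables, using simplified stand-in types rather than the real AST declarations:

```cpp
#include <cstdio>

// Simplified stand-ins for the evaluator's AST and static-scope types.
struct ExprWith {};

struct StaticEnv
{
    ExprWith * isWith;    // non-null iff this scope was introduced by `with`
    const StaticEnv * up; // enclosing static scope
};

// Mirrors the scan added in ExprVar::bindVars / ExprWith::bindVars: walk
// outwards and keep the first `with` scope encountered, or nullptr.
static ExprWith * nearestEnclosingWith(const StaticEnv * env)
{
    ExprWith * found = nullptr;
    for (auto * e = env; e && !found; e = e->up)
        found = e->isWith;
    return found;
}

int main()
{
    ExprWith w;
    StaticEnv outer{nullptr, nullptr};
    StaticEnv withScope{&w, &outer};
    StaticEnv inner{nullptr, &withScope};
    std::printf("%s\n", nearestEnclosingWith(&inner) ? "found a with" : "no with");
}
```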
- explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {} + explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {} public: const Pos::Origin origin; @@ -125,12 +112,11 @@ public: inline PosIdx noPos = {}; -std::ostream & operator << (std::ostream & str, const Pos & pos); - struct Env; struct Value; class EvalState; +struct ExprWith; struct StaticEnv; @@ -219,8 +205,11 @@ struct ExprVar : Expr Symbol name; /* Whether the variable comes from an environment (e.g. a rec, let - or function argument) or from a "with". */ - bool fromWith; + or function argument) or from a "with". + + `nullptr`: Not from a `with`. + Valid pointer: the nearest, innermost `with` expression to query first. */ + ExprWith * fromWith; /* In the former case, the value is obtained by going `level` levels up from the current environment and getting the @@ -292,6 +281,7 @@ struct ExprList : Expr std::vector elems; ExprList() { }; COMMON_METHODS + Value * maybeThunk(EvalState & state, Env & env) override; PosIdx getPos() const override { @@ -378,6 +368,7 @@ struct ExprWith : Expr PosIdx pos; Expr * attrs, * body; size_t prevWith; + ExprWith * parentWith; ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { }; PosIdx getPos() const override { return pos; } COMMON_METHODS @@ -405,6 +396,7 @@ struct ExprOpNot : Expr { Expr * e; ExprOpNot(Expr * e) : e(e) { }; + PosIdx getPos() const override { return e->getPos(); } COMMON_METHODS }; @@ -454,20 +446,30 @@ struct ExprPos : Expr COMMON_METHODS }; +/* only used to mark thunks as black holes. */ +struct ExprBlackHole : Expr +{ + void show(const SymbolTable & symbols, std::ostream & str) const override {} + void eval(EvalState & state, Env & env, Value & v) override; + void bindVars(EvalState & es, const std::shared_ptr & env) override {} +}; + +extern ExprBlackHole eBlackHole; + /* Static environments are used to map variable names onto (level, displacement) pairs used to obtain the value of the variable at runtime. */ struct StaticEnv { - bool isWith; + ExprWith * isWith; const StaticEnv * up; // Note: these must be in sorted order. typedef std::vector> Vars; Vars vars; - StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) { + StaticEnv(ExprWith * isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) { vars.reserve(expectedSize); }; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index b86cef217..b331776f0 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -28,6 +28,31 @@ namespace nix { +#define YYLTYPE ::nix::ParserLocation + struct ParserLocation + { + int first_line, first_column; + int last_line, last_column; + + // backup to recover from yyless(0) + int stashed_first_line, stashed_first_column; + int stashed_last_line, stashed_last_column; + + void stash() { + stashed_first_line = first_line; + stashed_first_column = first_column; + stashed_last_line = last_line; + stashed_last_column = last_column; + } + + void unstash() { + first_line = stashed_first_line; + first_column = stashed_first_column; + last_line = stashed_last_line; + last_column = stashed_last_column; + } + }; + struct ParseData { EvalState & state; @@ -686,17 +711,26 @@ Expr * EvalState::parse( } -SourcePath resolveExprPath(const SourcePath & path) +SourcePath resolveExprPath(SourcePath path) { + unsigned int followCount = 0, maxFollow = 1024; + /* If `path' is a symlink, follow it. This is so that relative path references work. 
*/ - auto path2 = path.resolveSymlinks(); + while (!path.path.isRoot()) { + // Basic cycle/depth limit to avoid infinite loops. + if (++followCount >= maxFollow) + throw Error("too many symbolic links encountered while traversing the path '%s'", path); + auto p = path.parent().resolveSymlinks() + path.baseName(); + if (p.lstat().type != InputAccessor::tSymlink) break; + path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; + } /* If `path' refers to a directory, append `/default.nix'. */ - if (path2.lstat().type == InputAccessor::tDirectory) - return path2 + "default.nix"; + if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) + return path + "default.nix"; - return path2; + return path; } @@ -708,7 +742,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path) Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv) { - auto buffer = path.readFile(); + auto buffer = path.resolveSymlinks().readFile(); // readFile has hopefully left some extra space for terminators buffer.append("\0\0", 2); return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); @@ -775,7 +809,7 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ } -std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0) +std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) { auto & value = value0.s; auto i = searchPathResolved.find(value); @@ -792,7 +826,6 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa logWarning({ .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) }); - res = std::nullopt; } } @@ -806,6 +839,20 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa else { auto path = absPath(value); + + /* Allow access to paths in the search path. */ + if (initAccessControl) { + allowPath(path); + if (store->isInStore(path)) { + try { + StorePathSet closure; + store->computeFSClosure(store->toStorePath(path).first, closure); + for (auto & p : closure) + allowPath(p); + } catch (InvalidPath &) { } + } + } + if (pathExists(path)) res = { path }; else { @@ -821,7 +868,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa else debug("failed to resolve search path element '%s'", value); - searchPathResolved[value] = res; + searchPathResolved.emplace(value, res); return res; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 8d3a18526..ee07e5568 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4,6 +4,7 @@ #include "eval-inline.hh" #include "eval.hh" #include "eval-settings.hh" +#include "gc-small-vector.hh" #include "globals.hh" #include "json-to-value.hh" #include "names.hh" @@ -14,6 +15,8 @@ #include "value-to-json.hh" #include "value-to-xml.hh" #include "primops.hh" +#include "fs-input-accessor.hh" +#include "fetch-to-store.hh" #include #include @@ -29,7 +32,6 @@ #include - namespace nix { @@ -83,16 +85,15 @@ StringMap EvalState::realiseContext(const NixStringContext & context) /* Build/substitute the context. 
*/ std::vector<DerivedPath> buildReqs; for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); - store->buildPaths(buildReqs); + buildStore->buildPaths(buildReqs, bmNormal, store); + + StorePathSet outputsToCopyAndAllow; for (auto & drv : drvs) { - auto outputs = resolveDerivedPath(*store, drv); + auto outputs = resolveDerivedPath(*buildStore, drv, &*store); for (auto & [outputName, outputPath] : outputs) { - /* Add the output of this derivations to the allowed - paths. */ - if (allowedPaths) { - allowPath(outputPath); - } + outputsToCopyAndAllow.insert(outputPath); + /* Get all the output paths corresponding to the placeholders we had */ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { res.insert_or_assign( @@ -101,36 +102,35 @@ StringMap EvalState::realiseContext(const NixStringContext & context) .drvPath = drv.drvPath, .output = outputName, }).render(), - store->printStorePath(outputPath) + buildStore->printStorePath(outputPath) ); } } } + if (store != buildStore) copyClosure(*buildStore, *store, outputsToCopyAndAllow); + for (auto & outputPath : outputsToCopyAndAllow) { + /* Add the output of these derivations to the allowed + paths. */ + allowPath(store->toRealPath(outputPath)); + } + return res; } -struct RealisePathFlags { - // Whether to check that the path is allowed in pure eval mode - bool checkForPureEval = true; -}; - -static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {}) +static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true) { NixStringContext context; auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path"); try { - if (!context.empty()) { + if (!context.empty() && path.accessor == state.rootFS) { auto rewrites = state.realiseContext(context); auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context); - return {path.accessor, CanonPath(realPath)}; + path = {path.accessor, CanonPath(realPath)}; } - - return flags.checkForPureEval - ? state.checkSourcePath(path) - : path; + return resolveSymlinks ? path.resolveSymlinks() : path; } catch (Error & e) { e.addTrace(state.positions[pos], "while realising the context of path '%s'", path); throw; @@ -170,7 +170,7 @@ static void mkOutputString( argument. */ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v) { - auto path = realisePath(state, pos, vPath); + auto path = realisePath(state, pos, vPath, false); auto path2 = path.path.abs(); // FIXME @@ -222,7 +222,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v Env * env = &state.allocEnv(vScope->attrs->size()); env->up = &state.baseEnv; - auto staticEnv = std::make_shared<StaticEnv>(false, state.staticBaseEnv.get(), vScope->attrs->size()); + auto staticEnv = std::make_shared<StaticEnv>(nullptr, state.staticBaseEnv.get(), vScope->attrs->size()); unsigned int displ = 0; for (auto & attr : *vScope->attrs) { @@ -446,9 +446,7 @@ static RegisterPrimOp primop_isNull({ .doc = R"( Return `true` if *e* evaluates to `null`, and `false` otherwise. - > **Warning** - > - > This function is *deprecated*; just write `e == null` instead. + This is equivalent to `e == null`. 
)", .fun = prim_isNull, }); @@ -594,7 +592,7 @@ struct CompareValues case nFloat: return v1->fpoint < v2->fpoint; case nString: - return v1->string_view().compare(v2->string_view()) < 0; + return strcmp(v1->c_str(), v2->c_str()) < 0; case nPath: // Note: we don't take the accessor into account // since it's not obvious how to compare them in a @@ -1317,7 +1315,7 @@ drvName, Bindings * attrs, Value & v) .errPos = state.positions[noPos] })); - auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo)); + auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo)); auto method = ingestionMethod.value_or(FileIngestionMethod::Flat); @@ -1339,7 +1337,7 @@ drvName, Bindings * attrs, Value & v) .errPos = state.positions[noPos] }); - auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256); + auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); for (auto & i : outputs) { @@ -1348,13 +1346,13 @@ drvName, Bindings * attrs, Value & v) drv.outputs.insert_or_assign(i, DerivationOutput::Impure { .method = method, - .hashType = ht, + .hashAlgo = ha, }); else drv.outputs.insert_or_assign(i, DerivationOutput::CAFloating { .method = method, - .hashType = ht, + .hashAlgo = ha, }); } } @@ -1493,7 +1491,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, })); NixStringContext context; - auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path; + auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path; /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink directly in the store. The latter condition is necessary so e.g. nix-push does the right thing. */ @@ -1533,29 +1531,19 @@ static RegisterPrimOp primop_storePath({ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto & arg = *args[0]; - - /* We don’t check the path right now, because we don’t want to - throw if the path isn’t allowed, but just return false (and we - can’t just catch the exception here because we still want to - throw if something in the evaluation of `arg` tries to - access an unauthorized path). */ - auto path = realisePath(state, pos, arg, { .checkForPureEval = false }); - - /* SourcePath doesn't know about trailing slash. */ - auto mustBeDir = arg.type() == nString - && (arg.string_view().ends_with("/") - || arg.string_view().ends_with("/.")); - try { - auto checked = state.checkSourcePath(path); - auto st = checked.maybeLstat(); + auto & arg = *args[0]; + + auto path = realisePath(state, pos, arg); + + /* SourcePath doesn't know about trailing slash. */ + auto mustBeDir = arg.type() == nString + && (arg.string_view().ends_with("/") + || arg.string_view().ends_with("/.")); + + auto st = path.maybeLstat(); auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory); v.mkBool(exists); - } catch (SysError & e) { - /* Don't give away info from errors while canonicalising - ‘path’ in restricted mode. 
*/ - v.mkBool(false); } catch (RestrictedPathError & e) { v.mkBool(false); } @@ -1699,7 +1687,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile"); - v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos))); + v.mkPath(state.findFile(searchPath, path, pos)); } static RegisterPrimOp primop_findFile(PrimOp { @@ -1754,17 +1742,17 @@ static RegisterPrimOp primop_findFile(PrimOp { /* Return the cryptographic hash of a file in base-16. */ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); - std::optional ht = parseHashType(type); - if (!ht) + auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); + std::optional ha = parseHashAlgo(algo); + if (!ha) state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash type '%1%'", type), + .msg = hintfmt("unknown hash algo '%1%'", algo), .errPos = state.positions[pos] })); auto path = realisePath(state, pos, *args[1]); - v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false)); + v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false)); } static RegisterPrimOp primop_hashFile({ @@ -1789,7 +1777,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type) static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto path = realisePath(state, pos, *args[0]); + auto path = realisePath(state, pos, *args[0], false); /* Retrieve the directory entry type and stringize it. */ v.mkString(fileTypeToString(path.lstat().type)); } @@ -2090,8 +2078,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val } auto storePath = settings.readOnlyMode - ? state.store->computeStorePathForText(name, contents, refs) - : state.store->addTextToStore(name, contents, refs, state.repair); + ? state.store->makeFixedOutputPathFromCA(name, TextInfo { + .hash = hashString(HashAlgorithm::SHA256, contents), + .references = std::move(refs), + }) + : ({ + StringSource s { contents }; + state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair); + }); /* Note: we don't need to add `context' to the context of the result, since `storePath' itself has references to the paths @@ -2178,11 +2172,35 @@ static RegisterPrimOp primop_toFile({ .fun = prim_toFile, }); +bool EvalState::callPathFilter( + Value * filterFun, + const SourcePath & path, + std::string_view pathArg, + PosIdx pos) +{ + auto st = path.lstat(); + + /* Call the filter function. The first argument is the path, the + second is a string indicating the type of the file. 
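The `hashFile` hunk above switches from `parseHashType`/`htSHA256` to `parseHashAlgo`/`HashAlgorithm::SHA256`. A self-contained sketch of the lookup such a parser performs, using local stand-ins rather than libutil's real definitions:

```cpp
#include <cstdio>
#include <optional>
#include <string>

// Local stand-in for the enum in libutil's hash.hh.
enum class HashAlgorithm { MD5, SHA1, SHA256, SHA512 };

// Sketch of what parseHashAlgo does: map a user-supplied name to an
// algorithm, returning nullopt so callers can report "unknown hash algo".
static std::optional<HashAlgorithm> parseHashAlgoSketch(const std::string & s)
{
    if (s == "md5") return HashAlgorithm::MD5;
    if (s == "sha1") return HashAlgorithm::SHA1;
    if (s == "sha256") return HashAlgorithm::SHA256;
    if (s == "sha512") return HashAlgorithm::SHA512;
    return std::nullopt;
}

int main()
{
    auto ha = parseHashAlgoSketch("sha256");
    std::printf("%s\n", ha && *ha == HashAlgorithm::SHA256 ? "sha256" : "unknown");
}
```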
*/ + Value arg1; + arg1.mkString(pathArg); + + Value arg2; + // assert that type is not "unknown" + arg2.mkString(fileTypeToString(st.type)); + + Value * args []{&arg1, &arg2}; + Value res; + callFunction(*filterFun, 2, args, res, pos); + + return forceBool(res, pos, "while evaluating the return value of the path filter function"); +} + static void addPath( EvalState & state, const PosIdx pos, std::string_view name, - Path path, + SourcePath path, Value * filterFun, FileIngestionMethod method, const std::optional expectedHash, @@ -2190,48 +2208,29 @@ static void addPath( const NixStringContext & context) { try { - // FIXME: handle CA derivation outputs (where path needs to - // be rewritten to the actual output). - auto rewrites = state.realiseContext(context); - path = state.toRealPath(rewriteStrings(path, rewrites), context); - StorePathSet refs; - if (state.store->isInStore(path)) { + if (path.accessor == state.rootFS && state.store->isInStore(path.path.abs())) { + // FIXME: handle CA derivation outputs (where path needs to + // be rewritten to the actual output). + auto rewrites = state.realiseContext(context); + path = {state.rootFS, CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))}; + try { - auto [storePath, subPath] = state.store->toStorePath(path); + auto [storePath, subPath] = state.store->toStorePath(path.path.abs()); // FIXME: we should scanForReferences on the path before adding it refs = state.store->queryPathInfo(storePath)->references; - path = state.store->toRealPath(storePath) + subPath; + path = {state.rootFS, CanonPath(state.store->toRealPath(storePath) + subPath)}; } catch (Error &) { // FIXME: should be InvalidPathError } } - path = evalSettings.pureEval && expectedHash - ? path - : state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs(); - - PathFilter filter = filterFun ? ([&](const Path & path) { - auto st = lstat(path); - - /* Call the filter function. The first argument is the path, - the second is a string indicating the type of the file. */ - Value arg1; - arg1.mkString(path); - - Value arg2; - arg2.mkString( - S_ISREG(st.st_mode) ? "regular" : - S_ISDIR(st.st_mode) ? "directory" : - S_ISLNK(st.st_mode) ? "symlink" : - "unknown" /* not supported, will fail! 
*/); - - Value * args []{&arg1, &arg2}; - Value res; - state.callFunction(*filterFun, 2, args, res, pos); - - return state.forceBool(res, pos, "while evaluating the return value of the path filter function"); - }) : defaultPathFilter; + std::unique_ptr filter; + if (filterFun) + filter = std::make_unique([&](const Path & p) { + auto p2 = CanonPath(p); + return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos); + }); std::optional expectedStorePath; if (expectedHash) @@ -2242,7 +2241,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair); + auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); @@ -2261,7 +2260,8 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg auto path = state.coerceToPath(pos, *args[1], context, "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); - addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); + + addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); } static RegisterPrimOp primop_filterSource({ @@ -2341,7 +2341,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value else if (n == "recursive") method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") }; else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256); + expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); else state.debugThrowLastTrace(EvalError({ .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]), @@ -2356,7 +2356,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value if (name.empty()) name = path->baseName(); - addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context); + addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context); } static RegisterPrimOp primop_path({ @@ -2375,7 +2375,7 @@ static RegisterPrimOp primop_path({ like `@`. - filter\ - A function of the type expected by `builtins.filterSource`, + A function of the type expected by [`builtins.filterSource`](#builtins-filterSource), with the same semantics. 
- recursive\ @@ -2413,7 +2413,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, (v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]); std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; }); + [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); } static RegisterPrimOp primop_attrNames({ @@ -2550,6 +2550,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args /* Get the attribute names to be removed. We keep them as Attrs instead of Symbols so std::set_difference can be used to remove them from attrs[0]. */ + // 64: large enough to fit the attributes of a derivation boost::container::small_vector names; names.reserve(args[1]->listSize()); for (auto elem : args[1]->listItems()) { @@ -2729,8 +2730,8 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V auto attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.catAttrs")); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs"); - Value * res[args[1]->listSize()]; - unsigned int found = 0; + SmallValueVector res(args[1]->listSize()); + size_t found = 0; for (auto v2 : args[1]->listItems()) { state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs"); @@ -3064,9 +3065,8 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filter"); - // FIXME: putting this on the stack is risky. - Value * vs[args[1]->listSize()]; - unsigned int k = 0; + SmallValueVector vs(args[1]->listSize()); + size_t k = 0; bool same = true; for (unsigned int n = 0; n < args[1]->listSize(); ++n) { @@ -3179,9 +3179,16 @@ static RegisterPrimOp primop_foldlStrict({ .doc = R"( Reduce a list by applying a binary operator, from left to right, e.g. `foldl' op nul [x0 x1 x2 ...] = op (op (op nul x0) x1) x2) - ...`. For example, `foldl' (x: y: x + y) 0 [1 2 3]` evaluates to 6. - The return value of each application of `op` is evaluated immediately, - even for intermediate values. + ...`. + + For example, `foldl' (acc: elem: acc + elem) 0 [1 2 3]` evaluates + to `6` and `foldl' (acc: elem: { "${elem}" = elem; } // acc) {} + ["a" "b"]` evaluates to `{ a = "a"; b = "b"; }`. + + The first argument of `op` is the accumulator whereas the second + argument is the current element being processed. The return value + of each application of `op` is evaluated immediately, even for + intermediate values. )", .fun = prim_foldlStrict, }); @@ -3191,10 +3198,14 @@ static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * ar state.forceFunction(*args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all")); state.forceList(*args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all")); + std::string_view errorCtx = any + ? 
"while evaluating the return value of the function passed to builtins.any" + : "while evaluating the return value of the function passed to builtins.all"; + Value vTmp; for (auto elem : args[1]->listItems()) { state.callFunction(*args[0], *elem, vTmp, pos); - bool res = state.forceBool(vTmp, pos, std::string("while evaluating the return value of the function passed to builtins.") + (any ? "any" : "all")); + bool res = state.forceBool(vTmp, pos, errorCtx); if (res == any) { v.mkBool(any); return; @@ -3450,13 +3461,14 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args, state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.concatMap"); auto nrLists = args[1]->listSize(); - Value lists[nrLists]; + // List of returned lists before concatenation. References to these Values must NOT be persisted. + SmallTemporaryValueVector lists(nrLists); size_t len = 0; for (unsigned int n = 0; n < nrLists; ++n) { Value * vElem = args[1]->listElems()[n]; state.callFunction(*args[0], *vElem, lists[n], pos); - state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap"); + state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap"); len += lists[n].listSize(); } @@ -3754,18 +3766,18 @@ static RegisterPrimOp primop_stringLength({ /* Return the cryptographic hash of a string in base-16. */ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); - std::optional ht = parseHashType(type); - if (!ht) + auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); + std::optional ha = parseHashAlgo(algo); + if (!ha) state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash type '%1%'", type), + .msg = hintfmt("unknown hash algo '%1%'", algo), .errPos = state.positions[pos] })); NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); - v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false)); + v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false)); } static RegisterPrimOp primop_hashString({ @@ -3788,15 +3800,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'"); Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo")); - std::optional ht = std::nullopt; + std::optional ha = std::nullopt; if (iteratorHashAlgo != inputAttrs->end()) { - ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); + ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); } Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'"); HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'")); - v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI)); + 
v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI)); } static RegisterPrimOp primop_convertHash({ @@ -3825,7 +3837,8 @@ static RegisterPrimOp primop_convertHash({ The format of the resulting hash. Must be one of - `"base16"` - - `"base32"` + - `"nix32"` + - `"base32"` (deprecated alias for `"nix32"`) - `"base64"` - `"sri"` @@ -4382,13 +4395,16 @@ void EvalState::createBaseEnv() .impureOnly = true, }); - if (!evalSettings.pureEval) { - v.mkString(settings.thisSystem.get()); - } + if (!evalSettings.pureEval) + v.mkString(evalSettings.getCurrentSystem()); addConstant("__currentSystem", v, { .type = nString, .doc = R"( - The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-pure-eval). + The value of the + [`eval-system`](@docroot@/command-ref/conf-file.md#conf-eval-system) + or else + [`system`](@docroot@/command-ref/conf-file.md#conf-system) + configuration option. It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression: @@ -4437,7 +4453,7 @@ void EvalState::createBaseEnv() .doc = R"( Logical file system location of the [Nix store](@docroot@/glossary.md#gloss-store) currently in use. - This value is determined by the `store` parameter in [Store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md): + This value is determined by the `store` parameter in [Store URLs](@docroot@/store/types/index.md#store-url-format): ```shell-session $ nix-instantiate --store 'dummy://?store=/blah' --eval --expr builtins.storeDir diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 930e7f32a..45486608f 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -8,6 +8,22 @@ namespace nix { +/** + * For functions where we do not expect deep recursion, we can use a sizable + * part of the stack as free allocation space. + * + * Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes. + */ +constexpr size_t nonRecursiveStackReservation = 128; + +/** + * Functions that may be applied to self-similar inputs, such as concatMap on a + * tree, should reserve a smaller part of the stack for allocation. + * + * Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes. + */ +constexpr size_t conservativeStackReservation = 16; + struct RegisterPrimOp { typedef std::vector<PrimOp> PrimOps; diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index b86ef6b93..27147a5d1 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -1,5 +1,6 @@ #include "primops.hh" #include "store-api.hh" +#include "realisation.hh" #include "make-content-addressed.hh" #include "url.hh" diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index e76ce455d..58fe6f173 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a // be both a revision or a branch/tag name. 
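The two reservation constants added to `primops.hh` above are tuning knobs for the `SmallVector` aliases from `gc-small-vector.hh`: they fix how many elements live inline before the vector falls back to the heap. A sketch of the pattern with the plain, non-GC alias (the Boehm build substitutes `traceable_allocator<T>`):

```cpp
#include <boost/container/small_vector.hpp>
#include <cstddef>
#include <cstdio>

constexpr std::size_t nonRecursiveStackReservation = 128;

// Non-GC variant of the alias from gc-small-vector.hh.
template<typename T, std::size_t nItems>
using SmallVector = boost::container::small_vector<T, nItems>;

int main()
{
    // Up to 128 elements are stored inline, avoiding a heap allocation;
    // longer sequences transparently spill to the heap.
    SmallVector<int, nonRecursiveStackReservation> vs;
    for (int i = 0; i < 200; ++i) vs.push_back(i);
    std::printf("size=%zu\n", vs.size());
}
```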
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial"); if (std::regex_match(value.begin(), value.end(), revRegex)) - rev = Hash::parseAny(value, htSHA1); + rev = Hash::parseAny(value, HashAlgorithm::SHA1); else ref = value; } @@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a attrs2.alloc("branch").mkString(*input2.getRef()); // Backward compatibility: set 'rev' to // 0000000000000000000000000000000000000000 for a dirty tree. - auto rev2 = input2.getRev().value_or(Hash(htSHA1)); + auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1)); attrs2.alloc("rev").mkString(rev2.gitRev()); attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12)); if (auto revCount = input2.getRevCount()) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 8031bf809..eb2df8626 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -46,7 +46,7 @@ void emitTreeAttrs( attrs.alloc("shortRev").mkString(rev->gitShortRev()); } else if (emptyRevFallback) { // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev - auto emptyHash = Hash(htSHA1); + auto emptyHash = Hash(HashAlgorithm::SHA1); attrs.alloc("rev").mkString(emptyHash.gitRev()); attrs.alloc("shortRev").mkString(emptyHash.gitShortRev()); } @@ -187,45 +187,215 @@ static RegisterPrimOp primop_fetchTree({ .name = "fetchTree", .args = {"input"}, .doc = R"( - Fetch a source tree or a plain file using one of the supported backends. - *input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax. - The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled. + Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with: - > **Note** + - the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path) + - the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash + - backend-specific metadata (currently not documented). + + *input* must be an attribute set with the following attributes: + + - `type` (String, required) + + One of the [supported source types](#source-types). + This determines other required and allowed input attributes. + + - `narHash` (String, optional) + + The `narHash` parameter can be used to substitute the source of the tree. + It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism. + If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available. + + A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again. + That is, `fetchTree` is idempotent. + + Downloads are cached in `$XDG_CACHE_HOME/nix`. + The remote source will be fetched from the network if both are true: + - A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store + + > **Note** + > + > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching. 
+ + - There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) + + ## Source types + + The following source types and associated input attributes are supported. + + + + - `"file"` + + Place a plain file into the Nix store. + This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl). + + - `url` (String, required) + + Supported protocols: + + - `https` + + > **Example** + > + > ```nix + > fetchTree { + > type = "file"; + > url = "https://example.com/index.html"; + > } + > ``` + + - `http` + + Insecure HTTP transfer for legacy sources. + + > **Warning** + > + > HTTP performs no encryption or authentication. + > Use a `narHash` known in advance to ensure the output has expected contents. + + - `file` + + A file on the local file system. + + > **Example** + > + > ```nix + > fetchTree { + > type = "file"; + > url = "file:///home/eelco/nix/README.md"; + > } + > ``` + + - `"tarball"` + + Download a tar archive and extract it into the Nix store. + This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball). + + - `url` (String, required) + + > **Example** + > + > ```nix + > fetchTree { + > type = "tarball"; + > url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11"; + > } + > ``` + + - `"git"` + + Fetch a Git tree and copy it to the Nix store. + This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit). + + - `url` (String, required) + + The URL formats supported are the same as for Git itself. + + > **Example** + > + > ```nix + > fetchTree { + > type = "git"; + > url = "git@github.com:NixOS/nixpkgs.git"; + > } + > ``` + + > **Note** + > + > If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files`, but use the *current file contents* of the Git working directory. + + - `ref` (String, optional) + + A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name. + + Default: `"HEAD"` + + - `rev` (String, optional) + + A Git revision; a commit hash. + + Default: the tip of `ref` + + - `shallow` (Bool, optional) + + Make a shallow clone when fetching the Git tree. + + Default: `false` + + - `submodules` (Bool, optional) + + Also fetch submodules if available. + + Default: `false` + + - `allRefs` (Bool, optional) + + If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache. + Otherwise, only the latest commit is fetched if it is not already cached. + + Default: `false` + + - `lastModified` (Integer, optional) + + Unix timestamp of the fetched commit. + + If set, pass through the value to the output attribute set. + Otherwise, generated from the fetched Git tree. + + - `revCount` (Integer, optional) + + Number of revisions in the history of the Git repository before the fetched commit. + + If set, pass through the value to the output attribute set. + Otherwise, generated from the fetched Git tree. + + The following input types are still subject to change: + + - `"path"` + - `"github"` + - `"gitlab"` + - `"sourcehut"` + - `"mercurial"` + + *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references). 
+ The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled. + + > **Example** > - > The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled. + > Fetch a GitHub repository using the attribute set representation: + > + > ```nix + > builtins.fetchTree { + > type = "github"; + > owner = "NixOS"; + > repo = "nixpkgs"; + > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + > } + > ``` + > + > This evaluates to the following attribute set: + > + > ```nix + > { + > lastModified = 1686503798; + > lastModifiedDate = "20230611171638"; + > narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; + > outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; + > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + > shortRev = "ae2e6b3"; + > } + > ``` - Here are some examples of how to use `fetchTree`: - - - Fetch a GitHub repository using the attribute set representation: - - ```nix - builtins.fetchTree { - type = "github"; - owner = "NixOS"; - repo = "nixpkgs"; - rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; - } - ``` - - This evaluates to the following attribute set: - - ``` - { - lastModified = 1686503798; - lastModifiedDate = "20230611171638"; - narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; - outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; - rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; - shortRev = "ae2e6b3"; - } - ``` - - - Fetch the same GitHub repository using the URL-like syntax: - - ``` - builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" - ``` > **Example** + > + > Fetch the same GitHub repository using the URL-like syntax: + > + > ```nix + > builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" + > ``` )", .fun = prim_fetchTree, .experimentalFeature = Xp::FetchTree, @@ -246,7 +416,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v if (n == "url") url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch"); else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256); + expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256); else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); else @@ -276,7 +446,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who)); // early exit if pinned and already in the store - if (expectedHash && expectedHash->type == htSHA256) { + if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) { auto expectedPath = state.store->makeFixedOutputPath( name, FixedOutputInfo { @@ -301,10 +471,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v if (expectedHash) { auto hash = unpack ? 
state.store->queryPathInfo(storePath)->narHash - : hashFile(htSHA256, state.store->toRealPath(storePath)); + : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true))); + *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true))); } state.allowAndSetStorePathString(storePath, v); @@ -425,7 +595,8 @@ static RegisterPrimOp primop_fetchGit({ - `shallow` (default: `false`) - A Boolean parameter that specifies whether fetching a shallow clone is allowed. + A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. + This still performs a full clone of what is available on the remote. - `allRefs` diff --git a/src/libexpr/tests/local.mk b/src/libexpr/tests/local.mk deleted file mode 100644 index 331a5ead6..000000000 --- a/src/libexpr/tests/local.mk +++ /dev/null @@ -1,19 +0,0 @@ -check: libexpr-tests_RUN - -programs += libexpr-tests - -libexpr-tests_NAME := libnixexpr-tests - -libexpr-tests_DIR := $(d) - -libexpr-tests_INSTALL_DIR := - -libexpr-tests_SOURCES := \ - $(wildcard $(d)/*.cc) \ - $(wildcard $(d)/value/*.cc) - -libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers - -libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers - -libexpr-tests_LDFLAGS := $(GTEST_LIBS) -lgmock diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 622e613ea..c65b336b0 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -3,10 +3,12 @@ #include #include +#include #include "symbol-table.hh" #include "value/context.hh" #include "input-accessor.hh" +#include "source-path.hh" #if HAVE_BOEHMGC #include @@ -31,7 +33,6 @@ typedef enum { tThunk, tApp, tLambda, - tBlackhole, tPrimOp, tPrimOpApp, tExternal, @@ -61,12 +62,12 @@ class Bindings; struct Env; struct Expr; struct ExprLambda; +struct ExprBlackHole; struct PrimOp; class Symbol; class PosIdx; struct Pos; class StorePath; -class Store; class EvalState; class XMLWriter; @@ -151,49 +152,67 @@ public: // type() == nThunk inline bool isThunk() const { return internalType == tThunk; }; inline bool isApp() const { return internalType == tApp; }; - inline bool isBlackhole() const { return internalType == tBlackhole; }; + inline bool isBlackhole() const; // type() == nFunction inline bool isLambda() const { return internalType == tLambda; }; inline bool isPrimOp() const { return internalType == tPrimOp; }; inline bool isPrimOpApp() const { return internalType == tPrimOpApp; }; + /** + * Strings in the evaluator carry a so-called `context` which + * is a list of strings representing store paths. This is to + * allow users to write things like + * + * "--with-freetype2-library=" + freetype + "/lib" + * + * where `freetype` is a derivation (or a source to be copied + * to the store). If we just concatenated the strings without + * keeping track of the referenced store paths, then if the + * string is used as a derivation attribute, the derivation + * will not have the correct dependencies in its inputDrvs and + * inputSrcs. 
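The long comment being moved into `value.hh` here specifies how string contexts behave. As an illustrative sketch of that bookkeeping (simplified types, not the evaluator's actual representation), concatenation must union the referenced store paths so a derivation attribute built from the result keeps its dependencies:

```cpp
#include <set>
#include <string>

// Illustrative stand-in: a string plus the store paths it references.
struct ContextString
{
    std::string s;
    std::set<std::string> context; // kept sorted, as the comment requires
};

// Concatenation unions the contexts, so the resulting derivation attribute
// still carries every referenced store path in inputDrvs/inputSrcs.
static ContextString concat(const ContextString & a, const ContextString & b)
{
    ContextString r{a.s + b.s, a.context};
    r.context.insert(b.context.begin(), b.context.end());
    return r;
}

int main()
{
    ContextString flag{"--with-freetype2-library=", {}};
    ContextString freetype{"/nix/store/...-freetype/lib", {"/nix/store/...-freetype"}};
    auto joined = concat(flag, freetype);
    return joined.context.size() == 1 ? 0 : 1;
}
```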
+ + * The semantics of the context is as follows: when a string + * with context C is used as a derivation attribute, then the + * derivations in C will be added to the inputDrvs of the + * derivation, and the other store paths in C will be added to + * the inputSrcs of the derivations. + + * For canonicity, the store paths should be in sorted order. + */ + struct StringWithContext { + const char * c_str; + const char * * context; // must be in sorted order + }; + + struct Path { + InputAccessor * accessor; + const char * path; + }; + + struct ClosureThunk { + Env * env; + Expr * expr; + }; + + struct FunctionApplicationThunk { + Value * left, * right; + }; + + struct Lambda { + Env * env; + ExprLambda * fun; + }; + union { NixInt integer; bool boolean; - /** - * Strings in the evaluator carry a so-called `context` which - * is a list of strings representing store paths. This is to - * allow users to write things like + StringWithContext string; - * "--with-freetype2-library=" + freetype + "/lib" - - * where `freetype` is a derivation (or a source to be copied - * to the store). If we just concatenated the strings without - * keeping track of the referenced store paths, then if the - * string is used as a derivation attribute, the derivation - * will not have the correct dependencies in its inputDrvs and - * inputSrcs. - - * The semantics of the context is as follows: when a string - * with context C is used as a derivation attribute, then the - * derivations in C will be added to the inputDrvs of the - * derivation, and the other store paths in C will be added to - * the inputSrcs of the derivations. - - * For canonicity, the store paths should be in sorted order. - */ - struct { - const char * c_str; - const char * * context; // must be in sorted order - } string; - - struct { - InputAccessor * accessor; - const char * path; - } _path; + Path _path; Bindings * attrs; struct { @@ -201,21 +220,11 @@ public: Value * * elems; } bigList; Value * smallList[2]; - struct { - Env * env; - Expr * expr; - } thunk; - struct { - Value * left, * right; - } app; - struct { - Env * env; - ExprLambda * fun; - } lambda; + ClosureThunk thunk; + FunctionApplicationThunk app; + Lambda lambda; PrimOp * primOp; - struct { - Value * left, * right; - } primOpApp; + FunctionApplicationThunk primOpApp; ExternalValueBase * external; NixFloat fpoint; }; @@ -240,7 +249,7 @@ public: case tLambda: case tPrimOp: case tPrimOpApp: return nFunction; case tExternal: return nExternal; case tFloat: return nFloat; - case tThunk: case tApp: case tBlackhole: return nThunk; + case tThunk: case tApp: return nThunk; } if (invalidIsThunk) return nThunk; @@ -348,19 +357,9 @@ public: lambda.fun = f; } - inline void mkBlackhole() - { - internalType = tBlackhole; - // Value will be overridden anyways - } - - inline void mkPrimOp(PrimOp * p) - { - clearValue(); - internalType = tPrimOp; - primOp = p; - } + inline void mkBlackhole(); + void mkPrimOp(PrimOp * p); inline void mkPrimOpApp(Value * l, Value * r) { @@ -393,7 +392,13 @@ public: return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems; } - const Value * const * listElems() const + std::span listItems() const + { + assert(isList()); + return std::span(listElems(), listSize()); + } + + Value * const * listElems() const { return internalType == tList1 || internalType == tList2 ? 
smallList : bigList.elems; } @@ -412,41 +417,12 @@ public: */ bool isTrivial() const; - auto listItems() - { - struct ListIterable - { - typedef Value * const * iterator; - iterator _begin, _end; - iterator begin() const { return _begin; } - iterator end() const { return _end; } - }; - assert(isList()); - auto begin = listElems(); - return ListIterable { begin, begin + listSize() }; - } - - auto listItems() const - { - struct ConstListIterable - { - typedef const Value * const * iterator; - iterator _begin, _end; - iterator begin() const { return _begin; } - iterator end() const { return _end; } - }; - assert(isList()); - auto begin = listElems(); - return ConstListIterable { begin, begin + listSize() }; - } - SourcePath path() const { assert(internalType == tPath); - return SourcePath { - .accessor = ref(_path.accessor->shared_from_this()), - .path = CanonPath(CanonPath::unchecked_t(), _path.path) - }; + return SourcePath( + ref(_path.accessor->shared_from_this()), + CanonPath(CanonPath::unchecked_t(), _path.path)); } std::string_view string_view() const @@ -468,6 +444,20 @@ public: }; +extern ExprBlackHole eBlackHole; + +bool Value::isBlackhole() const +{ + return internalType == tThunk && thunk.expr == (Expr*) &eBlackHole; +} + +void Value::mkBlackhole() +{ + internalType = tThunk; + thunk.expr = (Expr*) &eBlackHole; +} + + #if HAVE_BOEHMGC typedef std::vector> ValueVector; typedef std::map, traceable_allocator>> ValueMap; diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index b72a464e8..e071b4717 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -20,6 +20,9 @@ create table if not exists Cache ( ); )sql"; +// FIXME: we should periodically purge/nuke this cache to prevent it +// from growing too big. + struct CacheImpl : Cache { struct State @@ -48,8 +51,62 @@ struct CacheImpl : Cache "select info, path, immutable, timestamp from Cache where input = ?"); } + void upsert( + const Attrs & inAttrs, + const Attrs & infoAttrs) override + { + _state.lock()->add.use() + (attrsToJSON(inAttrs).dump()) + (attrsToJSON(infoAttrs).dump()) + ("") // no path + (false) + (time(0)).exec(); + } + + std::optional lookup(const Attrs & inAttrs) override + { + if (auto res = lookupExpired(inAttrs)) + return std::move(res->infoAttrs); + return {}; + } + + std::optional lookupWithTTL(const Attrs & inAttrs) override + { + if (auto res = lookupExpired(inAttrs)) { + if (!res->expired) + return std::move(res->infoAttrs); + debug("ignoring expired cache entry '%s'", + attrsToJSON(inAttrs).dump()); + } + return {}; + } + + std::optional lookupExpired(const Attrs & inAttrs) override + { + auto state(_state.lock()); + + auto inAttrsJSON = attrsToJSON(inAttrs).dump(); + + auto stmt(state->lookup.use()(inAttrsJSON)); + if (!stmt.next()) { + debug("did not find cache entry for '%s'", inAttrsJSON); + return {}; + } + + auto infoJSON = stmt.getStr(0); + auto locked = stmt.getInt(2) != 0; + auto timestamp = stmt.getInt(3); + + debug("using cache entry '%s' -> '%s'", inAttrsJSON, infoJSON); + + return Result2 { + .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)), + .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)), + }; + } + void add( - ref store, + Store & store, const Attrs & inAttrs, const Attrs & infoAttrs, const StorePath & storePath, @@ -58,13 +115,13 @@ struct CacheImpl : Cache _state.lock()->add.use() (attrsToJSON(inAttrs).dump()) (attrsToJSON(infoAttrs).dump()) - (store->printStorePath(storePath)) + 
(store.printStorePath(storePath)) (locked) (time(0)).exec(); } std::optional> lookup( - ref store, + Store & store, const Attrs & inAttrs) override { if (auto res = lookupExpired(store, inAttrs)) { @@ -77,7 +134,7 @@ struct CacheImpl : Cache } std::optional lookupExpired( - ref store, + Store & store, const Attrs & inAttrs) override { auto state(_state.lock()); @@ -91,19 +148,19 @@ struct CacheImpl : Cache } auto infoJSON = stmt.getStr(0); - auto storePath = store->parseStorePath(stmt.getStr(1)); + auto storePath = store.parseStorePath(stmt.getStr(1)); auto locked = stmt.getInt(2) != 0; auto timestamp = stmt.getInt(3); - store->addTempRoot(storePath); - if (!store->isValidPath(storePath)) { + store.addTempRoot(storePath); + if (!store.isValidPath(storePath)) { // FIXME: we could try to substitute 'storePath'. debug("ignoring disappeared cache entry '%s'", inAttrsJSON); return {}; } debug("using cache entry '%s' -> '%s', '%s'", - inAttrsJSON, infoJSON, store->printStorePath(storePath)); + inAttrsJSON, infoJSON, store.printStorePath(storePath)); return Result { .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)), diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh index af34e66ce..791d77025 100644 --- a/src/libfetchers/cache.hh +++ b/src/libfetchers/cache.hh @@ -6,19 +6,58 @@ namespace nix::fetchers { +/** + * A cache for arbitrary `Attrs` -> `Attrs` mappings with a timestamp + * for expiration. + */ struct Cache { virtual ~Cache() { } + /** + * Add a value to the cache. The cache is an arbitrary mapping of + * Attrs to Attrs. + */ + virtual void upsert( + const Attrs & inAttrs, + const Attrs & infoAttrs) = 0; + + /** + * Look up a key with infinite TTL. + */ + virtual std::optional lookup( + const Attrs & inAttrs) = 0; + + /** + * Look up a key. Return nothing if its TTL has exceeded + * `settings.tarballTTL`. + */ + virtual std::optional lookupWithTTL( + const Attrs & inAttrs) = 0; + + struct Result2 + { + bool expired = false; + Attrs infoAttrs; + }; + + /** + * Look up a key. Return a bool denoting whether its TTL has + * exceeded `settings.tarballTTL`. + */ + virtual std::optional lookupExpired( + const Attrs & inAttrs) = 0; + + /* Old cache for things that have a store path. */ virtual void add( - ref store, + Store & store, const Attrs & inAttrs, const Attrs & infoAttrs, const StorePath & storePath, bool locked) = 0; virtual std::optional> lookup( - ref store, + Store & store, const Attrs & inAttrs) = 0; struct Result @@ -29,7 +68,7 @@ struct Cache }; virtual std::optional lookupExpired( - ref store, + Store & store, const Attrs & inAttrs) = 0; }; diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc new file mode 100644 index 000000000..196489e05 --- /dev/null +++ b/src/libfetchers/fetch-to-store.cc @@ -0,0 +1,68 @@ +#include "fetch-to-store.hh" +#include "fetchers.hh" +#include "cache.hh" + +namespace nix { + +StorePath fetchToStore( + Store & store, + const SourcePath & path, + std::string_view name, + ContentAddressMethod method, + PathFilter * filter, + RepairFlag repair) +{ + // FIXME: add an optimisation for the case where the accessor is + // an FSInputAccessor pointing to a store path. 
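The code that follows builds a cache key from the accessor fingerprint and the ingestion parameters, so repeated fetches of an unchanged source can skip the copy. A sketch of that idea, with a plain `std::map` standing in for `fetchers::Attrs`:

```cpp
#include <map>
#include <optional>
#include <string>

// Plain-map stand-in for fetchers::Attrs.
using AttrsSketch = std::map<std::string, std::string>;

// Only unfiltered sources from fingerprinted accessors are cacheable: the
// fingerprint plus the ingestion parameters then determine the store path.
static std::optional<AttrsSketch> makeCacheKey(
    bool hasFilter,
    const std::optional<std::string> & fingerprint,
    const std::string & storeDir,
    const std::string & name,
    const std::string & method,
    const std::string & path)
{
    if (hasFilter || !fingerprint)
        return std::nullopt; // uncacheable; copy unconditionally
    return AttrsSketch{
        {"_what", "fetchToStore"},
        {"store", storeDir},
        {"name", name},
        {"fingerprint", *fingerprint},
        {"method", method},
        {"path", path},
    };
}

int main()
{
    auto key = makeCacheKey(false, std::string("sha256/abc"), "/nix/store", "source", "nar", "/");
    return key ? 0 : 1;
}
```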
+ + std::optional cacheKey; + + if (!filter && path.accessor->fingerprint) { + cacheKey = fetchers::Attrs{ + {"_what", "fetchToStore"}, + {"store", store.storeDir}, + {"name", std::string(name)}, + {"fingerprint", *path.accessor->fingerprint}, + { + "method", + std::visit(overloaded { + [](const TextIngestionMethod &) { + return "text"; + }, + [](const FileIngestionMethod & fim) { + switch (fim) { + case FileIngestionMethod::Flat: return "flat"; + case FileIngestionMethod::Recursive: return "nar"; + default: assert(false); + } + }, + }, method.raw), + }, + {"path", path.path.abs()} + }; + if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) { + debug("store path cache hit for '%s'", path); + return res->second; + } + } else + debug("source path '%s' is uncacheable", path); + + Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path)); + + auto filter2 = filter ? *filter : defaultPathFilter; + + auto storePath = + settings.readOnlyMode + ? store.computeStorePath( + name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first + : store.addToStore( + name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair); + + if (cacheKey) + fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); + + return storePath; +} + + +} diff --git a/src/libfetchers/fetch-to-store.hh b/src/libfetchers/fetch-to-store.hh new file mode 100644 index 000000000..e5e039340 --- /dev/null +++ b/src/libfetchers/fetch-to-store.hh @@ -0,0 +1,22 @@ +#pragma once + +#include "source-path.hh" +#include "store-api.hh" +#include "file-system.hh" +#include "repair-flag.hh" +#include "file-content-address.hh" + +namespace nix { + +/** + * Copy the `path` to the Nix store. + */ +StorePath fetchToStore( + Store & store, + const SourcePath & path, + std::string_view name = "source", + ContentAddressMethod method = FileIngestionMethod::Recursive, + PathFilter * filter = nullptr, + RepairFlag repair = NoRepair); + +} diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 895515327..7f282c972 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -1,5 +1,8 @@ #include "fetchers.hh" #include "store-api.hh" +#include "input-accessor.hh" +#include "source-path.hh" +#include "fetch-to-store.hh" #include @@ -107,6 +110,11 @@ Input Input::fromAttrs(Attrs && attrs) return std::move(*res); } +std::optional Input::getFingerprint(ref store) const +{ + return scheme ? scheme->getFingerprint(store, *this) : std::nullopt; +} + ParsedURL Input::toURL() const { if (!scheme) @@ -219,6 +227,16 @@ std::pair Input::fetch(ref store) const return {std::move(storePath), input}; } +std::pair, Input> Input::getAccessor(ref store) const +{ + try { + return scheme->getAccessor(store, *this); + } catch (Error & e) { + e.addTrace({}, "while fetching the input '%s'", to_string()); + throw; + } +} + Input Input::applyOverrides( std::optional ref, std::optional rev) const @@ -273,8 +291,8 @@ std::string Input::getType() const std::optional Input::getNarHash() const { if (auto s = maybeGetStrAttr(attrs, "narHash")) { - auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s); - if (hash.type != htSHA256) + auto hash = s->empty() ? 
Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s); + if (hash.algo != HashAlgorithm::SHA256) throw UsageError("narHash must use SHA-256"); return hash; } @@ -298,7 +316,7 @@ std::optional Input::getRev() const } catch (BadHash &e) { // Default to sha1 for backwards compatibility with existing // usages (e.g. `builtins.fetchTree` calls or flake inputs). - hash = Hash::parseAny(*s, htSHA1); + hash = Hash::parseAny(*s, HashAlgorithm::SHA1); } } @@ -355,6 +373,18 @@ void InputScheme::clone(const Input & input, const Path & destDir) const throw Error("do not know how to clone input '%s'", input.to_string()); } +std::pair InputScheme::fetch(ref store, const Input & input) +{ + auto [accessor, input2] = getAccessor(store, input); + auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName()); + return {storePath, input2}; +} + +std::pair, Input> InputScheme::getAccessor(ref store, const Input & input) const +{ + throw UnimplementedError("InputScheme must implement fetch() or getAccessor()"); +} + std::optional InputScheme::experimentalFeature() const { return {}; diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index a056c8939..5f3254b6d 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -10,7 +10,7 @@ #include #include -namespace nix { class Store; class StorePath; } +namespace nix { class Store; class StorePath; struct InputAccessor; } namespace nix::fetchers { @@ -83,6 +83,8 @@ public: */ std::pair fetch(ref store) const; + std::pair, Input> getAccessor(ref store) const; + Input applyOverrides( std::optional ref, std::optional rev) const; @@ -111,6 +113,12 @@ public: std::optional getRev() const; std::optional getRevCount() const; std::optional getLastModified() const; + + /** + * For locked inputs, return a string that uniquely specifies the + * content of the input (typically a commit hash or content hash). + */ + std::optional getFingerprint(ref store) const; }; @@ -167,7 +175,9 @@ struct InputScheme std::string_view contents, std::optional commitMsg) const; - virtual std::pair fetch(ref store, const Input & input) = 0; + virtual std::pair fetch(ref store, const Input & input); + + virtual std::pair, Input> getAccessor(ref store, const Input & input) const; /** * Is this `InputScheme` part of an experimental feature? 
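Note the division of labour introduced here: `InputScheme::fetch()` now has a default implementation that calls `getAccessor()` and pipes the result through `fetchToStore()`, so a scheme only has to produce an `InputAccessor`. A sketch of a minimal scheme relying on that default (the scheme name and the `makeExampleAccessor()` helper are hypothetical, and the remaining virtual members are elided):

```cpp
struct ExampleInputScheme : nix::fetchers::InputScheme
{
    std::pair<nix::ref<nix::InputAccessor>, nix::fetchers::Input>
    getAccessor(nix::ref<nix::Store> store, const nix::fetchers::Input & input) const override
    {
        // makeExampleAccessor() is an assumed helper that builds an
        // accessor for this input; the inherited fetch() does the rest.
        return {makeExampleAccessor(input), input};
    }
};
```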
@@ -176,6 +186,9 @@ struct InputScheme virtual bool isDirect(const Input & input) const { return true; } + + virtual std::optional getFingerprint(ref store, const Input & input) const + { return std::nullopt; } }; void registerInputScheme(std::shared_ptr && fetcher); diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc new file mode 100644 index 000000000..5ae416fd3 --- /dev/null +++ b/src/libfetchers/filtering-input-accessor.cc @@ -0,0 +1,83 @@ +#include "filtering-input-accessor.hh" + +namespace nix { + +std::string FilteringInputAccessor::readFile(const CanonPath & path) +{ + checkAccess(path); + return next->readFile(prefix + path); +} + +bool FilteringInputAccessor::pathExists(const CanonPath & path) +{ + return isAllowed(path) && next->pathExists(prefix + path); +} + +std::optional FilteringInputAccessor::maybeLstat(const CanonPath & path) +{ + checkAccess(path); + return next->maybeLstat(prefix + path); +} + +InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path) +{ + checkAccess(path); + DirEntries entries; + for (auto & entry : next->readDirectory(prefix + path)) { + if (isAllowed(path + entry.first)) + entries.insert(std::move(entry)); + } + return entries; +} + +std::string FilteringInputAccessor::readLink(const CanonPath & path) +{ + checkAccess(path); + return next->readLink(prefix + path); +} + +std::string FilteringInputAccessor::showPath(const CanonPath & path) +{ + return next->showPath(prefix + path); +} + +void FilteringInputAccessor::checkAccess(const CanonPath & path) +{ + if (!isAllowed(path)) + throw makeNotAllowedError + ? makeNotAllowedError(path) + : RestrictedPathError("access to path '%s' is forbidden", showPath(path)); +} + +struct AllowListInputAccessorImpl : AllowListInputAccessor +{ + std::set allowedPaths; + + AllowListInputAccessorImpl( + ref next, + std::set && allowedPaths, + MakeNotAllowedError && makeNotAllowedError) + : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError)) + , allowedPaths(std::move(allowedPaths)) + { } + + bool isAllowed(const CanonPath & path) override + { + return path.isAllowed(allowedPaths); + } + + void allowPath(CanonPath path) override + { + allowedPaths.insert(std::move(path)); + } +}; + +ref AllowListInputAccessor::create( + ref next, + std::set && allowedPaths, + MakeNotAllowedError && makeNotAllowedError) +{ + return make_ref(next, std::move(allowedPaths), std::move(makeNotAllowedError)); +} + +} diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh new file mode 100644 index 000000000..a352a33a6 --- /dev/null +++ b/src/libfetchers/filtering-input-accessor.hh @@ -0,0 +1,74 @@ +#pragma once + +#include "input-accessor.hh" +#include "source-path.hh" + +namespace nix { + +/** + * A function that should throw an exception of type + * `RestrictedPathError` explaining that access to `path` is + * forbidden. + */ +typedef std::function MakeNotAllowedError; + +/** + * An abstract wrapping `InputAccessor` that performs access + * control. Subclasses should override `isAllowed()` to implement an + * access control policy. The error message is customized at construction. 
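The access-control split in this new file is: `FilteringInputAccessor` forwards every operation to `next` after consulting `isAllowed()`, and subclasses supply the policy. A sketch of a custom policy (the subclass and the path it hides are purely illustrative; the concrete policy used for Git workdirs is the `AllowListInputAccessor` declared below):

```cpp
// Hypothetical policy: hide a top-level "secrets" directory while
// passing all other paths through to the wrapped accessor.
struct HideSecretsAccessor : nix::FilteringInputAccessor
{
    using FilteringInputAccessor::FilteringInputAccessor;

    bool isAllowed(const nix::CanonPath & path) override
    {
        return !path.isWithin(nix::CanonPath("/secrets"));
    }
};
```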
+ */ +struct FilteringInputAccessor : InputAccessor +{ + ref next; + CanonPath prefix; + MakeNotAllowedError makeNotAllowedError; + + FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError) + : next(src.accessor) + , prefix(src.path) + , makeNotAllowedError(std::move(makeNotAllowedError)) + { } + + std::string readFile(const CanonPath & path) override; + + bool pathExists(const CanonPath & path) override; + + std::optional maybeLstat(const CanonPath & path) override; + + DirEntries readDirectory(const CanonPath & path) override; + + std::string readLink(const CanonPath & path) override; + + std::string showPath(const CanonPath & path) override; + + /** + * Call `makeNotAllowedError` to throw a `RestrictedPathError` + * exception if `isAllowed()` returns `false` for `path`. + */ + void checkAccess(const CanonPath & path); + + /** + * Return `true` iff access to path is allowed. + */ + virtual bool isAllowed(const CanonPath & path) = 0; +}; + +/** + * A wrapping `InputAccessor` that checks paths against an allow-list. + */ +struct AllowListInputAccessor : public FilteringInputAccessor +{ + /** + * Grant access to the specified path. + */ + virtual void allowPath(CanonPath path) = 0; + + static ref create( + ref next, + std::set && allowedPaths, + MakeNotAllowedError && makeNotAllowedError); + + using FilteringInputAccessor::FilteringInputAccessor; +}; + +} diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index 81be64482..c3d8d273c 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -4,20 +4,14 @@ namespace nix { -struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor +struct FSInputAccessor : InputAccessor, PosixSourceAccessor { CanonPath root; - std::optional> allowedPaths; - MakeNotAllowedError makeNotAllowedError; - FSInputAccessorImpl( - const CanonPath & root, - std::optional> && allowedPaths, - MakeNotAllowedError && makeNotAllowedError) + FSInputAccessor(const CanonPath & root) : root(root) - , allowedPaths(std::move(allowedPaths)) - , makeNotAllowedError(std::move(makeNotAllowedError)) { + displayPrefix = root.isRoot() ? 
"" : root.abs(); } void readFile( @@ -26,39 +20,30 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor std::function sizeCallback) override { auto absPath = makeAbsPath(path); - checkAllowed(absPath); PosixSourceAccessor::readFile(absPath, sink, sizeCallback); } bool pathExists(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath); + return PosixSourceAccessor::pathExists(makeAbsPath(path)); } std::optional maybeLstat(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - checkAllowed(absPath); - return PosixSourceAccessor::maybeLstat(absPath); + return PosixSourceAccessor::maybeLstat(makeAbsPath(path)); } DirEntries readDirectory(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - checkAllowed(absPath); DirEntries res; - for (auto & entry : PosixSourceAccessor::readDirectory(absPath)) - if (isAllowed(absPath + entry.first)) - res.emplace(entry); + for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path))) + res.emplace(entry); return res; } std::string readLink(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - checkAllowed(absPath); - return PosixSourceAccessor::readLink(absPath); + return PosixSourceAccessor::readLink(makeAbsPath(path)); } CanonPath makeAbsPath(const CanonPath & path) @@ -66,59 +51,22 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor return root + path; } - void checkAllowed(const CanonPath & absPath) override - { - if (!isAllowed(absPath)) - throw makeNotAllowedError - ? makeNotAllowedError(absPath) - : RestrictedPathError("access to path '%s' is forbidden", absPath); - } - - bool isAllowed(const CanonPath & absPath) - { - if (!absPath.isWithin(root)) - return false; - - if (allowedPaths) { - auto p = absPath.removePrefix(root); - if (!p.isAllowed(*allowedPaths)) - return false; - } - - return true; - } - - void allowPath(CanonPath path) override - { - if (allowedPaths) - allowedPaths->insert(std::move(path)); - } - - bool hasAccessControl() override - { - return (bool) allowedPaths; - } - std::optional getPhysicalPath(const CanonPath & path) override { return makeAbsPath(path); } }; -ref makeFSInputAccessor( - const CanonPath & root, - std::optional> && allowedPaths, - MakeNotAllowedError && makeNotAllowedError) +ref makeFSInputAccessor(const CanonPath & root) { - return make_ref(root, std::move(allowedPaths), std::move(makeNotAllowedError)); + return make_ref(root); } -ref makeStorePathAccessor( +ref makeStorePathAccessor( ref store, - const StorePath & storePath, - MakeNotAllowedError && makeNotAllowedError) + const StorePath & storePath) { - return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError)); + return makeFSInputAccessor(CanonPath(store->toRealPath(storePath))); } SourcePath getUnfilteredRootPath(CanonPath path) diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index 19a5211c8..a98e83511 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -1,32 +1,19 @@ #pragma once #include "input-accessor.hh" +#include "source-path.hh" namespace nix { class StorePath; class Store; -struct FSInputAccessor : InputAccessor -{ - virtual void checkAllowed(const CanonPath & absPath) = 0; +ref makeFSInputAccessor( + const CanonPath & root); - virtual void allowPath(CanonPath path) = 0; - - virtual bool hasAccessControl() = 0; -}; - -typedef std::function 
<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
-
-ref<FSInputAccessor> makeFSInputAccessor(
-    const CanonPath & root,
-    std::optional<std::set<CanonPath>> && allowedPaths = {},
-    MakeNotAllowedError && makeNotAllowedError = {});
-
-ref<FSInputAccessor> makeStorePathAccessor(
+ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
-    const StorePath & storePath,
-    MakeNotAllowedError && makeNotAllowedError = {});
+    const StorePath & storePath);

 SourcePath getUnfilteredRootPath(CanonPath path);
diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc new file mode 100644 index 000000000..65f7b45ef --- /dev/null +++ b/src/libfetchers/git-utils.cc @@ -0,0 +1,677 @@
+#include "git-utils.hh"
+#include "input-accessor.hh"
+#include "cache.hh"
+#include "finally.hh"
+#include "processes.hh"
+#include "signals.hh"
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+
+namespace std {
+
+template<> struct hash<git_oid>
+{
+    size_t operator()(const git_oid & oid) const
+    {
+        return * (size_t *) oid.id;
+    }
+};
+
+}
+
+std::ostream & operator << (std::ostream & str, const git_oid & oid)
+{
+    str << git_oid_tostr_s(&oid);
+    return str;
+}
+
+bool operator == (const git_oid & oid1, const git_oid & oid2)
+{
+    return git_oid_equal(&oid1, &oid2);
+}
+
+namespace nix {
+
+// Some wrapper types that ensure that the git_*_free functions get called.
+template<auto del>
+struct Deleter
+{
+    template<typename T>
+    void operator()(T * p) const { del(p); };
+};
+
+typedef std::unique_ptr<git_repository, Deleter<git_repository_free>> Repository;
+typedef std::unique_ptr<git_tree_entry, Deleter<git_tree_entry_free>> TreeEntry;
+typedef std::unique_ptr<git_tree, Deleter<git_tree_free>> Tree;
+typedef std::unique_ptr<git_treebuilder, Deleter<git_treebuilder_free>> TreeBuilder;
+typedef std::unique_ptr<git_blob, Deleter<git_blob_free>> Blob;
+typedef std::unique_ptr<git_object, Deleter<git_object_free>> Object;
+typedef std::unique_ptr<git_commit, Deleter<git_commit_free>> Commit;
+typedef std::unique_ptr<git_reference, Deleter<git_reference_free>> Reference;
+typedef std::unique_ptr<git_describe_result, Deleter<git_describe_result_free>> DescribeResult;
+typedef std::unique_ptr<git_status_list, Deleter<git_status_list_free>> StatusList;
+typedef std::unique_ptr<git_remote, Deleter<git_remote_free>> Remote;
+typedef std::unique_ptr<git_config, Deleter<git_config_free>> GitConfig;
+typedef std::unique_ptr<git_config_iterator, Deleter<git_config_iterator_free>> ConfigIterator;
+
+// A helper to ensure that we don't leak objects returned by libgit2.
+template<typename T>
+struct Setter
+{
+    T & t;
+    typename T::pointer p = nullptr;
+
+    Setter(T & t) : t(t) { }
+
+    ~Setter() { if (p) t = T(p); }
+
+    operator typename T::pointer * () { return &p; }
+};
+
+Hash toHash(const git_oid & oid)
+{
+    #ifdef GIT_EXPERIMENTAL_SHA256
+    assert(oid.type == GIT_OID_SHA1);
+    #endif
+    Hash hash(HashAlgorithm::SHA1);
+    memcpy(hash.hash, oid.id, hash.hashSize);
+    return hash;
+}
+
+static void initLibGit2()
+{
+    if (git_libgit2_init() < 0)
+        throw Error("initialising libgit2: %s", git_error_last()->message);
+}
+
+git_oid hashToOID(const Hash & hash)
+{
+    git_oid oid;
+    if (git_oid_fromstr(&oid, hash.gitRev().c_str()))
+        throw Error("cannot convert '%s' to a Git OID", hash.gitRev());
+    return oid;
+}
+
+Object lookupObject(git_repository * repo, const git_oid & oid)
+{
+    Object obj;
+    if (git_object_lookup(Setter(obj), repo, &oid, GIT_OBJECT_ANY)) {
+        auto err = git_error_last();
+        throw Error("getting Git object '%s': %s", oid, err->message);
+    }
+    return obj;
+}
+
+template<typename T>
+T peelObject(git_repository * repo, git_object * obj, git_object_t type)
+{
+    T obj2;
+    if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) {
+        auto err = git_error_last();
+        throw Error("peeling Git object '%s': %s", git_object_id(obj), err->message);
+    }
+    return obj2;
+}
+
+struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
+{
+    CanonPath path;
+    Repository repo;
+
+    GitRepoImpl(CanonPath _path, bool create, bool bare)
+        : path(std::move(_path))
+    {
+        initLibGit2();
+
+        if (pathExists(path.abs())) {
+            if (git_repository_open(Setter(repo), path.c_str()))
+                throw Error("opening Git repository '%s': %s", path, git_error_last()->message);
+        } else {
+            if (git_repository_init(Setter(repo), path.c_str(), bare))
+                throw Error("creating Git repository '%s': %s", path, git_error_last()->message);
+        }
+    }
+
+    operator git_repository * ()
+    {
+        return repo.get();
+    }
+
+    uint64_t getRevCount(const Hash & rev) override
+    {
+        std::unordered_set<git_oid> done;
+        std::queue<Commit> todo;
+
+        todo.push(peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));
+
+        while (auto commit = pop(todo)) {
+            if (!done.insert(*git_commit_id(commit->get())).second) continue;
+
+            for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) {
+                git_commit * parent;
+                if (git_commit_parent(&parent, commit->get(), n))
+                    throw Error("getting parent of Git commit '%s': %s", *git_commit_id(commit->get()), git_error_last()->message);
+                todo.push(Commit(parent));
+            }
+        }
+
+        return done.size();
+    }
+
+    uint64_t getLastModified(const Hash & rev) override
+    {
+        auto commit = peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT);
+
+        return git_commit_time(commit.get());
+    }
+
+    bool isShallow() override
+    {
+        return git_repository_is_shallow(*this);
+    }
+
+    Hash resolveRef(std::string ref) override
+    {
+        // Handle revisions used as refs.
+        {
+            git_oid oid;
+            if (git_oid_fromstr(&oid, ref.c_str()) == 0)
+                return toHash(oid);
+        }
+
+        // Resolve short names like 'master'.
+        Reference ref2;
+        if (!git_reference_dwim(Setter(ref2), *this, ref.c_str()))
+            ref = git_reference_name(ref2.get());
+
+        // Resolve full references like 'refs/heads/master'.
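The `Deleter`/`Setter` pair above is the memory-management backbone of this file: each `typedef` wraps a libgit2 handle in a `unique_ptr` that calls the matching `git_*_free` function, and `Setter` adapts such a smart pointer to libgit2's raw output-parameter convention. A distilled sketch of the idiom (the function itself is illustrative):

```cpp
// Look up HEAD and return its full ref name. `Reference` frees the
// underlying git_reference automatically; Setter(head) yields the
// git_reference ** that libgit2 writes into, and ownership is adopted
// by `head` when the temporary Setter is destroyed.
std::string headRefName(git_repository * repo)
{
    Reference head;
    if (git_reference_lookup(Setter(head), repo, "HEAD"))
        throw nix::Error("looking up HEAD: %s", git_error_last()->message);
    return git_reference_name(head.get());
}
```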
+ Reference ref3; + if (git_reference_lookup(Setter(ref3), *this, ref.c_str())) + throw Error("resolving Git reference '%s': %s", ref, git_error_last()->message); + + auto oid = git_reference_target(ref3.get()); + if (!oid) + throw Error("cannot get OID for Git reference '%s'", git_reference_name(ref3.get())); + + return toHash(*oid); + } + + std::vector parseSubmodules(const CanonPath & configFile) + { + GitConfig config; + if (git_config_open_ondisk(Setter(config), configFile.abs().c_str())) + throw Error("parsing .gitmodules file: %s", git_error_last()->message); + + ConfigIterator it; + if (git_config_iterator_glob_new(Setter(it), config.get(), "^submodule\\..*\\.(path|url|branch)$")) + throw Error("iterating over .gitmodules: %s", git_error_last()->message); + + std::map entries; + + while (true) { + git_config_entry * entry = nullptr; + if (auto err = git_config_next(&entry, it.get())) { + if (err == GIT_ITEROVER) break; + throw Error("iterating over .gitmodules: %s", git_error_last()->message); + } + entries.emplace(entry->name + 10, entry->value); + } + + std::vector result; + + for (auto & [key, value] : entries) { + if (!hasSuffix(key, ".path")) continue; + std::string key2(key, 0, key.size() - 5); + auto path = CanonPath(value); + result.push_back(Submodule { + .path = path, + .url = entries[key2 + ".url"], + .branch = entries[key2 + ".branch"], + }); + } + + return result; + } + + // Helper for statusCallback below. + static int statusCallbackTrampoline(const char * path, unsigned int statusFlags, void * payload) + { + return (*((std::function *) payload))(path, statusFlags); + } + + WorkdirInfo getWorkdirInfo() override + { + WorkdirInfo info; + + /* Get the head revision, if any. */ + git_oid headRev; + if (auto err = git_reference_name_to_id(&headRev, *this, "HEAD")) { + if (err != GIT_ENOTFOUND) + throw Error("resolving HEAD: %s", git_error_last()->message); + } else + info.headRev = toHash(headRev); + + /* Get all tracked files and determine whether the working + directory is dirty. */ + std::function statusCallback = [&](const char * path, unsigned int statusFlags) + { + if (!(statusFlags & GIT_STATUS_INDEX_DELETED) && + !(statusFlags & GIT_STATUS_WT_DELETED)) + info.files.insert(CanonPath(path)); + if (statusFlags != GIT_STATUS_CURRENT) + info.isDirty = true; + return 0; + }; + + git_status_options options = GIT_STATUS_OPTIONS_INIT; + options.flags |= GIT_STATUS_OPT_INCLUDE_UNMODIFIED; + options.flags |= GIT_STATUS_OPT_EXCLUDE_SUBMODULES; + if (git_status_foreach_ext(*this, &options, &statusCallbackTrampoline, &statusCallback)) + throw Error("getting working directory status: %s", git_error_last()->message); + + /* Get submodule info. 
*/ + auto modulesFile = path + ".gitmodules"; + if (pathExists(modulesFile.abs())) + info.submodules = parseSubmodules(modulesFile); + + return info; + } + + std::optional getWorkdirRef() override + { + Reference ref; + if (git_reference_lookup(Setter(ref), *this, "HEAD")) + throw Error("looking up HEAD: %s", git_error_last()->message); + + if (auto target = git_reference_symbolic_target(ref.get())) + return target; + + return std::nullopt; + } + + std::vector> getSubmodules(const Hash & rev) override; + + std::string resolveSubmoduleUrl( + const std::string & url, + const std::string & base) override + { + git_buf buf = GIT_BUF_INIT; + if (git_submodule_resolve_url(&buf, *this, url.c_str())) + throw Error("resolving Git submodule URL '%s'", url); + Finally cleanup = [&]() { git_buf_dispose(&buf); }; + + std::string res(buf.ptr); + + if (!hasPrefix(res, "/") && res.find("://") == res.npos) + res = parseURL(base + "/" + res).canonicalise().to_string(); + + return res; + } + + bool hasObject(const Hash & oid_) override + { + auto oid = hashToOID(oid_); + + Object obj; + if (auto errCode = git_object_lookup(Setter(obj), *this, &oid, GIT_OBJECT_ANY)) { + if (errCode == GIT_ENOTFOUND) return false; + auto err = git_error_last(); + throw Error("getting Git object '%s': %s", oid, err->message); + } + + return true; + } + + ref getAccessor(const Hash & rev) override; + + static int sidebandProgressCallback(const char * str, int len, void * payload) + { + auto act = (Activity *) payload; + act->result(resFetchStatus, trim(std::string_view(str, len))); + return _isInterrupted ? -1 : 0; + } + + static int transferProgressCallback(const git_indexer_progress * stats, void * payload) + { + auto act = (Activity *) payload; + act->result(resFetchStatus, + fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB", + stats->received_objects, + stats->total_objects, + stats->indexed_deltas, + stats->total_deltas, + stats->received_bytes / (1024.0 * 1024.0))); + return _isInterrupted ? -1 : 0; + } + + void fetch( + const std::string & url, + const std::string & refspec, + bool shallow) override + { + Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url)); + + Remote remote; + + if (git_remote_create_anonymous(Setter(remote), *this, url.c_str())) + throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message); + + char * refspecs[] = {(char *) refspec.c_str()}; + git_strarray refspecs2 { + .strings = refspecs, + .count = 1 + }; + + git_fetch_options opts = GIT_FETCH_OPTIONS_INIT; + // FIXME: for some reason, shallow fetching over ssh barfs + // with "could not read from remote repository". + opts.depth = shallow && parseURL(url).scheme != "ssh" ? 
1 : GIT_FETCH_DEPTH_FULL;
+        opts.callbacks.payload = &act;
+        opts.callbacks.sideband_progress = sidebandProgressCallback;
+        opts.callbacks.transfer_progress = transferProgressCallback;
+
+        if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr))
+            throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message);
+    }
+
+    void verifyCommit(
+        const Hash & rev,
+        const std::vector<fetchers::PublicKey> & publicKeys) override
+    {
+        // Create an ad-hoc allowedSignersFile and populate it with publicKeys.
+        auto allowedSignersFile = createTempFile().second;
+        std::string allowedSigners;
+        for (const fetchers::PublicKey & k : publicKeys) {
+            if (k.type != "ssh-dsa"
+                && k.type != "ssh-ecdsa"
+                && k.type != "ssh-ecdsa-sk"
+                && k.type != "ssh-ed25519"
+                && k.type != "ssh-ed25519-sk"
+                && k.type != "ssh-rsa")
+                throw Error("Unknown key type '%s'.\n"
+                    "Please use one of\n"
+                    "- ssh-dsa\n"
+                    "- ssh-ecdsa\n"
+                    "- ssh-ecdsa-sk\n"
+                    "- ssh-ed25519\n"
+                    "- ssh-ed25519-sk\n"
+                    "- ssh-rsa", k.type);
+            allowedSigners += "* " + k.type + " " + k.key + "\n";
+        }
+        writeFile(allowedSignersFile, allowedSigners);
+
+        // Run the verification command.
+        auto [status, output] = runProgram(RunOptions {
+            .program = "git",
+            .args = {
+                "-c",
+                "gpg.ssh.allowedSignersFile=" + allowedSignersFile,
+                "-C", path.abs(),
+                "verify-commit",
+                rev.gitRev()
+            },
+            .mergeStderrToStdout = true,
+        });
+
+        /* Evaluate the result through the status code and by checking
+           whether the public key fingerprints appear on stderr. This
+           is necessary because the git command might also succeed due
+           to the commit being signed by gpg keys that are present in
+           the user's key agent. */
+        std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
+        for (const fetchers::PublicKey & k : publicKeys){
+            // Calculate the sha256 fingerprint of the public key and escape the regex symbol '+' to match the key literally.
+            auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
+            auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
+            re += "(" + escaped_fingerprint + ")";
+        }
+        re += "]";
+        if (status == 0 && std::regex_search(output, std::regex(re)))
+            printTalkative("Signature verification on commit %s succeeded.", rev.gitRev());
+        else
+            throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
+    }
+};
+
+ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)
+{
+    return make_ref<GitRepoImpl>(path, create, bare);
+}
+
+struct GitInputAccessor : InputAccessor
+{
+    ref<GitRepoImpl> repo;
+    Tree root;
+
+    GitInputAccessor(ref<GitRepoImpl> repo_, const Hash & rev)
+        : repo(repo_)
+        , root(peelObject<Tree>(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE))
+    {
+    }
+
+    std::string readBlob(const CanonPath & path, bool symlink)
+    {
+        auto blob = getBlob(path, symlink);
+
+        auto data = std::string_view((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));
+
+        return std::string(data);
+    }
+
+    std::string readFile(const CanonPath & path) override
+    {
+        return readBlob(path, false);
+    }
+
+    bool pathExists(const CanonPath & path) override
+    {
+        return path.isRoot() ?
true : (bool) lookup(path);
+    }
+
+    std::optional<Stat> maybeLstat(const CanonPath & path) override
+    {
+        if (path.isRoot())
+            return Stat { .type = tDirectory };
+
+        auto entry = lookup(path);
+        if (!entry)
+            return std::nullopt;
+
+        auto mode = git_tree_entry_filemode(entry);
+
+        if (mode == GIT_FILEMODE_TREE)
+            return Stat { .type = tDirectory };
+
+        else if (mode == GIT_FILEMODE_BLOB)
+            return Stat { .type = tRegular };
+
+        else if (mode == GIT_FILEMODE_BLOB_EXECUTABLE)
+            return Stat { .type = tRegular, .isExecutable = true };
+
+        else if (mode == GIT_FILEMODE_LINK)
+            return Stat { .type = tSymlink };
+
+        else if (mode == GIT_FILEMODE_COMMIT)
+            // Treat submodules as an empty directory.
+            return Stat { .type = tDirectory };
+
+        else
+            throw Error("file '%s' has an unsupported Git file type", showPath(path));
+    }
+
+    DirEntries readDirectory(const CanonPath & path) override
+    {
+        return std::visit(overloaded {
+            [&](Tree tree) {
+                DirEntries res;
+
+                auto count = git_tree_entrycount(tree.get());
+
+                for (size_t n = 0; n < count; ++n) {
+                    auto entry = git_tree_entry_byindex(tree.get(), n);
+                    // FIXME: add to cache
+                    res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{});
+                }
+
+                return res;
+            },
+            [&](Submodule) {
+                return DirEntries();
+            }
+        }, getTree(path));
+    }
+
+    std::string readLink(const CanonPath & path) override
+    {
+        return readBlob(path, true);
+    }
+
+    Hash getSubmoduleRev(const CanonPath & path)
+    {
+        auto entry = need(path);
+
+        if (git_tree_entry_type(entry) != GIT_OBJECT_COMMIT)
+            throw Error("'%s' is not a submodule", showPath(path));
+
+        return toHash(*git_tree_entry_id(entry));
+    }
+
+    std::unordered_map<CanonPath, TreeEntry> lookupCache;
+
+    /* Recursively look up 'path' relative to the root. */
+    git_tree_entry * lookup(const CanonPath & path)
+    {
+        if (path.isRoot()) return nullptr;
+
+        auto i = lookupCache.find(path);
+        if (i == lookupCache.end()) {
+            TreeEntry entry;
+            if (auto err = git_tree_entry_bypath(Setter(entry), root.get(), std::string(path.rel()).c_str())) {
+                if (err != GIT_ENOTFOUND)
+                    throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
+            }
+
+            i = lookupCache.emplace(path, std::move(entry)).first;
+        }
+
+        return &*i->second;
+    }
+
+    git_tree_entry * need(const CanonPath & path)
+    {
+        auto entry = lookup(path);
+        if (!entry)
+            throw Error("'%s' does not exist", showPath(path));
+        return entry;
+    }
+
+    struct Submodule { };
+
+    std::variant<Tree, Submodule> getTree(const CanonPath & path)
+    {
+        if (path.isRoot()) {
+            Tree tree;
+            if (git_tree_dup(Setter(tree), root.get()))
+                throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
+            return tree;
+        }
+
+        auto entry = need(path);
+
+        if (git_tree_entry_type(entry) == GIT_OBJECT_COMMIT)
+            return Submodule();
+
+        if (git_tree_entry_type(entry) != GIT_OBJECT_TREE)
+            throw Error("'%s' is not a directory", showPath(path));
+
+        Tree tree;
+        if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
+            throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
+
+        return tree;
+    }
+
+    Blob getBlob(const CanonPath & path, bool expectSymlink)
+    {
+        auto notExpected = [&]()
+        {
+            throw Error(
+                expectSymlink
+                ? "'%s' is not a symlink"
+                : "'%s' is not a regular file",
+                showPath(path));
+        };
+
+        if (path.isRoot()) notExpected();
+
+        auto entry = need(path);
+
+        if (git_tree_entry_type(entry) != GIT_OBJECT_BLOB)
+            notExpected();
+
+        auto mode = git_tree_entry_filemode(entry);
+        if (expectSymlink) {
+            if (mode != GIT_FILEMODE_LINK)
+                notExpected();
+        } else {
+            if (mode != GIT_FILEMODE_BLOB && mode != GIT_FILEMODE_BLOB_EXECUTABLE)
+                notExpected();
+        }
+
+        Blob blob;
+        if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *repo, entry))
+            throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message);
+
+        return blob;
+    }
+};
+
+ref<InputAccessor> GitRepoImpl::getAccessor(const Hash & rev)
+{
+    return make_ref<GitInputAccessor>(ref<GitRepoImpl>(shared_from_this()), rev);
+}
+
+std::vector<std::tuple<GitRepo::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev)
+{
+    /* Read the .gitmodules files from this revision. */
+    CanonPath modulesFile(".gitmodules");
+
+    auto accessor = getAccessor(rev);
+    if (!accessor->pathExists(modulesFile)) return {};
+
+    /* Parse it and get the revision of each submodule. */
+    auto configS = accessor->readFile(modulesFile);
+
+    auto [fdTemp, pathTemp] = createTempFile("nix-git-submodules");
+    writeFull(fdTemp.get(), configS);
+
+    std::vector<std::tuple<Submodule, Hash>> result;
+
+    for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) {
+        auto rev = accessor.dynamic_pointer_cast<GitInputAccessor>()->getSubmoduleRev(submodule.path);
+        result.push_back({std::move(submodule), rev});
+    }
+
+    return result;
+}
+
+
+}
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh new file mode 100644 index 000000000..1def82071 --- /dev/null +++ b/src/libfetchers/git-utils.hh @@ -0,0 +1,90 @@
+#pragma once
+
+#include "input-accessor.hh"
+
+namespace nix {
+
+namespace fetchers { struct PublicKey; }
+
+struct GitRepo
+{
+    virtual ~GitRepo()
+    { }
+
+    static ref<GitRepo> openRepo(const CanonPath & path, bool create = false, bool bare = false);
+
+    virtual uint64_t getRevCount(const Hash & rev) = 0;
+
+    virtual uint64_t getLastModified(const Hash & rev) = 0;
+
+    virtual bool isShallow() = 0;
+
+    /* Return the commit hash to which a ref points. */
+    virtual Hash resolveRef(std::string ref) = 0;
+
+    /**
+     * Info about a submodule.
+     */
+    struct Submodule
+    {
+        CanonPath path;
+        std::string url;
+        std::string branch;
+    };
+
+    struct WorkdirInfo
+    {
+        bool isDirty = false;
+
+        /* The checked out commit, or nullopt if there are no commits
+           in the repo yet. */
+        std::optional<Hash> headRev;
+
+        /* All files in the working directory that are unchanged,
+           modified or added, but excluding deleted files. */
+        std::set<CanonPath> files;
+
+        /* The submodules listed in .gitmodules of this workdir. */
+        std::vector<Submodule> submodules;
+    };
+
+    virtual WorkdirInfo getWorkdirInfo() = 0;
+
+    /* Get the ref that HEAD points to. */
+    virtual std::optional<std::string> getWorkdirRef() = 0;
+
+    /**
+     * Return the submodules of this repo at the indicated revision,
+     * along with the revision of each submodule.
+     */
+    virtual std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) = 0;
+
+    virtual std::string resolveSubmoduleUrl(
+        const std::string & url,
+        const std::string & base) = 0;
+
+    struct TarballInfo
+    {
+        Hash treeHash;
+        time_t lastModified;
+    };
+
+    virtual bool hasObject(const Hash & oid) = 0;
+
+    virtual ref<InputAccessor> getAccessor(const Hash & rev) = 0;
+
+    virtual void fetch(
+        const std::string & url,
+        const std::string & refspec,
+        bool shallow) = 0;
+
+    /**
+     * Verify that commit `rev` is signed by one of the keys in
+     * `publicKeys`. Throw an error if it isn't.
+ */ + virtual void verifyCommit( + const Hash & rev, + const std::vector & publicKeys) = 0; +}; + +} diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index cc735996b..79270c317 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -8,6 +8,12 @@ #include "pathlocks.hh" #include "processes.hh" #include "git.hh" +#include "fs-input-accessor.hh" +#include "filtering-input-accessor.hh" +#include "mounted-input-accessor.hh" +#include "git-utils.hh" +#include "logging.hh" +#include "finally.hh" #include "fetch-settings.hh" @@ -47,7 +53,7 @@ bool touchCacheFile(const Path & path, time_t touch_time) Path getCachePath(std::string_view key) { return getCacheDir() + "/nix/gitv3/" + - hashString(htSHA256, key).to_string(HashFormat::Base32, false); + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false); } // Returns the name of the HEAD branch. @@ -133,189 +139,19 @@ std::optional readHeadCached(const std::string & actualUrl) return std::nullopt; } -bool isNotDotGitDirectory(const Path & path) +std::vector getPublicKeys(const Attrs & attrs) { - return baseNameOf(path) != ".git"; -} - -struct WorkdirInfo -{ - bool clean = false; - bool hasHead = false; -}; - -std::vector getPublicKeys(const Attrs & attrs) { std::vector publicKeys; if (attrs.contains("publicKeys")) { nlohmann::json publicKeysJson = nlohmann::json::parse(getStrAttr(attrs, "publicKeys")); ensureType(publicKeysJson, nlohmann::json::value_t::array); publicKeys = publicKeysJson.get>(); } - else { - publicKeys = {}; - } if (attrs.contains("publicKey")) publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")}); return publicKeys; } -void doCommitVerification(const Path repoDir, const Path gitDir, const std::string rev, const std::vector& publicKeys) { - // Create ad-hoc allowedSignersFile and populate it with publicKeys - auto allowedSignersFile = createTempFile().second; - std::string allowedSigners; - for (const PublicKey& k : publicKeys) { - if (k.type != "ssh-dsa" - && k.type != "ssh-ecdsa" - && k.type != "ssh-ecdsa-sk" - && k.type != "ssh-ed25519" - && k.type != "ssh-ed25519-sk" - && k.type != "ssh-rsa") - warn("Unknown keytype: %s\n" - "Please use one of\n" - "- ssh-dsa\n" - " ssh-ecdsa\n" - " ssh-ecdsa-sk\n" - " ssh-ed25519\n" - " ssh-ed25519-sk\n" - " ssh-rsa", k.type); - allowedSigners += "* " + k.type + " " + k.key + "\n"; - } - writeFile(allowedSignersFile, allowedSigners); - - // Run verification command - auto [status, output] = runProgram(RunOptions { - .program = "git", - .args = {"-c", "gpg.ssh.allowedSignersFile=" + allowedSignersFile, "-C", repoDir, - "--git-dir", gitDir, "verify-commit", rev}, - .mergeStderrToStdout = true, - }); - - /* Evaluate result through status code and checking if public key fingerprints appear on stderr - * This is neccessary because the git command might also succeed due to the commit being signed by gpg keys - * that are present in the users key agent. 
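Taken together, `git-utils.hh` gives the fetchers a small façade over libgit2. A sketch of typical read-only use, assuming an existing repository (the path, ref, and file name are illustrative):

```cpp
// Open a repository, resolve a ref to a commit, and read a file at
// that revision through the resulting InputAccessor.
std::string readFileAtRef(const nix::CanonPath & repoPath, const std::string & ref)
{
    auto repo = nix::GitRepo::openRepo(repoPath);
    auto rev = repo->resolveRef(ref);          // e.g. "master" or a full SHA-1
    auto accessor = repo->getAccessor(rev);
    return accessor->readFile(nix::CanonPath("README.md"));
}
```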
*/ - std::string re = R"(Good "git" signature for \* with .* key SHA256:[)"; - for (const PublicKey& k : publicKeys){ - // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally - auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "="); - auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" ); - re += "(" + escaped_fingerprint + ")"; - } - re += "]"; - if (status == 0 && std::regex_search(output, std::regex(re))) - printTalkative("Signature verification on commit %s succeeded", rev); - else - throw Error("Commit signature verification on commit %s failed: \n%s", rev, output); -} - -// Returns whether a git workdir is clean and has commits. -WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir) -{ - const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false); - std::string gitDir(".git"); - - auto env = getEnv(); - // Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong - // that way unknown errors can lead to a failure instead of continuing through the wrong code path - env["LC_ALL"] = "C"; - - /* Check whether HEAD points to something that looks like a commit, - since that is the refrence we want to use later on. */ - auto result = runProgram(RunOptions { - .program = "git", - .args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" }, - .environment = env, - .mergeStderrToStdout = true - }); - auto exitCode = WEXITSTATUS(result.first); - auto errorMessage = result.second; - - if (errorMessage.find("fatal: not a git repository") != std::string::npos) { - throw Error("'%s' is not a Git repository", workdir); - } else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) { - // indicates that the repo does not have any commits - // we want to proceed and will consider it dirty later - } else if (exitCode != 0) { - // any other errors should lead to a failure - throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage); - } - - bool clean = false; - bool hasHead = exitCode == 0; - - try { - if (hasHead) { - // Using git diff is preferrable over lower-level operations here, - // because its conceptually simpler and we only need the exit code anyways. - auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"}); - if (!submodules) { - // Changes in submodules should only make the tree dirty - // when those submodules will be copied as well. 
- gitDiffOpts.emplace_back("--ignore-submodules"); - } - gitDiffOpts.emplace_back("--"); - runProgram("git", true, gitDiffOpts); - - clean = true; - } - } catch (ExecError & e) { - if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw; - } - - return WorkdirInfo { .clean = clean, .hasHead = hasHead }; -} - -std::pair fetchFromWorkdir(ref store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo) -{ - const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false); - auto gitDir = ".git"; - - if (!fetchSettings.allowDirty) - throw Error("Git tree '%s' is dirty", workdir); - - if (fetchSettings.warnDirty) - warn("Git tree '%s' is dirty", workdir); - - auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" }); - if (submodules) - gitOpts.emplace_back("--recurse-submodules"); - - auto files = tokenizeString>( - runProgram("git", true, gitOpts), "\0"s); - - Path actualPath(absPath(workdir)); - - PathFilter filter = [&](const Path & p) -> bool { - assert(hasPrefix(p, actualPath)); - std::string file(p, actualPath.size() + 1); - - auto st = lstat(p); - - if (S_ISDIR(st.st_mode)) { - auto prefix = file + "/"; - auto i = files.lower_bound(prefix); - return i != files.end() && hasPrefix(*i, prefix); - } - - return files.count(file); - }; - - auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter); - - // FIXME: maybe we should use the timestamp of the last - // modified dirty file? - input.attrs.insert_or_assign( - "lastModified", - workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0); - - if (workdirInfo.hasHead) { - input.attrs.insert_or_assign("dirtyRev", chomp( - runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "HEAD" })) + "-dirty"); - input.attrs.insert_or_assign("dirtyShortRev", chomp( - runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "--short", "HEAD" })) + "-dirty"); - } - - return {std::move(storePath), input}; -} } // end namespace struct GitInputScheme : InputScheme @@ -386,9 +222,6 @@ struct GitInputScheme : InputScheme || name == "publicKeys") experimentalFeatureSettings.require(Xp::VerifiedFetches); - maybeGetBoolAttr(attrs, "shallow"); - maybeGetBoolAttr(attrs, "submodules"); - maybeGetBoolAttr(attrs, "allRefs"); maybeGetBoolAttr(attrs, "verifyCommit"); if (auto ref = maybeGetStrAttr(attrs, "ref")) { @@ -401,6 +234,9 @@ struct GitInputScheme : InputScheme auto url = fixGitURL(getStrAttr(attrs, "url")); parseURL(url); input.attrs["url"] = url; + getShallowAttr(input); + getSubmodulesAttr(input); + getAllRefsAttr(input); return input; } @@ -410,8 +246,10 @@ struct GitInputScheme : InputScheme if (url.scheme != "git") url.scheme = "git+" + url.scheme; if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev()); if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref); - if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false)) + if (getShallowAttr(input)) url.query.insert_or_assign("shallow", "1"); + if (getSubmodulesAttr(input)) + url.query.insert_or_assign("submodules", "1"); if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false)) url.query.insert_or_assign("verifyCommit", "1"); auto publicKeys = getPublicKeys(input.attrs); @@ -439,11 +277,11 @@ struct GitInputScheme : InputScheme void clone(const Input & input, const 
Path & destDir) const override { - auto [isLocal, actualUrl] = getActualUrl(input); + auto repoInfo = getRepoInfo(input); Strings args = {"clone"}; - args.push_back(actualUrl); + args.push_back(repoInfo.url); if (auto ref = input.getRef()) { args.push_back("--branch"); @@ -459,10 +297,9 @@ struct GitInputScheme : InputScheme std::optional getSourcePath(const Input & input) const override { - auto url = parseURL(getStrAttr(input.attrs, "url")); - if (url.scheme == "file" && !input.getRef() && !input.getRev()) - return url.path; - return {}; + auto repoInfo = getRepoInfo(input); + if (repoInfo.isLocal) return repoInfo.url; + return std::nullopt; } void putFile( @@ -471,24 +308,88 @@ struct GitInputScheme : InputScheme std::string_view contents, std::optional commitMsg) const override { - auto root = getSourcePath(input); - if (!root) + auto repoInfo = getRepoInfo(input); + if (!repoInfo.isLocal) throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string()); - writeFile((CanonPath(*root) + path).abs(), contents); + writeFile((CanonPath(repoInfo.url) + path).abs(), contents); - auto gitDir = ".git"; + auto result = runProgram(RunOptions { + .program = "git", + .args = {"-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())}, + }); + auto exitCode = WEXITSTATUS(result.first); - runProgram("git", true, - { "-C", *root, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); - - if (commitMsg) + if (exitCode != 0) { + // The path is not `.gitignore`d, we can add the file. runProgram("git", true, - { "-C", *root, "--git-dir", gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); + + + if (commitMsg) { + // Pause the logger to allow for user input (such as a gpg passphrase) in `git commit` + logger->pause(); + Finally restoreLogger([]() { logger->resume(); }); + runProgram("git", true, + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + } + } } - std::pair getActualUrl(const Input & input) const + struct RepoInfo { + /* Whether this is a local, non-bare repository. */ + bool isLocal = false; + + /* Working directory info: the complete list of files, and + whether the working directory is dirty compared to HEAD. */ + GitRepo::WorkdirInfo workdirInfo; + + /* URL of the repo, or its path if isLocal. Never a `file` URL. */ + std::string url; + + void warnDirty() const + { + if (workdirInfo.isDirty) { + if (!fetchSettings.allowDirty) + throw Error("Git tree '%s' is dirty", url); + + if (fetchSettings.warnDirty) + warn("Git tree '%s' is dirty", url); + } + } + + std::string gitDir = ".git"; + }; + + bool getShallowAttr(const Input & input) const + { + return maybeGetBoolAttr(input.attrs, "shallow").value_or(false); + } + + bool getSubmodulesAttr(const Input & input) const + { + return maybeGetBoolAttr(input.attrs, "submodules").value_or(false); + } + + bool getAllRefsAttr(const Input & input) const + { + return maybeGetBoolAttr(input.attrs, "allRefs").value_or(false); + } + + RepoInfo getRepoInfo(const Input & input) const + { + auto checkHashAlgorithm = [&](const std::optional & hash) + { + if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256)) + throw Error("Hash '%s' is not supported by Git. 
Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true)); + }; + + if (auto rev = input.getRev()) + checkHashAlgorithm(rev); + + RepoInfo repoInfo; + // file:// URIs are normally not cloned (but otherwise treated the // same as remote URIs, i.e. we don't use the working tree or // HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git @@ -496,162 +397,132 @@ struct GitInputScheme : InputScheme static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing auto url = parseURL(getStrAttr(input.attrs, "url")); bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git"); - bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository; - return {isLocal, isLocal ? url.path : url.base}; + repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository; + repoInfo.url = repoInfo.isLocal ? url.path : url.base; + + // If this is a local directory and no ref or revision is + // given, then allow the use of an unclean working tree. + if (!input.getRef() && !input.getRev() && repoInfo.isLocal) + repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo(); + + return repoInfo; } - std::pair fetch(ref store, const Input & _input) override + uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const { - Input input(_input); - auto gitDir = ".git"; + Attrs key{{"_what", "gitLastModified"}, {"rev", rev.gitRev()}}; + + auto cache = getCache(); + + if (auto res = cache->lookup(key)) + return getIntAttr(*res, "lastModified"); + + auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev); + + cache->upsert(key, Attrs{{"lastModified", lastModified}}); + + return lastModified; + } + + uint64_t getRevCount(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const + { + Attrs key{{"_what", "gitRevCount"}, {"rev", rev.gitRev()}}; + + auto cache = getCache(); + + if (auto revCountAttrs = cache->lookup(key)) + return getIntAttr(*revCountAttrs, "revCount"); + + Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url)); + + auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev); + + cache->upsert(key, Attrs{{"revCount", revCount}}); + + return revCount; + } + + std::string getDefaultRef(const RepoInfo & repoInfo) const + { + auto head = repoInfo.isLocal + ? 
GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef() + : readHeadCached(repoInfo.url); + if (!head) { + warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url); + return "master"; + } + return *head; + } + + static MakeNotAllowedError makeNotAllowedError(std::string url) + { + return [url{std::move(url)}](const CanonPath & path) -> RestrictedPathError + { + if (nix::pathExists(path.abs())) + return RestrictedPathError("access to path '%s' is forbidden because it is not under Git control; maybe you should 'git add' it to the repository '%s'?", path, url); + else + return RestrictedPathError("path '%s' does not exist in Git repository '%s'", path, url); + }; + } + + void verifyCommit(const Input & input, std::shared_ptr repo) const + { + auto publicKeys = getPublicKeys(input.attrs); + auto verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty()); + + if (verifyCommit) { + if (input.getRev() && repo) + repo->verifyCommit(*input.getRev(), publicKeys); + else + throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string()); + } + } + + std::pair, Input> getAccessorFromCommit( + ref store, + RepoInfo & repoInfo, + Input && input) const + { + assert(!repoInfo.workdirInfo.isDirty); + + auto origRev = input.getRev(); std::string name = input.getName(); - bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false); - bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false); - bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false); - std::vector publicKeys = getPublicKeys(input.attrs); - bool verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty()); - - std::string cacheType = "git"; - if (shallow) cacheType += "-shallow"; - if (submodules) cacheType += "-submodules"; - if (allRefs) cacheType += "-all-refs"; - - auto checkHashType = [&](const std::optional & hash) - { - if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256)) - throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true)); - }; - - auto getLockedAttrs = [&]() - { - checkHashType(input.getRev()); - - return Attrs({ - {"type", cacheType}, - {"name", name}, - {"rev", input.getRev()->gitRev()}, - {"verifyCommit", verifyCommit}, - {"publicKeys", publicKeys_to_string(publicKeys)}, - }); - }; - - auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath) - -> std::pair - { - assert(input.getRev()); - assert(!_input.getRev() || _input.getRev() == input.getRev()); - if (!shallow) - input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount")); - input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified")); - return {std::move(storePath), input}; - }; - - if (input.getRev()) { - if (auto res = getCache()->lookup(store, getLockedAttrs())) - return makeResult(res->first, std::move(res->second)); - } - - auto [isLocal, actualUrl_] = getActualUrl(input); - auto actualUrl = actualUrl_; // work around clang bug - - /* If this is a local directory, no ref or revision is given and no signature verification is needed, - allow fetching directly from a dirty workdir. 
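The `getLastModified()` and `getRevCount()` helpers above illustrate the pattern that replaces the old store-path cache for pure metadata queries: a self-describing `Attrs` key mapped to an `Attrs` value via the new `upsert()`/`lookup()` API. A distilled sketch (the `_what` tag and attribute name here are illustrative):

```cpp
uint64_t cachedGitQuery(const nix::Hash & rev)
{
    using namespace nix::fetchers;

    Attrs key{{"_what", "exampleQuery"}, {"rev", rev.gitRev()}};

    auto cache = getCache();
    if (auto res = cache->lookup(key))
        return getIntAttr(*res, "value");

    uint64_t value = 0; // stand-in for an expensive libgit2 query

    cache->upsert(key, Attrs{{"value", value}});
    return value;
}
```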
*/ - if (!input.getRef() && !input.getRev() && isLocal) { - auto workdirInfo = getWorkdirInfo(input, actualUrl); - if (!workdirInfo.clean) { - if (verifyCommit) - throw Error("Can't fetch from a dirty workdir with commit signature verification enabled."); - else - return fetchFromWorkdir(store, input, actualUrl, workdirInfo); - } - } - - Attrs unlockedAttrs({ - {"type", cacheType}, - {"name", name}, - {"url", actualUrl}, - {"verifyCommit", verifyCommit}, - {"publicKeys", publicKeys_to_string(publicKeys)}, - }); + auto originalRef = input.getRef(); + auto ref = originalRef ? *originalRef : getDefaultRef(repoInfo); + input.attrs.insert_or_assign("ref", ref); Path repoDir; - if (isLocal) { - if (!input.getRef()) { - auto head = readHead(actualUrl); - if (!head) { - warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl); - head = "master"; - } - input.attrs.insert_or_assign("ref", *head); - unlockedAttrs.insert_or_assign("ref", *head); - } - + if (repoInfo.isLocal) { + repoDir = repoInfo.url; if (!input.getRev()) - input.attrs.insert_or_assign("rev", - Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev()); - - repoDir = actualUrl; + input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev()); } else { - const bool useHeadRef = !input.getRef(); - if (useHeadRef) { - auto head = readHeadCached(actualUrl); - if (!head) { - warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl); - head = "master"; - } - input.attrs.insert_or_assign("ref", *head); - unlockedAttrs.insert_or_assign("ref", *head); - } else { - if (!input.getRev()) { - unlockedAttrs.insert_or_assign("ref", input.getRef().value()); - } - } - - if (auto res = getCache()->lookup(store, unlockedAttrs)) { - auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1); - if (!input.getRev() || input.getRev() == rev2) { - input.attrs.insert_or_assign("rev", rev2.gitRev()); - return makeResult(res->first, std::move(res->second)); - } - } - - Path cacheDir = getCachePath(actualUrl); + Path cacheDir = getCachePath(repoInfo.url); repoDir = cacheDir; - gitDir = "."; + repoInfo.gitDir = "."; createDirs(dirOf(cacheDir)); - PathLocks cacheDirLock({cacheDir + ".lock"}); + PathLocks cacheDirLock({cacheDir}); - if (!pathExists(cacheDir)) { - runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir }); - } + auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true); Path localRefFile = - input.getRef()->compare(0, 5, "refs/") == 0 - ? cacheDir + "/" + *input.getRef() - : cacheDir + "/refs/heads/" + *input.getRef(); + ref.compare(0, 5, "refs/") == 0 + ? cacheDir + "/" + ref + : cacheDir + "/refs/heads/" + ref; bool doFetch; time_t now = time(0); /* If a rev was specified, we need to fetch if it's not in the repo. 
*/ - if (input.getRev()) { - try { - runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() }); - doFetch = false; - } catch (ExecError & e) { - if (WIFEXITED(e.status)) { - doFetch = true; - } else { - throw; - } - } + if (auto rev = input.getRev()) { + doFetch = !repo->hasObject(*rev); } else { - if (allRefs) { + if (getAllRefsAttr(input)) { doFetch = true; } else { /* If the local ref is older than ‘tarball-ttl’ seconds, do a @@ -663,163 +534,213 @@ struct GitInputScheme : InputScheme } if (doFetch) { - Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl)); - - // FIXME: git stderr messes up our progress indicator, so - // we're using --quiet for now. Should process its stderr. try { - auto ref = input.getRef(); - auto fetchRef = allRefs + auto fetchRef = + getAllRefsAttr(input) ? "refs/*" - : ref->compare(0, 5, "refs/") == 0 - ? *ref - : ref == "HEAD" - ? *ref - : "refs/heads/" + *ref; - runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, {}, true); + : input.getRev() + ? input.getRev()->gitRev() + : ref.compare(0, 5, "refs/") == 0 + ? ref + : ref == "HEAD" + ? ref + : "refs/heads/" + ref; + + repo->fetch(repoInfo.url, fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input)); } catch (Error & e) { if (!pathExists(localRefFile)) throw; - warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl); + logError(e.info()); + warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url); } if (!touchCacheFile(localRefFile, now)) warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno)); - if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef())) - warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl); + if (!originalRef && !storeCachedHead(repoInfo.url, ref)) + warn("could not update cached head '%s' for '%s'", ref, repoInfo.url); } - if (!input.getRev()) - input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev()); + if (auto rev = input.getRev()) { + if (!repo->hasObject(*rev)) + throw Error( + "Cannot find Git revision '%s' in ref '%s' of repository '%s'! " + "Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the " + ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD + "allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".", + rev->gitRev(), + ref, + repoInfo.url + ); + } else + input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev()); // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder } - bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true"; + auto repo = GitRepo::openRepo(CanonPath(repoDir)); - if (isShallow && !shallow) - throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl); + auto isShallow = repo->isShallow(); - // FIXME: check whether rev is an ancestor of ref. 
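For reference, the `fetchRef` selection above maps the requested input onto the local cache repository as follows (values illustrative):

```cpp
// allRefs = true -> "refs/*:refs/*"                       (mirror everything)
// explicit rev   -> "<rev>:<rev>"                         (fetch just that commit)
// "refs/tags/v1" -> "refs/tags/v1:refs/tags/v1"
// "HEAD"         -> "HEAD:HEAD"
// "master"       -> "refs/heads/master:refs/heads/master"
```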
+ if (isShallow && !getShallowAttr(input)) + throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url); - printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), actualUrl); + // FIXME: check whether rev is an ancestor of ref? - /* Now that we know the ref, check again whether we have it in - the store. */ - if (auto res = getCache()->lookup(store, getLockedAttrs())) - return makeResult(res->first, std::move(res->second)); - - Path tmpDir = createTempDir(); - AutoDelete delTmpDir(tmpDir, true); - PathFilter filter = defaultPathFilter; - - auto result = runProgram(RunOptions { - .program = "git", - .args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() }, - .mergeStderrToStdout = true - }); - if (WEXITSTATUS(result.first) == 128 - && result.second.find("bad file") != std::string::npos) - { - throw Error( - "Cannot find Git revision '%s' in ref '%s' of repository '%s'! " - "Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the " - ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD - "allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".", - input.getRev()->gitRev(), - *input.getRef(), - actualUrl - ); - } - - if (verifyCommit) - doCommitVerification(repoDir, gitDir, input.getRev()->gitRev(), publicKeys); - - if (submodules) { - Path tmpGitDir = createTempDir(); - AutoDelete delTmpGitDir(tmpGitDir, true); - - runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir }); - - { - // TODO: repoDir might lack the ref (it only checks if rev - // exists, see FIXME above) so use a big hammer and fetch - // everything to ensure we get the rev. - Activity act(*logger, lvlTalkative, actUnknown, fmt("making temporary clone of '%s'", repoDir)); - runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force", - "--update-head-ok", "--", repoDir, "refs/*:refs/*" }, {}, true); - } - - runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() }); - - /* Ensure that we use the correct origin for fetching - submodules. This matters for submodules with relative - URLs. */ - if (isLocal) { - writeFile(tmpGitDir + "/config", readFile(repoDir + "/" + gitDir + "/config")); - - /* Restore the config.bare setting we may have just - copied erroneously from the user's repo. */ - runProgram("git", true, { "-C", tmpDir, "config", "core.bare", "false" }); - } else - runProgram("git", true, { "-C", tmpDir, "config", "remote.origin.url", actualUrl }); - - /* As an optimisation, copy the modules directory of the - source repo if it exists. */ - auto modulesPath = repoDir + "/" + gitDir + "/modules"; - if (pathExists(modulesPath)) { - Activity act(*logger, lvlTalkative, actUnknown, fmt("copying submodules of '%s'", actualUrl)); - runProgram("cp", true, { "-R", "--", modulesPath, tmpGitDir + "/modules" }); - } - - { - Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching submodules of '%s'", actualUrl)); - runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }, {}, true); - } - - filter = isNotDotGitDirectory; - } else { - // FIXME: should pipe this, or find some better way to extract a - // revision. 
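
The code removed below shelled out to `git archive` and unpacked a tarball into a temporary directory just to extract one revision. Its replacement obtains an accessor onto the commit's tree and reads files lazily. A hedged sketch of the new flow, using only interfaces introduced in this change:

    // Sketch: read a file out of a revision without materialising a tarball.
    auto repo = GitRepo::openRepo(CanonPath(repoDir));
    auto accessor = repo->getAccessor(rev);
    std::string contents = accessor->readFile(CanonPath("default.nix"));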
-        auto source = sinkToSource([&](Sink & sink) {
-            runProgram2({
-                .program = "git",
-                .args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() },
-                .standardOut = &sink
-            });
-        });
-
-        unpackTarfile(*source, tmpDir);
-    }
-
-    auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
-
-    auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
+        auto rev = *input.getRev();

     Attrs infoAttrs({
-        {"rev", input.getRev()->gitRev()},
-        {"lastModified", lastModified},
+        {"rev", rev.gitRev()},
+        {"lastModified", getLastModified(repoInfo, repoDir, rev)},
     });

-    if (!shallow)
+    if (!getShallowAttr(input))
         infoAttrs.insert_or_assign("revCount",
-            std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() })));
+            getRevCount(repoInfo, repoDir, rev));

-    if (!_input.getRev())
-        getCache()->add(
-            store,
-            unlockedAttrs,
-            infoAttrs,
-            storePath,
-            false);
+        printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.url);

-    getCache()->add(
-        store,
-        getLockedAttrs(),
-        infoAttrs,
-        storePath,
-        true);
+        verifyCommit(input, repo);

-    return makeResult(infoAttrs, std::move(storePath));
+        auto accessor = repo->getAccessor(rev);
+
+        accessor->setPathDisplay("«" + input.to_string() + "»");
+
+        /* If the repo has submodules, fetch them and return a mounted
+           input accessor consisting of the accessor for the top-level
+           repo and the accessors for the submodules. */
+        if (getSubmodulesAttr(input)) {
+            std::map<CanonPath, ref<InputAccessor>> mounts;
+
+            for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev)) {
+                auto resolved = repo->resolveSubmoduleUrl(submodule.url, repoInfo.url);
+                debug("Git submodule %s: %s %s %s -> %s",
+                    submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved);
+                fetchers::Attrs attrs;
+                attrs.insert_or_assign("type", "git");
+                attrs.insert_or_assign("url", resolved);
+                if (submodule.branch != "")
+                    attrs.insert_or_assign("ref", submodule.branch);
+                attrs.insert_or_assign("rev", submoduleRev.gitRev());
+                auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
+                auto [submoduleAccessor, submoduleInput2] =
+                    submoduleInput.getAccessor(store);
+                mounts.insert_or_assign(submodule.path, submoduleAccessor);
+            }
+
+            if (!mounts.empty()) {
+                mounts.insert_or_assign(CanonPath::root, accessor);
+                accessor = makeMountedInputAccessor(std::move(mounts));
+            }
+        }
+
+        assert(!origRev || origRev == rev);
+        if (!getShallowAttr(input))
+            input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
+        input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
+
+        return {accessor, std::move(input)};
+    }
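
The function above hands back the accessor together with the locked input; callers read files through the accessor instead of a store path. A usage sketch under that assumption:

    // Sketch: fetch an input and read through its accessor.
    auto [accessor, lockedInput] = input.getAccessor(store);
    assert(lockedInput.getRev());                     // locked to a commit now
    auto flake = accessor->readFile(CanonPath("flake.nix"));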
+
+    std::pair<ref<InputAccessor>, Input> getAccessorFromWorkdir(
+        ref<Store> store,
+        RepoInfo & repoInfo,
+        Input && input) const
+    {
+        if (getSubmodulesAttr(input))
+            /* Create mountpoints for the submodules. */
+            for (auto & submodule : repoInfo.workdirInfo.submodules)
+                repoInfo.workdirInfo.files.insert(submodule.path);
+
+        ref<InputAccessor> accessor =
+            AllowListInputAccessor::create(
+                makeFSInputAccessor(CanonPath(repoInfo.url)),
+                std::move(repoInfo.workdirInfo.files),
+                makeNotAllowedError(repoInfo.url));
+
+        /* If the repo has submodules, return a mounted input accessor
+           consisting of the accessor for the top-level repo and the
+           accessors for the submodule workdirs. */
+        if (getSubmodulesAttr(input) && !repoInfo.workdirInfo.submodules.empty()) {
+            std::map<CanonPath, ref<InputAccessor>> mounts;
+
+            for (auto & submodule : repoInfo.workdirInfo.submodules) {
+                auto submodulePath = CanonPath(repoInfo.url) + submodule.path;
+                fetchers::Attrs attrs;
+                attrs.insert_or_assign("type", "git");
+                attrs.insert_or_assign("url", submodulePath.abs());
+                auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
+                auto [submoduleAccessor, submoduleInput2] =
+                    submoduleInput.getAccessor(store);
+
+                /* If the submodule is dirty, mark this repo dirty as
+                   well. */
+                if (!submoduleInput2.getRev())
+                    repoInfo.workdirInfo.isDirty = true;
+
+                mounts.insert_or_assign(submodule.path, submoduleAccessor);
+            }
+
+            mounts.insert_or_assign(CanonPath::root, accessor);
+            accessor = makeMountedInputAccessor(std::move(mounts));
+        }
+
+        if (!repoInfo.workdirInfo.isDirty) {
+            auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
+
+            if (auto ref = repo->getWorkdirRef())
+                input.attrs.insert_or_assign("ref", *ref);
+
+            auto rev = repoInfo.workdirInfo.headRev.value();
+
+            input.attrs.insert_or_assign("rev", rev.gitRev());
+            input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));
+
+            verifyCommit(input, repo);
+        } else {
+            repoInfo.warnDirty();
+
+            if (repoInfo.workdirInfo.headRev) {
+                input.attrs.insert_or_assign("dirtyRev",
+                    repoInfo.workdirInfo.headRev->gitRev() + "-dirty");
+                input.attrs.insert_or_assign("dirtyShortRev",
+                    repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
+            }
+
+            verifyCommit(input, nullptr);
+        }
+
+        input.attrs.insert_or_assign(
+            "lastModified",
+            repoInfo.workdirInfo.headRev
+                ? getLastModified(repoInfo, repoInfo.url, *repoInfo.workdirInfo.headRev)
+                : 0);
+
+        input.locked = true; // FIXME
+
+        return {accessor, std::move(input)};
+    }
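
Note that the workdir case deliberately filters through an allow-list: only files known to Git (plus submodule mount points) are visible, so untracked files cannot leak into evaluation. A minimal sketch of that filtering idea (hypothetical helper, not the actual AllowListInputAccessor):

    // Sketch: a path is visible iff it is a tracked path, or lies on the
    // way to one; everything else yields the "not allowed" error.
    bool isAllowed(const std::set<CanonPath> & files, const CanonPath & p)
    {
        for (auto & f : files)
            if (f == p || p.isWithin(f) || f.isWithin(p))
                return true;
        return false;
    }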
";s" : ""); + else + return std::nullopt; } }; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 6c9b29721..498e41357 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -42,7 +42,7 @@ struct GitArchiveInputScheme : InputScheme auto size = path.size(); if (size == 3) { if (std::regex_match(path[2], revRegex)) - rev = Hash::parseAny(path[2], htSHA1); + rev = Hash::parseAny(path[2], HashAlgorithm::SHA1); else if (std::regex_match(path[2], refRegex)) ref = path[2]; else @@ -68,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme if (name == "rev") { if (rev) throw BadURL("URL '%s' contains multiple commit hashes", url.url); - rev = Hash::parseAny(value, htSHA1); + rev = Hash::parseAny(value, HashAlgorithm::SHA1); } else if (name == "ref") { if (!std::regex_match(value, refRegex)) @@ -201,7 +201,7 @@ struct GitArchiveInputScheme : InputScheme {"rev", rev->gitRev()}, }); - if (auto res = getCache()->lookup(store, lockedAttrs)) { + if (auto res = getCache()->lookup(*store, lockedAttrs)) { input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified")); return {std::move(res->second), input}; } @@ -213,7 +213,7 @@ struct GitArchiveInputScheme : InputScheme input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified)); getCache()->add( - store, + *store, lockedAttrs, { {"rev", rev->gitRev()}, @@ -229,6 +229,14 @@ struct GitArchiveInputScheme : InputScheme { return Xp::Flakes; } + + std::optional getFingerprint(ref store, const Input & input) const override + { + if (auto rev = input.getRev()) + return rev->gitRev(); + else + return std::nullopt; + } }; struct GitHubInputScheme : GitArchiveInputScheme @@ -276,7 +284,7 @@ struct GitHubInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); + auto rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; } @@ -348,7 +356,7 @@ struct GitLabInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); + auto rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; } @@ -440,7 +448,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme if(!id) throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref); - auto rev = Hash::parseAny(*id, htSHA1); + auto rev = Hash::parseAny(*id, HashAlgorithm::SHA1); debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev()); return rev; } diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index 8e30284c6..002c0c292 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme if (path.size() == 1) { } else if (path.size() == 2) { if (std::regex_match(path[1], revRegex)) - rev = Hash::parseAny(path[1], htSHA1); + rev = Hash::parseAny(path[1], HashAlgorithm::SHA1); else if (std::regex_match(path[1], refRegex)) ref = path[1]; else @@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme ref = path[1]; if (!std::regex_match(path[2], revRegex)) throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]); - rev = Hash::parseAny(path[2], htSHA1); + rev = 
+            rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
         } else
             throw BadURL("GitHub URL '%s' is invalid", url.url);

diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc
deleted file mode 100644
index d1d450cf7..000000000
--- a/src/libfetchers/input-accessor.cc
+++ /dev/null
@@ -1,98 +0,0 @@
-#include "input-accessor.hh"
-#include "store-api.hh"
-
-namespace nix {
-
-StorePath InputAccessor::fetchToStore(
-    ref<Store> store,
-    const CanonPath & path,
-    std::string_view name,
-    FileIngestionMethod method,
-    PathFilter * filter,
-    RepairFlag repair)
-{
-    Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
-
-    auto source = sinkToSource([&](Sink & sink) {
-        if (method == FileIngestionMethod::Recursive)
-            dumpPath(path, sink, filter ? *filter : defaultPathFilter);
-        else
-            readFile(path, sink);
-    });
-
-    auto storePath =
-        settings.readOnlyMode
-        ? store->computeStorePathFromDump(*source, name, method, htSHA256).first
-        : store->addToStoreFromDump(*source, name, method, htSHA256, repair);
-
-    return storePath;
-}
-
-SourcePath InputAccessor::root()
-{
-    return {ref(shared_from_this()), CanonPath::root};
-}
-
-std::ostream & operator << (std::ostream & str, const SourcePath & path)
-{
-    str << path.to_string();
-    return str;
-}
-
-StorePath SourcePath::fetchToStore(
-    ref<Store> store,
-    std::string_view name,
-    FileIngestionMethod method,
-    PathFilter * filter,
-    RepairFlag repair) const
-{
-    return accessor->fetchToStore(store, path, name, method, filter, repair);
-}
-
-std::string_view SourcePath::baseName() const
-{
-    return path.baseName().value_or("source");
-}
-
-SourcePath SourcePath::parent() const
-{
-    auto p = path.parent();
-    assert(p);
-    return {accessor, std::move(*p)};
-}
-
-SourcePath SourcePath::resolveSymlinks() const
-{
-    auto res = accessor->root();
-
-    int linksAllowed = 1024;
-
-    std::list<std::string> todo;
-    for (auto & c : path)
-        todo.push_back(std::string(c));
-
-    while (!todo.empty()) {
-        auto c = *todo.begin();
-        todo.pop_front();
-        if (c == "" || c == ".")
-            ;
-        else if (c == "..")
-            res.path.pop();
-        else {
-            res.path.push(c);
-            if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
-                if (!linksAllowed--)
-                    throw Error("infinite symlink recursion in path '%s'", path);
-                auto target = res.readLink();
-                res.path.pop();
-                if (hasPrefix(target, "/"))
-                    res.path = CanonPath::root;
-                todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
-            }
-        }
-    }
-
-    return res;
-}
-
-}

diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh
deleted file mode 100644
index 9c688a234..000000000
--- a/src/libfetchers/input-accessor.hh
+++ /dev/null
@@ -1,169 +0,0 @@
-#pragma once
-///@file
-
-#include "source-accessor.hh"
-#include "ref.hh"
-#include "types.hh"
-#include "file-system.hh"
-#include "repair-flag.hh"
-#include "content-address.hh"
-
-namespace nix {
-
-MakeError(RestrictedPathError, Error);
-
-struct SourcePath;
-class StorePath;
-class Store;
-
-struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
-{
-    /**
-     * Return the maximum last-modified time of the files in this
-     * tree, if available.
-     */
-    virtual std::optional<time_t> getLastModified()
-    {
-        return std::nullopt;
-    }
-
-    StorePath fetchToStore(
-        ref<Store> store,
-        const CanonPath & path,
-        std::string_view name = "source",
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
-        PathFilter * filter = nullptr,
-        RepairFlag repair = NoRepair);
-
-    SourcePath root();
-};
-
-/**
- * An abstraction for accessing source files during
- * evaluation. Currently, it's just a wrapper around `CanonPath` that
- * accesses files in the regular filesystem, but in the future it will
- * support fetching files in other ways.
- */
-struct SourcePath
-{
-    ref<InputAccessor> accessor;
-    CanonPath path;
-
-    std::string_view baseName() const;
-
-    /**
-     * Construct the parent of this `SourcePath`. Aborts if `this`
-     * denotes the root.
-     */
-    SourcePath parent() const;
-
-    /**
-     * If this `SourcePath` denotes a regular file (not a symlink),
-     * return its contents; otherwise throw an error.
-     */
-    std::string readFile() const
-    { return accessor->readFile(path); }
-
-    /**
-     * Return whether this `SourcePath` denotes a file (of any type)
-     * that exists
-     */
-    bool pathExists() const
-    { return accessor->pathExists(path); }
-
-    /**
-     * Return stats about this `SourcePath`, or throw an exception if
-     * it doesn't exist.
-     */
-    InputAccessor::Stat lstat() const
-    { return accessor->lstat(path); }
-
-    /**
-     * Return stats about this `SourcePath`, or std::nullopt if it
-     * doesn't exist.
-     */
-    std::optional<InputAccessor::Stat> maybeLstat() const
-    { return accessor->maybeLstat(path); }
-
-    /**
-     * If this `SourcePath` denotes a directory (not a symlink),
-     * return its directory entries; otherwise throw an error.
-     */
-    InputAccessor::DirEntries readDirectory() const
-    { return accessor->readDirectory(path); }
-
-    /**
-     * If this `SourcePath` denotes a symlink, return its target;
-     * otherwise throw an error.
-     */
-    std::string readLink() const
-    { return accessor->readLink(path); }
-
-    /**
-     * Dump this `SourcePath` to `sink` as a NAR archive.
-     */
-    void dumpPath(
-        Sink & sink,
-        PathFilter & filter = defaultPathFilter) const
-    { return accessor->dumpPath(path, sink, filter); }
-
-    /**
-     * Copy this `SourcePath` to the Nix store.
-     */
-    StorePath fetchToStore(
-        ref<Store> store,
-        std::string_view name = "source",
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
-        PathFilter * filter = nullptr,
-        RepairFlag repair = NoRepair) const;
-
-    /**
-     * Return the location of this path in the "real" filesystem, if
-     * it has a physical location.
-     */
-    std::optional<CanonPath> getPhysicalPath() const
-    { return accessor->getPhysicalPath(path); }
-
-    std::string to_string() const
-    { return path.abs(); }
-
-    /**
-     * Append a `CanonPath` to this path.
-     */
-    SourcePath operator + (const CanonPath & x) const
-    { return {accessor, path + x}; }
-
-    /**
-     * Append a single component `c` to this path. `c` must not
-     * contain a slash. A slash is implicitly added between this path
-     * and `c`.
-     */
-    SourcePath operator + (std::string_view c) const
-    { return {accessor, path + c}; }
-
-    bool operator == (const SourcePath & x) const
-    {
-        return std::tie(accessor, path) == std::tie(x.accessor, x.path);
-    }
-
-    bool operator != (const SourcePath & x) const
-    {
-        return std::tie(accessor, path) != std::tie(x.accessor, x.path);
-    }
-
-    bool operator < (const SourcePath & x) const
-    {
-        return std::tie(accessor, path) < std::tie(x.accessor, x.path);
-    }
-
-    /**
-     * Resolve any symlinks in this `SourcePath` (including its
-     * parents). The result is a `SourcePath` in which no element is a
-     * symlink.
-     */
-    SourcePath resolveSymlinks() const;
-};
-
-std::ostream & operator << (std::ostream & str, const SourcePath & path);
-
-}

diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk
index 2e8869d83..e54db4937 100644
--- a/src/libfetchers/local.mk
+++ b/src/libfetchers/local.mk
@@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)

 libfetchers_CXXFLAGS += -I src/libutil -I src/libstore

-libfetchers_LDFLAGS += -pthread
+libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) -larchive

 libfetchers_LIBS = libutil libstore

diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc
index 057f3e37f..88a2e34e8 100644
--- a/src/libfetchers/memory-input-accessor.cc
+++ b/src/libfetchers/memory-input-accessor.cc
@@ -1,5 +1,6 @@
 #include "memory-input-accessor.hh"
 #include "memory-source-accessor.hh"
+#include "source-path.hh"

 namespace nix {

diff --git a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh
index b75b02bfd..508b07722 100644
--- a/src/libfetchers/memory-input-accessor.hh
+++ b/src/libfetchers/memory-input-accessor.hh
@@ -1,4 +1,5 @@
 #include "input-accessor.hh"
+#include "source-path.hh"

 namespace nix {

diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 9244acf39..9982389ab 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -6,6 +6,7 @@
 #include "tarfile.hh"
 #include "store-api.hh"
 #include "url-parts.hh"
+#include "posix-source-accessor.hh"

 #include "fetch-settings.hh"

@@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme
                 return files.count(file);
             };

-            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+            PosixSourceAccessor accessor;
+            auto storePath = store->addToStore(
+                input.getName(),
+                accessor, CanonPath { actualPath },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
+                filter);

             return {std::move(storePath), input};
         }
@@ -218,16 +224,16 @@ struct MercurialInputScheme : InputScheme
         if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");

-        auto checkHashType = [&](const std::optional<Hash> & hash)
+        auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
         {
-            if (hash.has_value() && hash->type != htSHA1)
+            if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
                 throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
         };

         auto getLockedAttrs = [&]()
         {
-            checkHashType(input.getRev());
+            checkHashAlgorithm(input.getRev());

             return Attrs({
                 {"type", "hg"},
@@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme
         };

         if (input.getRev()) {
-            if (auto res = getCache()->lookup(store, getLockedAttrs()))
+            if (auto res = getCache()->lookup(*store, getLockedAttrs()))
                 return makeResult(res->first, std::move(res->second));
         }

@@ -259,15 +265,15 @@ struct MercurialInputScheme : InputScheme
             {"ref", *input.getRef()},
         });

-        if (auto res = getCache()->lookup(store, unlockedAttrs)) {
-            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+        if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
+            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
             if (!input.getRev() || input.getRev() == rev2) {
                 input.attrs.insert_or_assign("rev", rev2.gitRev());
                 return makeResult(res->first, std::move(res->second));
             }
         }

-        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
+        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));

         /* If this is a commit hash that we already have, we don't
            have to pull again. */
@@ -301,11 +307,11 @@ struct MercurialInputScheme : InputScheme
                 runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
         assert(tokens.size() == 3);

-        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
+        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
         auto revCount = std::stoull(tokens[1]);
         input.attrs.insert_or_assign("ref", tokens[2]);

-        if (auto res = getCache()->lookup(store, getLockedAttrs()))
+        if (auto res = getCache()->lookup(*store, getLockedAttrs()))
             return makeResult(res->first, std::move(res->second));

         Path tmpDir = createTempDir();
@@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme

         deletePath(tmpDir + "/.hg_archival.txt");

-        auto storePath = store->addToStore(name, tmpDir);
+        PosixSourceAccessor accessor;
+        auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });

         Attrs infoAttrs({
             {"rev", input.getRev()->gitRev()},
@@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme

         if (!_input.getRev())
             getCache()->add(
-                store,
+                *store,
                 unlockedAttrs,
                 infoAttrs,
                 storePath,
                 false);

         getCache()->add(
-            store,
+            *store,
             getLockedAttrs(),
             infoAttrs,
             storePath,
@@ -339,6 +346,14 @@ struct MercurialInputScheme : InputScheme

         return makeResult(infoAttrs, std::move(storePath));
     }
+
+    std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
+    {
+        if (auto rev = input.getRev())
+            return rev->gitRev();
+        else
+            return std::nullopt;
+    }
 };

 static auto rMercurialInputScheme = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
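
The new file below implements the mount table used for the submodule accessors earlier in this change: a map from canonical paths to accessors, where lookups walk up the path until they reach the nearest mount point. Roughly:

    // Behaviour of MountedInputAccessor::resolve(), defined below:
    //   mounts = { "/" -> rootAccessor, "/sub" -> subAccessor }
    //   resolve("/sub/x/y")  ==>  (subAccessor, "/x/y")
    //   resolve("/other")    ==>  (rootAccessor, "/other")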
diff --git a/src/libfetchers/mounted-input-accessor.cc b/src/libfetchers/mounted-input-accessor.cc
new file mode 100644
index 000000000..6f397eb17
--- /dev/null
+++ b/src/libfetchers/mounted-input-accessor.cc
@@ -0,0 +1,77 @@
+#include "mounted-input-accessor.hh"
+
+namespace nix {
+
+struct MountedInputAccessor : InputAccessor
+{
+    std::map<CanonPath, ref<InputAccessor>> mounts;
+
+    MountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> _mounts)
+        : mounts(std::move(_mounts))
+    {
+        // Currently we require a root filesystem. This could be relaxed.
+        assert(mounts.contains(CanonPath::root));
+
+        // FIXME: return dummy parent directories automatically?
+    }
+
+    std::string readFile(const CanonPath & path) override
+    {
+        auto [accessor, subpath] = resolve(path);
+        return accessor->readFile(subpath);
+    }
+
+    bool pathExists(const CanonPath & path) override
+    {
+        auto [accessor, subpath] = resolve(path);
+        return accessor->pathExists(subpath);
+    }
+
+    std::optional<Stat> maybeLstat(const CanonPath & path) override
+    {
+        auto [accessor, subpath] = resolve(path);
+        return accessor->maybeLstat(subpath);
+    }
+
+    DirEntries readDirectory(const CanonPath & path) override
+    {
+        auto [accessor, subpath] = resolve(path);
+        return accessor->readDirectory(subpath);
+    }
+
+    std::string readLink(const CanonPath & path) override
+    {
+        auto [accessor, subpath] = resolve(path);
+        return accessor->readLink(subpath);
+    }
+
+    std::string showPath(const CanonPath & path) override
+    {
+        auto [accessor, subpath] = resolve(path);
+        return accessor->showPath(subpath);
+    }
+
+    std::pair<ref<InputAccessor>, CanonPath> resolve(CanonPath path)
+    {
+        // Find the nearest parent of `path` that is a mount point.
+        std::vector<std::string> subpath;
+        while (true) {
+            auto i = mounts.find(path);
+            if (i != mounts.end()) {
+                std::reverse(subpath.begin(), subpath.end());
+                return {i->second, CanonPath(subpath)};
+            }
+
+            assert(!path.isRoot());
+            subpath.push_back(std::string(*path.baseName()));
+            path.pop();
+        }
+    }
+};
+
+ref<InputAccessor> makeMountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> mounts)
+{
+    return make_ref<MountedInputAccessor>(std::move(mounts));
+}
+
+}

diff --git a/src/libfetchers/mounted-input-accessor.hh b/src/libfetchers/mounted-input-accessor.hh
new file mode 100644
index 000000000..b557c5dad
--- /dev/null
+++ b/src/libfetchers/mounted-input-accessor.hh
@@ -0,0 +1,9 @@
+#pragma once
+
+#include "input-accessor.hh"
+
+namespace nix {
+
+ref<InputAccessor> makeMountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> mounts);
+
+}
files", url); auto topDir = tmpDir + "/" + members.begin()->name; lastModified = lstat(topDir).st_mtime; - unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair); + PosixSourceAccessor accessor; + unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair); } Attrs infoAttrs({ @@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball( infoAttrs.emplace("immutableUrl", *res.immutableUrl); getCache()->add( - store, + *store, inAttrs, infoAttrs, *unpackedStorePath, diff --git a/src/libmain/local.mk b/src/libmain/local.mk index 99da95e27..5c7061863 100644 --- a/src/libmain/local.mk +++ b/src/libmain/local.mk @@ -14,4 +14,4 @@ libmain_LIBS = libstore libutil libmain_ALLOW_UNDEFINED = 1 -$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644)) +$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644)) diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index a7aee47c3..3aa012ee1 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -340,6 +340,14 @@ public: state->activitiesByType[type].expected += j; update(*state); } + + else if (type == resFetchStatus) { + auto i = state->its.find(act); + assert(i != state->its.end()); + ActInfo & actInfo = *i->second; + actInfo.lastLine = getS(fields, 0); + update(*state); + } } void update(State & state) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ae483c95e..ea1279e2e 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -12,6 +12,7 @@ #include "thread-pool.hh" #include "callback.hh" #include "signals.hh" +#include "archive.hh" #include #include @@ -27,7 +28,8 @@ BinaryCacheStore::BinaryCacheStore(const Params & params) , Store(params) { if (secretKeyFile != "") - secretKey = std::unique_ptr(new SecretKey(readFile(secretKeyFile))); + signer = std::make_unique( + SecretKey { readFile(secretKeyFile) }); StringSink sink; sink << narVersionMagic1; @@ -143,9 +145,9 @@ ref BinaryCacheStore::addToStoreCommon( /* Read the NAR simultaneously into a CompressionSink+FileSink (to write the compressed NAR to disk), into a HashSink (to get the NAR hash), and into a NarAccessor (to get the NAR listing). */ - HashSink fileHashSink { htSHA256 }; + HashSink fileHashSink { HashAlgorithm::SHA256 }; std::shared_ptr narAccessor; - HashSink narHashSink { htSHA256 }; + HashSink narHashSink { HashAlgorithm::SHA256 }; { FdSink fileSink(fdTemp.get()); TeeSink teeSinkCompressed { fileSink, fileHashSink }; @@ -165,8 +167,8 @@ ref BinaryCacheStore::addToStoreCommon( auto [fileHash, fileSize] = fileHashSink.finish(); narInfo->fileHash = fileHash; narInfo->fileSize = fileSize; - narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar" - + (compression == "xz" ? ".xz" : + narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar" + + (compression == "xz" ? ".xz" : compression == "bzip2" ? ".bz2" : compression == "zstd" ? ".zst" : compression == "lzip" ? 
".lzip" : @@ -273,7 +275,7 @@ ref BinaryCacheStore::addToStoreCommon( stats.narWriteCompressionTimeMs += duration; /* Atomically write the NAR info file.*/ - if (secretKey) narInfo->sign(*this, *secretKey); + if (signer) narInfo->sign(*this, *signer); writeNarInfo(narInfo); @@ -300,24 +302,60 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource }}); } -StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) +StorePath BinaryCacheStore::addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { - if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) - unsupported("addToStoreFromDump"); - return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) { + std::optional caHash; + std::string nar; + + if (auto * dump2p = dynamic_cast(&dump)) { + auto & dump2 = *dump2p; + // Hack, this gives us a "replayable" source so we can compute + // multiple hashes more easily. + caHash = hashString(HashAlgorithm::SHA256, dump2.s); + switch (method.getFileIngestionMethod()) { + case FileIngestionMethod::Recursive: + // The dump is already NAR in this case, just use it. + nar = dump2.s; + break; + case FileIngestionMethod::Flat: + // The dump is Flat, so we need to convert it to NAR with a + // single file. + StringSink s; + dumpString(dump2.s, s); + nar = std::move(s.s); + break; + } + } else { + // Otherwise, we have to do th same hashing as NAR so our single + // hash will suffice for both purposes. + if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) + unsupported("addToStoreFromDump"); + } + StringSource narDump { nar }; + + // Use `narDump` if we wrote to `nar`. + Source & narDump2 = nar.size() > 0 + ? static_cast(narDump) + : dump; + + return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { *this, name, - FixedOutputInfo { - .method = method, - .hash = nar.first, - .references = { + ContentAddressWithReferences::fromParts( + method, + caHash ? *caHash : nar.first, + { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus .self = false, - }, - }, + }), nar.first, }; info.narSize = nar.second; @@ -400,71 +438,35 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, StorePath BinaryCacheStore::addToStore( std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) + RepairFlag repair) { /* FIXME: Make BinaryCacheStore::addToStoreCommon support non-recursive+sha256 so we can just use the default implementation of this method in terms of addToStoreFromDump. 
@@ -400,71 +438,35 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,

 StorePath BinaryCacheStore::addToStore(
     std::string_view name,
-    const Path & srcPath,
-    FileIngestionMethod method,
-    HashType hashAlgo,
+    SourceAccessor & accessor,
+    const CanonPath & path,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
     PathFilter & filter,
-    RepairFlag repair,
-    const StorePathSet & references)
+    RepairFlag repair)
 {
     /* FIXME: Make BinaryCacheStore::addToStoreCommon support
        non-recursive+sha256 so we can just use the default
        implementation of this method in terms of addToStoreFromDump. */
-    HashSink sink { hashAlgo };
-    if (method == FileIngestionMethod::Recursive) {
-        dumpPath(srcPath, sink, filter);
-    } else {
-        readFile(srcPath, sink);
-    }
-    auto h = sink.finish().first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;

     auto source = sinkToSource([&](Sink & sink) {
-        dumpPath(srcPath, sink, filter);
+        accessor.dumpPath(path, sink, filter);
     });
     return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
         ValidPathInfo info {
             *this,
             name,
-            FixedOutputInfo {
-                .method = method,
-                .hash = h,
-                .references = {
+            ContentAddressWithReferences::fromParts(
+                method,
+                h,
+                {
                     .others = references,
                     // caller is not capable of creating a self-reference, because this is content-addressed without modulus
                     .self = false,
-                },
-            },
-            nar.first,
-        };
-        info.narSize = nar.second;
-        return info;
-    })->path;
-}
-
-StorePath BinaryCacheStore::addTextToStore(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references,
-    RepairFlag repair)
-{
-    auto textHash = hashString(htSHA256, s);
-    auto path = makeTextPath(name, TextInfo { { textHash }, references });
-
-    if (!repair && isValidPath(path))
-        return path;
-
-    StringSink sink;
-    dumpString(s, sink);
-    StringSource source(sink.s);
-    return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
-        ValidPathInfo info {
-            *this,
-            std::string { name },
-            TextInfo {
-                .hash = textHash,
-                .references = references,
-            },
+                }),
             nar.first,
         };
         info.narSize = nar.second;

diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index cea2a571f..00ab73905 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -1,7 +1,7 @@
 #pragma once
 ///@file

-#include "crypto.hh"
+#include "signature/local-keys.hh"
 #include "store-api.hh"
 #include "log-store.hh"

@@ -57,8 +57,7 @@ class BinaryCacheStore : public virtual BinaryCacheStoreConfig,
 {
 private:

-    std::unique_ptr<SecretKey> secretKey;
+    std::unique_ptr<Signer> signer;

 protected:

@@ -123,22 +122,22 @@ public:

     void addToStore(const ValidPathInfo & info, Source & narSource,
         RepairFlag repair, CheckSigsFlag checkSigs) override;

-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+    StorePath addToStoreFromDump(
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        RepairFlag repair) override;

     StorePath addToStore(
         std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
-        HashType hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override;
-
-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
+        SourceAccessor & accessor,
+        const CanonPath & srcPath,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
         const StorePathSet & references,
+        PathFilter & filter,
         RepairFlag repair) override;

     void registerDrvOutput(const Realisation & info) override;

diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 360c6b70b..f8728ed4a 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -196,10 +196,19 @@ void DerivationGoal::loadDerivation()
        things being garbage collected while we're busy. */
     worker.evalStore.addTempRoot(drvPath);

-    assert(worker.evalStore.isValidPath(drvPath));
+    /* Get the derivation. It is probably in the eval store, but it might be in the main store:

-    /* Get the derivation. */
-    drv = std::make_unique<Derivation>(worker.evalStore.readDerivation(drvPath));
+       - Resolved derivations are resolved against main store realisations, and so must be stored there.
+
+       - Dynamic derivations are built, and so are found in the main store.
+     */
+    for (auto * drvStore : { &worker.evalStore, &worker.store }) {
+        if (drvStore->isValidPath(drvPath)) {
+            drv = std::make_unique<Derivation>(drvStore->readDerivation(drvPath));
+            break;
+        }
+    }
+    assert(drv);

     haveDerivation();
 }
@@ -401,11 +410,15 @@ void DerivationGoal::gaveUpOnSubstitution()
     }

     /* Copy the input sources from the eval store to the build
-       store. */
+       store.
+
+       Note that some inputs might not be in the eval store because they
+       are (resolved) derivation outputs in a resolved derivation. */
     if (&worker.evalStore != &worker.store) {
         RealisedPath::Set inputSrcs;
         for (auto & i : drv->inputSrcs)
-            inputSrcs.insert(i);
+            if (worker.evalStore.isValidPath(i))
+                inputSrcs.insert(i);
         copyClosure(worker.evalStore, worker.store, inputSrcs);
     }

@@ -453,7 +466,7 @@ void DerivationGoal::repairClosure()
     std::map<StorePath, StorePath> outputsToDrv;
     for (auto & i : inputClosure)
         if (i.isDerivation()) {
-            auto depOutputs = worker.store.queryPartialDerivationOutputMap(i);
+            auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
             for (auto & j : depOutputs)
                 if (j.second)
                     outputsToDrv.insert_or_assign(*j.second, i);
@@ -558,7 +571,7 @@ void DerivationGoal::inputsRealised()
                inputDrvOutputs statefully, sometimes it gets out of sync with
                the real source of truth (store). So we query the store
                directly if there's a problem. */
-            attempt = fullDrv.tryResolve(worker.store);
+            attempt = fullDrv.tryResolve(worker.store, &worker.evalStore);
         }
         assert(attempt);
         Derivation drvResolved { std::move(*attempt) };
@@ -604,7 +617,13 @@ void DerivationGoal::inputsRealised()
                     return *outPath;
                 }
                 else {
-                    auto outMap = worker.evalStore.queryDerivationOutputMap(depDrvPath);
+                    auto outMap = [&]{
+                        for (auto * drvStore : { &worker.evalStore, &worker.store })
+                            if (drvStore->isValidPath(depDrvPath))
+                                return worker.store.queryDerivationOutputMap(depDrvPath, drvStore);
+                        assert(false);
+                    }();
+
                     auto outMapPath = outMap.find(outputName);
                     if (outMapPath == outMap.end()) {
                         throw Error(
@@ -1085,8 +1104,12 @@ void DerivationGoal::resolvedFinished()
         auto newRealisation = realisation;
         newRealisation.id = DrvOutput { initialOutput->outputHash, outputName };
         newRealisation.signatures.clear();
-        if (!drv->type().isFixed())
-            newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
+        if (!drv->type().isFixed()) {
+            auto & drvStore = worker.evalStore.isValidPath(drvPath)
+                ? worker.evalStore
+                : worker.store;
+            newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore);
+        }
         signRealisation(newRealisation);
         worker.store.registerDrvOutput(newRealisation);
     }
@@ -1317,9 +1340,26 @@ void DerivationGoal::handleChildOutput(int fd, std::string_view data)
             auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
             // ensure that logs from a builder using `ssh-ng://` as protocol
             // are also available to `nix log`.
-            if (s && !isWrittenToLog && logSink && (*json)["type"] == resBuildLogLine) {
-                auto f = (*json)["fields"];
-                (*logSink)((f.size() > 0 ? f.at(0).get<std::string>() : "") + "\n");
+            if (s && !isWrittenToLog && logSink) {
+                const auto type = (*json)["type"];
+                const auto fields = (*json)["fields"];
+                if (type == resBuildLogLine) {
+                    (*logSink)((fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n");
+                } else if (type == resSetPhase && ! fields.is_null()) {
+                    const auto phase = fields[0];
+                    if (! phase.is_null()) {
+                        // nixpkgs' stdenv produces lines in the log to signal
+                        // phase changes.
+                        // We want to get the same lines in case of remote builds.
+                        // The format is:
+                        // @nix { "action": "setPhase", "phase": "$curPhase" }
+                        const auto logLine = nlohmann::json::object({
+                            {"action", "setPhase"},
+                            {"phase", phase}
+                        });
+                        (*logSink)("@nix " + logLine.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace) + "\n");
+                    }
+                }
             }
         }
         currentHookLine.clear();
@@ -1362,7 +1402,10 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri
             res.insert_or_assign(name, output.path(worker.store, drv->name, name));
         return res;
     } else {
-        return worker.store.queryPartialDerivationOutputMap(drvPath);
+        for (auto * drvStore : { &worker.evalStore, &worker.store })
+            if (drvStore->isValidPath(drvPath))
+                return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore);
+        assert(false);
     }
 }

@@ -1375,7 +1418,10 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()
             res.insert_or_assign(name, *output.second);
         return res;
     } else {
-        return worker.store.queryDerivationOutputMap(drvPath);
+        for (auto * drvStore : { &worker.evalStore, &worker.store })
+            if (drvStore->isValidPath(drvPath))
+                return worker.store.queryDerivationOutputMap(drvPath, drvStore);
+        assert(false);
     }
 }

@@ -1474,6 +1520,7 @@ void DerivationGoal::done(
     SingleDrvOutputs builtOutputs,
     std::optional<Error> ex)
 {
+    outputLocks.unlock();
     buildResult.status = status;
     if (ex)
         buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));

diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc
index 13ff22f45..74eca63f3 100644
--- a/src/libstore/build/entry-points.cc
+++ b/src/libstore/build/entry-points.cc
@@ -15,7 +15,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod

     worker.run(goals);

-    StorePathSet failed;
+    StringSet failed;
     std::optional<Error> ex;
     for (auto & i : goals) {
         if (i->ex) {
@@ -26,9 +26,9 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
         }
         if (i->exitCode != Goal::ecSuccess) {
             if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
-                failed.insert(i2->drvPath);
+                failed.insert(std::string { i2->drvPath.to_string() });
             else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
-                failed.insert(i2->storePath);
+                failed.insert(std::string { i2->storePath.to_string()});
         }
     }

@@ -37,7 +37,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
         throw std::move(*ex);
     } else if (!failed.empty()) {
         if (ex) logError(ex->info());
-        throw Error(worker.failingExitStatus(), "build of %s failed", showPaths(failed));
+        throw Error(worker.failingExitStatus(), "build of %s failed", concatStringsSep(", ", quoteStrings(failed)));
     }
 }

diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index a9f930773..b01d9e237 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -19,6 +19,8 @@
 #include "namespaces.hh"
 #include "child.hh"
 #include "unix-domain-socket.hh"
+#include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"

 #include
 #include

@@ -1065,8 +1067,8 @@ void LocalDerivationGoal::initTmpDir() {
         if (passAsFile.find(i.first) == passAsFile.end()) {
env[i.first] = i.second; } else { - auto hash = hashString(htSHA256, i.first); - std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false); + auto hash = hashString(HashAlgorithm::SHA256, i.first); + std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false); Path p = tmpDir + "/" + fn; writeFile(p, rewriteStrings(i.second, inputRewrites)); chownToBuilder(p); @@ -1290,12 +1292,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In StorePath addToStore( std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override + RepairFlag repair) override { throw Error("addToStore"); } void addToStore(const ValidPathInfo & info, Source & narSource, @@ -1305,26 +1308,15 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In goal.addDependency(info.path); } - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair = NoRepair) override - { - auto path = next->addTextToStore(name, s, references, repair); - goal.addDependency(path); - return path; - } - StorePath addToStoreFromDump( Source & dump, std::string_view name, - FileIngestionMethod method, - HashType hashAlgo, - RepairFlag repair, - const StorePathSet & references) override + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) override { - auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references); + auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair); goal.addDependency(path); return path; } @@ -2452,8 +2444,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() throw BuildError( "output path %1% without valid stats info", actualPath); - if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } || - outputHash.method == ContentAddressMethod { TextIngestionMethod {} }) + if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat) { /* The output path should be a regular file without execute permission. 
*/ if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0) @@ -2465,38 +2456,23 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() rewriteOutput(outputRewrites); /* FIXME optimize and deduplicate with addToStore */ std::string oldHashPart { scratchPath->hashPart() }; - HashModuloSink caSink { outputHash.hashType, oldHashPart }; - std::visit(overloaded { - [&](const TextIngestionMethod &) { - readFile(actualPath, caSink); - }, - [&](const FileIngestionMethod & m2) { - switch (m2) { - case FileIngestionMethod::Recursive: - dumpPath(actualPath, caSink); - break; - case FileIngestionMethod::Flat: - readFile(actualPath, caSink); - break; - } - }, - }, outputHash.method.raw); - auto got = caSink.finish().first; + auto got = ({ + HashModuloSink caSink { outputHash.hashAlgo, oldHashPart }; + PosixSourceAccessor accessor; + dumpPath( + accessor, CanonPath { actualPath }, + caSink, + outputHash.method.getFileIngestionMethod()); + caSink.finish().first; + }); - auto optCA = ContentAddressWithReferences::fromPartsOpt( - outputHash.method, - std::move(got), - rewriteRefs()); - if (!optCA) { - // TODO track distinct failure modes separately (at the time of - // writing there is just one but `nullopt` is unclear) so this - // message can't get out of sync. - throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing"); - } ValidPathInfo newInfo0 { worker.store, outputPathName(drv->name, outputName), - std::move(*optCA), + ContentAddressWithReferences::fromParts( + outputHash.method, + std::move(got), + rewriteRefs()), Hash::dummy, }; if (*scratchPath != newInfo0.path) { @@ -2510,9 +2486,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string(newInfo0.path.hashPart())}}); } - HashResult narHashAndSize = hashPath(htSHA256, actualPath); - newInfo0.narHash = narHashAndSize.first; - newInfo0.narSize = narHashAndSize.second; + { + PosixSourceAccessor accessor; + HashResult narHashAndSize = hashPath( + accessor, CanonPath { actualPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256); + newInfo0.narHash = narHashAndSize.first; + newInfo0.narSize = narHashAndSize.second; + } assert(newInfo0.ca); return newInfo0; @@ -2530,7 +2511,10 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string { scratchPath->hashPart() }, std::string { requiredFinalPath.hashPart() }); rewriteOutput(outputRewrites); - auto narHashAndSize = hashPath(htSHA256, actualPath); + PosixSourceAccessor accessor; + HashResult narHashAndSize = hashPath( + accessor, CanonPath { actualPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256); ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; newInfo0.narSize = narHashAndSize.second; auto refs = rewriteRefs(); @@ -2545,7 +2529,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { .method = dof.ca.method, - .hashType = wanted.type, + .hashAlgo = wanted.algo, }); /* Check wanted hash */ @@ -2582,7 +2566,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() [&](const DerivationOutput::Impure & doi) { return newInfoFromCA(DerivationOutput::CAFloating { .method = doi.method, - .hashType = doi.hashType, + .hashAlgo = doi.hashAlgo, }); }, @@ -2944,7 +2928,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName) { return worker.store.makeStorePath( "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName), - Hash(htSHA256), outputPathName(drv->name, 
outputName));
 }

@@ -2952,7 +2936,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
 {
     return worker.store.makeStorePath(
         "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
-        Hash(htSHA256), path.name());
+        Hash(HashAlgorithm::SHA256), path.name());
 }

diff --git a/src/libstore/build/sandbox-defaults.sb b/src/libstore/build/sandbox-defaults.sb
index 77f013aea..25ec11285 100644
--- a/src/libstore/build/sandbox-defaults.sb
+++ b/src/libstore/build/sandbox-defaults.sb
@@ -68,6 +68,7 @@ R""(
 (allow file*
        (literal "/dev/null")
        (literal "/dev/random")
+       (literal "/dev/stderr")
        (literal "/dev/stdin")
        (literal "/dev/stdout")
        (literal "/dev/tty")

diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc
index 93867007d..c7e8e2825 100644
--- a/src/libstore/build/substitution-goal.cc
+++ b/src/libstore/build/substitution-goal.cc
@@ -2,6 +2,7 @@
 #include "substitution-goal.hh"
 #include "nar-info.hh"
 #include "finally.hh"
+#include "signals.hh"

 namespace nix {

@@ -217,6 +218,8 @@ void PathSubstitutionGoal::tryToRun()

     thr = std::thread([this]() {
         try {
+            ReceiveInterrupts receiveInterrupts;
+
             /* Wake up the worker loop when we're done. */
             Finally updateStats([this]() { outPipe.writeSide.close(); });

diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index 01914e2d6..399ad47fd 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -199,8 +199,16 @@ void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
     child.respectTimeouts = respectTimeouts;
     children.emplace_back(child);
     if (inBuildSlot) {
-        if (goal->jobCategory() == JobCategory::Substitution) nrSubstitutions++;
-        else nrLocalBuilds++;
+        switch (goal->jobCategory()) {
+        case JobCategory::Substitution:
+            nrSubstitutions++;
+            break;
+        case JobCategory::Build:
+            nrLocalBuilds++;
+            break;
+        default:
+            abort();
+        }
     }
 }

@@ -212,12 +220,17 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
     if (i == children.end()) return;

     if (i->inBuildSlot) {
-        if (goal->jobCategory() == JobCategory::Substitution) {
+        switch (goal->jobCategory()) {
+        case JobCategory::Substitution:
             assert(nrSubstitutions > 0);
             nrSubstitutions--;
-        } else {
+            break;
+        case JobCategory::Build:
             assert(nrLocalBuilds > 0);
             nrLocalBuilds--;
+            break;
+        default:
+            abort();
         }
     }

@@ -506,8 +519,10 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
-        Hash nullHash(htSHA256);
+        HashResult current = hashPath(
+            *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
+            FileIngestionMethod::Recursive, info->narHash.algo);
+        Hash nullHash(HashAlgorithm::SHA256);
         res = info->narHash == nullHash || info->narHash == current.first;
     }
     pathContentsGoodCache.insert_or_assign(path, res);

diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc
index c8911d153..9283251ac 100644
--- a/src/libstore/builtins/buildenv.cc
+++ b/src/libstore/builtins/buildenv.cc
@@ -1,4 +1,5 @@
 #include "buildenv.hh"
+#include "derivations.hh"

 #include
 #include

diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh
index 0923c2adb..8bebd390d 100644
--- a/src/libstore/builtins/buildenv.hh
+++ b/src/libstore/builtins/buildenv.hh
@@ -1,7 +1,6 @@
 #pragma once
 ///@file

-#include "derivations.hh"
 #include "store-api.hh"

 namespace nix {
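
`Worker::pathContentsGood` above now hashes through a `SourceAccessor` rather than a raw path string, the same accessor-based pattern used throughout this change. A usage sketch, assuming the `hashPath` overload shown there:

    // Sketch: NAR-hash a filesystem path via an accessor.
    PosixSourceAccessor accessor;
    auto [hash, size] = hashPath(
        accessor, CanonPath("/tmp/example"),   // hypothetical path
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256);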
"store-api.hh" namespace nix { diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 357800333..2086bd0b9 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - std::optional ht = parseHashTypeOpt(getAttr("outputHashAlgo")); + std::optional ht = parseHashAlgoOpt(getAttr("outputHashAlgo")); Hash h = newHashAllowEmpty(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false)); + fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); diff --git a/src/libstore/common-protocol-impl.hh b/src/libstore/common-protocol-impl.hh index 079c182b8..360882c02 100644 --- a/src/libstore/common-protocol-impl.hh +++ b/src/libstore/common-protocol-impl.hh @@ -16,11 +16,11 @@ namespace nix { /* protocol-agnostic templates */ #define COMMON_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ - TEMPLATE T CommonProto::Serialise< T >::read(const Store & store, CommonProto::ReadConn conn) \ + TEMPLATE T CommonProto::Serialise< T >::read(const StoreDirConfig & store, CommonProto::ReadConn conn) \ { \ return LengthPrefixedProtoHelper::read(store, conn); \ } \ - TEMPLATE void CommonProto::Serialise< T >::write(const Store & store, CommonProto::WriteConn conn, const T & t) \ + TEMPLATE void CommonProto::Serialise< T >::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const T & t) \ { \ LengthPrefixedProtoHelper::write(store, conn, t); \ } diff --git a/src/libstore/common-protocol.cc b/src/libstore/common-protocol.cc index 68445258f..fc2b5ac6f 100644 --- a/src/libstore/common-protocol.cc +++ b/src/libstore/common-protocol.cc @@ -13,40 +13,40 @@ namespace nix { /* protocol-agnostic definitions */ -std::string CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +std::string CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return readString(conn.from); } -void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const std::string & str) +void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::string & str) { conn.to << str; } -StorePath CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +StorePath CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return store.parseStorePath(readString(conn.from)); } -void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const StorePath & storePath) +void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const StorePath & storePath) { conn.to << store.printStorePath(storePath); } -ContentAddress CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +ContentAddress CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return ContentAddress::parse(readString(conn.from)); } -void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const ContentAddress & ca) +void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const ContentAddress & ca) { conn.to << 
renderContentAddress(ca); } -Realisation CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +Realisation CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { std::string rawInput = readString(conn.from); return Realisation::fromJSON( @@ -55,41 +55,41 @@ Realisation CommonProto::Serialise::read(const Store & store, Commo ); } -void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const Realisation & realisation) +void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const Realisation & realisation) { conn.to << realisation.toJSON().dump(); } -DrvOutput CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +DrvOutput CommonProto::Serialise::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return DrvOutput::parse(readString(conn.from)); } -void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput) +void CommonProto::Serialise::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput) { conn.to << drvOutput.to_string(); } -std::optional CommonProto::Serialise>::read(const Store & store, CommonProto::ReadConn conn) +std::optional CommonProto::Serialise>::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { auto s = readString(conn.from); return s == "" ? std::optional {} : store.parseStorePath(s); } -void CommonProto::Serialise>::write(const Store & store, CommonProto::WriteConn conn, const std::optional & storePathOpt) +void CommonProto::Serialise>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional & storePathOpt) { conn.to << (storePathOpt ? store.printStorePath(*storePathOpt) : ""); } -std::optional CommonProto::Serialise>::read(const Store & store, CommonProto::ReadConn conn) +std::optional CommonProto::Serialise>::read(const StoreDirConfig & store, CommonProto::ReadConn conn) { return ContentAddress::parseOpt(readString(conn.from)); } -void CommonProto::Serialise>::write(const Store & store, CommonProto::WriteConn conn, const std::optional & caOpt) +void CommonProto::Serialise>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional & caOpt) { conn.to << (caOpt ? renderContentAddress(*caOpt) : ""); } diff --git a/src/libstore/common-protocol.hh b/src/libstore/common-protocol.hh index f3f28972a..a878e84c9 100644 --- a/src/libstore/common-protocol.hh +++ b/src/libstore/common-protocol.hh @@ -5,7 +5,7 @@ namespace nix { -class Store; +struct StoreDirConfig; struct Source; // items being serialized @@ -48,7 +48,7 @@ struct CommonProto * infer the type instead of having to write it down explicitly. 
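 * [Editorial note: not part of the patch.] For example, given the
 * `StorePath` serialiser defined in common-protocol.cc above, a call
 *     CommonProto::write(store, conn, somePath);
 * resolves to CommonProto::Serialise<StorePath>::write without the caller
 * spelling out the type argument.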
*/ template<typename T> - static void write(const Store & store, WriteConn conn, const T & t) + static void write(const StoreDirConfig & store, WriteConn conn, const T & t) { CommonProto::Serialise<T>::write(store, conn, t); } @@ -57,8 +57,8 @@ struct CommonProto #define DECLARE_COMMON_SERIALISER(T) \ struct CommonProto::Serialise< T > \ { \ - static T read(const Store & store, CommonProto::ReadConn conn); \ - static void write(const Store & store, CommonProto::WriteConn conn, const T & str); \ + static T read(const StoreDirConfig & store, CommonProto::ReadConn conn); \ + static void write(const StoreDirConfig & store, CommonProto::WriteConn conn, const T & str); \ } template<> diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index a5f7cdf81..fc408f5af 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -38,14 +38,26 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m) return FileIngestionMethod::Flat; } -std::string ContentAddressMethod::render(HashType ht) const +std::string ContentAddressMethod::render(HashAlgorithm ha) const { return std::visit(overloaded { [&](const TextIngestionMethod & th) { - return std::string{"text:"} + printHashType(ht); + return std::string{"text:"} + printHashAlgo(ha); }, [&](const FileIngestionMethod & fim) { - return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht); + return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha); + } + }, raw); +} + +FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const +{ + return std::visit(overloaded { + [&](const TextIngestionMethod & th) { + return FileIngestionMethod::Flat; + }, + [&](const FileIngestionMethod & fim) { + return fim; } }, raw); } @@ -61,13 +73,13 @@ std::string ContentAddress::render() const + makeFileIngestionPrefix(method); }, }, method.raw) - + this->hash.to_string(HashFormat::Base32, true); + + this->hash.to_string(HashFormat::Nix32, true); } /** * Parses content address strings up to the hash. */ -static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix(std::string_view & rest) +static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodPrefix(std::string_view & rest) { std::string_view wholeInput { rest }; @@ -79,31 +91,31 @@ static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix prefix = *optPrefix; } - auto parseHashType_ = [&](){ + auto parseHashAlgorithm_ = [&](){ auto hashTypeRaw = splitPrefixTo(rest, ':'); if (!hashTypeRaw) throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput); - HashType hashType = parseHashType(*hashTypeRaw); - return hashType; + HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw); + return hashAlgo; }; // Switch on prefix if (prefix == "text") { // No parsing of the ingestion method, "text" only supports flat. - HashType hashType = parseHashType_(); + HashAlgorithm hashAlgo = parseHashAlgorithm_(); return { TextIngestionMethod {}, - std::move(hashType), + std::move(hashAlgo), }; } else if (prefix == "fixed") { // Parse method auto method = FileIngestionMethod::Flat; if (splitPrefix(rest, "r:")) method = FileIngestionMethod::Recursive; - HashType hashType = parseHashType_(); + HashAlgorithm hashAlgo = parseHashAlgorithm_(); return { std::move(method), - std::move(hashType), + std::move(hashAlgo), }; } else throw UsageError("content address prefix '%s' is unrecognized.
Recognized prefixes are 'text' or 'fixed'", prefix); @@ -113,15 +125,15 @@ ContentAddress ContentAddress::parse(std::string_view rawCa) { auto rest = rawCa; - auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest); + auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest); return ContentAddress { .method = std::move(caMethod), - .hash = Hash::parseNonSRIUnprefixed(rest, hashType), + .hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo), }; } -std::pair<ContentAddressMethod, HashType> ContentAddressMethod::parse(std::string_view caMethod) +std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod) { std::string asPrefix = std::string{caMethod} + ":"; // parseContentAddressMethodPrefix takes its argument by reference @@ -144,7 +156,7 @@ std::string renderContentAddress(std::optional<ContentAddress> ca) std::string ContentAddress::printMethodAlgo() const { return method.renderPrefix() - + printHashType(hash.type); + + printHashAlgo(hash.algo); } bool StoreReferences::empty() const @@ -176,13 +188,13 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con }, ca.method.raw); } -std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPartsOpt( - ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept +ContentAddressWithReferences ContentAddressWithReferences::fromParts( + ContentAddressMethod method, Hash hash, StoreReferences refs) { return std::visit(overloaded { - [&](TextIngestionMethod _) -> std::optional<ContentAddressWithReferences> { + [&](TextIngestionMethod _) -> ContentAddressWithReferences { if (refs.self) - return std::nullopt; + throw Error("self-reference not allowed with text hashing"); return ContentAddressWithReferences { TextInfo { .hash = std::move(hash), @@ -190,7 +202,7 @@ std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPa } }; }, - [&](FileIngestionMethod m2) -> std::optional<ContentAddressWithReferences> { + [&](FileIngestionMethod m2) -> ContentAddressWithReferences { return ContentAddressWithReferences { FixedOutputInfo { .method = m2, diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh index c4d619bdc..f0973412b 100644 --- a/src/libstore/content-address.hh +++ b/src/libstore/content-address.hh @@ -4,6 +4,7 @@ #include <variant> #include "hash.hh" #include "path.hh" +#include "file-content-address.hh" #include "comparator.hh" #include "variant-wrapper.hh" @@ -31,22 +32,6 @@ namespace nix { */ struct TextIngestionMethod : std::monostate { }; -/** - * An enumeration of the main ways we can serialize file system - * objects. - */ -enum struct FileIngestionMethod : uint8_t { - /** - * Flat-file hashing. Directly ingest the contents of a single file - */ - Flat = false, - /** - * Recursive (or NAR) hashing. Serializes the file-system object in Nix - * Archive format and ingest that - */ - Recursive = true -}; - /** * Compute the prefix to the hash algorithm which indicates how the * files were ingested. @@ -54,7 +39,7 @@ enum struct FileIngestionMethod : uint8_t { std::string makeFileIngestionPrefix(FileIngestionMethod m); /** - * An enumeration of all the ways we can serialize file system objects. + * An enumeration of all the ways we can content-address store objects. * * Just the type of a content address. Combine with the hash itself, and * we have a `ContentAddress` as defined below. Combine that, in turn, @@ -94,7 +79,7 @@ struct ContentAddressMethod /** * Parse a content addressing method and hash type.
*/ - static std::pair<ContentAddressMethod, HashType> parse(std::string_view rawCaMethod); + static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod); /** * Render a content addressing method and hash type in a @@ -102,7 +87,15 @@ struct ContentAddressMethod * * The rough inverse of `parse()`. */ - std::string render(HashType ht) const; + std::string render(HashAlgorithm ht) const; + + /** + * Get the underlying way to content-address file system objects. + * + * Different ways of hashing store objects may use the same method + * for hashing file system objects. + */ + FileIngestionMethod getFileIngestionMethod() const; }; @@ -116,11 +109,11 @@ struct ContentAddressMethod * serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the * following forms: * - * - ‘text:sha256:<sha256 hash of file contents>’: For paths - * computed by Store::makeTextPath() / Store::addTextToStore(). + * - `TextIngestionMethod`: + * ‘text:sha256:<sha256 hash of file contents>’ * - * - ‘fixed:<r?>:<ht>:<h>’: For paths computed by - * Store::makeFixedOutputPath() / Store::addToStore(). + * - `FileIngestionMethod`: + * ‘fixed:<r?>:<ht>:<h>’ */ struct ContentAddress { @@ -266,11 +259,12 @@ struct ContentAddressWithReferences * * @param refs References to other store objects or oneself. * - * Do note that not all combinations are supported; `nullopt` is - * returns for invalid combinations. + * @note Not all combinations are supported. This is a + * *partial function* and exceptions will be thrown for invalid + * combinations. */ - static std::optional<ContentAddressWithReferences> fromPartsOpt( - ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept; + static ContentAddressWithReferences fromParts( + ContentAddressMethod method, Hash hash, StoreReferences refs); ContentAddressMethod getMethod() const; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 105d92f25..923ea6447 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -400,31 +400,18 @@ static void performOp(TunnelLogger * logger, ref<Store> store, logger->startWork(); auto pathInfo = [&]() { // NB: FramedSource must be out of scope before logger->stopWork(); - auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr); - auto hashType = hashType_; // work around clang bug + auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr); + auto hashAlgo = hashAlgo_; // work around clang bug FramedSource source(from); - // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store. - return std::visit(overloaded { - [&](const TextIngestionMethod &) { - if (hashType != htSHA256) - throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", - name, printHashType(hashType)); - // We could stream this by changing Store - std::string contents = source.drain(); - auto path = store->addTextToStore(name, contents, refs, repair); - return store->queryPathInfo(path); - }, - [&](const FileIngestionMethod & fim) { - auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs); - return store->queryPathInfo(path); - }, - }, contentAddressMethod.raw); + // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
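// [Editorial note: not part of the patch.] `camStr` above is a rendered
// method/algorithm pair such as "fixed:r:sha256" (recursive NAR ingestion,
// hashed with SHA-256). `ContentAddressMethod::parse` and `render` are rough
// inverses, so a round trip preserves the string. A minimal sketch, assuming
// only the API shown in this diff:
//
//     auto [method, algo] = ContentAddressMethod::parse("fixed:r:sha256");
//     assert(method.getFileIngestionMethod() == FileIngestionMethod::Recursive);
//     assert(method.render(algo) == "fixed:r:sha256");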
+ auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair); + return store->queryPathInfo(path); }(); logger->stopWork(); WorkerProto::Serialise::write(*store, wconn, *pathInfo); } else { - HashType hashAlgo; + HashAlgorithm hashAlgo; std::string baseName; FileIngestionMethod method; { @@ -440,7 +427,7 @@ static void performOp(TunnelLogger * logger, ref store, hashAlgoRaw = "sha256"; method = FileIngestionMethod::Recursive; } - hashAlgo = parseHashType(hashAlgoRaw); + hashAlgo = parseHashAlgo(hashAlgoRaw); } auto dumpSource = sinkToSource([&](Sink & saved) { @@ -496,7 +483,10 @@ static void performOp(TunnelLogger * logger, ref store, std::string s = readString(from); auto refs = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); - auto path = store->addTextToStore(suffix, s, refs, NoRepair); + auto path = ({ + StringSource source { s }; + store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair); + }); logger->stopWork(); to << store->printStorePath(path); break; @@ -574,6 +564,15 @@ static void performOp(TunnelLogger * logger, ref store, case WorkerProto::Op::BuildDerivation: { auto drvPath = store->parseStorePath(readString(from)); BasicDerivation drv; + /* + * Note: unlike wopEnsurePath, this operation reads a + * derivation-to-be-realized from the client with + * readDerivation(Source,Store) rather than reading it from + * the local store with Store::readDerivation(). Since the + * derivation-to-be-realized is not registered in the store + * it cannot be trusted that its outPath was calculated + * correctly. + */ readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath)); BuildMode buildMode = (BuildMode) readInt(from); logger->startWork(); @@ -657,6 +656,21 @@ static void performOp(TunnelLogger * logger, ref store, break; } + case WorkerProto::Op::AddPermRoot: { + if (!trusted) + throw Error( + "you are not privileged to create perm roots\n\n" + "hint: you can just do this client-side without special privileges, and probably want to do that instead."); + auto storePath = WorkerProto::Serialise::read(*store, rconn); + Path gcRoot = absPath(readString(from)); + logger->startWork(); + auto & localFSStore = require(*store); + localFSStore.addPermRoot(storePath, gcRoot); + logger->stopWork(); + to << gcRoot; + break; + } + case WorkerProto::Op::AddIndirectRoot: { Path path = absPath(readString(from)); @@ -868,7 +882,7 @@ static void performOp(TunnelLogger * logger, ref store, bool repair, dontCheckSigs; auto path = store->parseStorePath(readString(from)); auto deriver = readString(from); - auto narHash = Hash::parseAny(readString(from), htSHA256); + auto narHash = Hash::parseAny(readString(from), HashAlgorithm::SHA256); ValidPathInfo info { path, narHash }; if (deriver != "") info.deriver = store->parseStorePath(deriver); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 1fecd1c97..2fafcb8e7 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -2,6 +2,7 @@ #include "downstream-placeholder.hh" #include "store-api.hh" #include "globals.hh" +#include "types.hh" #include "util.hh" #include "split.hh" #include "common-protocol.hh" @@ -11,7 +12,7 @@ namespace nix { -std::optional DerivationOutput::path(const Store & store, std::string_view drvName, OutputNameView outputName) const +std::optional DerivationOutput::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { return 
std::visit(overloaded { [](const DerivationOutput::InputAddressed & doi) -> std::optional { @@ -35,7 +36,7 @@ std::optional DerivationOutput::path(const Store & store, std::string } -StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, OutputNameView outputName) const +StorePath DerivationOutput::CAFixed::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { return store.makeFixedOutputPathFromCA( outputPathName(drvName, outputName), @@ -143,37 +144,89 @@ StorePath writeDerivation(Store & store, auto suffix = std::string(drv.name) + drvExtension; auto contents = drv.unparse(store, false); return readOnly || settings.readOnlyMode - ? store.computeStorePathForText(suffix, contents, references) - : store.addTextToStore(suffix, contents, references, repair); + ? store.makeFixedOutputPathFromCA(suffix, TextInfo { + .hash = hashString(HashAlgorithm::SHA256, contents), + .references = std::move(references), + }) + : ({ + StringSource s { contents }; + store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair); + }); +} + + +namespace { +/** + * This mimics std::istream to some extent. We use this much smaller implementation + * instead of plain istreams because the sentry object overhead is too high. + */ +struct StringViewStream { + std::string_view remaining; + + int peek() const { + return remaining.empty() ? EOF : remaining[0]; + } + + int get() { + if (remaining.empty()) return EOF; + char c = remaining[0]; + remaining.remove_prefix(1); + return c; + } +}; + +constexpr struct Escapes { + char map[256]; + constexpr Escapes() { + for (int i = 0; i < 256; i++) map[i] = (char) (unsigned char) i; + map[(int) (unsigned char) 'n'] = '\n'; + map[(int) (unsigned char) 'r'] = '\r'; + map[(int) (unsigned char) 't'] = '\t'; + } + char operator[](char c) const { return map[(unsigned char) c]; } +} escapes; } /* Read string `s' from stream `str'. */ -static void expect(std::istream & str, std::string_view s) +static void expect(StringViewStream & str, std::string_view s) { - char s2[s.size()]; - str.read(s2, s.size()); - std::string_view s2View { s2, s.size() }; - if (s2View != s) - throw FormatError("expected string '%s', got '%s'", s, s2View); + if (!str.remaining.starts_with(s)) + throw FormatError("expected string '%1%'", s); + str.remaining.remove_prefix(s.size()); } /* Read a C-style string from stream `str'. 
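   [Editorial note: not part of the patch.] The rewritten parser below returns
   a BackedStringView: when the literal contains no backslash escapes, the
   result is a zero-copy view into the input buffer, and only escaped strings
   are unescaped into an owned std::string. Callers that need ownership (as in
   parseStrings below) call .toOwned().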
*/ -static std::string parseString(std::istream & str) +static BackedStringView parseString(StringViewStream & str) { - std::string res; expect(str, "\""); - int c; - while ((c = str.get()) != '"') - if (c == '\\') { - c = str.get(); - if (c == 'n') res += '\n'; - else if (c == 'r') res += '\r'; - else if (c == 't') res += '\t'; - else res += c; + auto c = str.remaining.begin(), end = str.remaining.end(); + bool escaped = false; + for (; c != end && *c != '"'; c++) { + if (*c == '\\') { + c++; + if (c == end) + throw FormatError("unterminated string in derivation"); + escaped = true; } - else res += c; + } + + const auto contentLen = c - str.remaining.begin(); + const auto content = str.remaining.substr(0, contentLen); + str.remaining.remove_prefix(contentLen + 1); + + if (!escaped) + return content; + + std::string res; + res.reserve(content.size()); + for (c = content.begin(), end = content.end(); c != end; c++) + if (*c == '\\') { + c++; + res += escapes[*c]; + } + else res += *c; return res; } @@ -182,15 +235,15 @@ static void validatePath(std::string_view s) { throw FormatError("bad path '%1%' in derivation", s); } -static Path parsePath(std::istream & str) +static BackedStringView parsePath(StringViewStream & str) { auto s = parseString(str); - validatePath(s); + validatePath(*s); return s; } -static bool endOfList(std::istream & str) +static bool endOfList(StringViewStream & str) { if (str.peek() == ',') { str.get(); @@ -204,37 +257,37 @@ static bool endOfList(std::istream & str) } -static StringSet parseStrings(std::istream & str, bool arePaths) +static StringSet parseStrings(StringViewStream & str, bool arePaths) { StringSet res; expect(str, "["); while (!endOfList(str)) - res.insert(arePaths ? parsePath(str) : parseString(str)); + res.insert((arePaths ? 
parsePath(str) : parseString(str)).toOwned()); return res; } static DerivationOutput parseDerivationOutput( - const Store & store, - std::string_view pathS, std::string_view hashAlgo, std::string_view hashS, + const StoreDirConfig & store, + std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS, const ExperimentalFeatureSettings & xpSettings) { - if (hashAlgo != "") { - ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo); + if (hashAlgoStr != "") { + ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr); if (method == TextIngestionMethod {}) xpSettings.require(Xp::DynamicDerivations); - const auto hashType = parseHashType(hashAlgo); + const auto hashAlgo = parseHashAlgo(hashAlgoStr); if (hashS == "impure") { xpSettings.require(Xp::ImpureDerivations); if (pathS != "") throw FormatError("impure derivation output should not specify output path"); return DerivationOutput::Impure { .method = std::move(method), - .hashType = std::move(hashType), + .hashAlgo = std::move(hashAlgo), }; } else if (hashS != "") { validatePath(pathS); - auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType); + auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo); return DerivationOutput::CAFixed { .ca = ContentAddress { .method = std::move(method), @@ -247,7 +300,7 @@ static DerivationOutput parseDerivationOutput( throw FormatError("content-addressed derivation output should not specify output path"); return DerivationOutput::CAFloating { .method = std::move(method), - .hashType = std::move(hashType), + .hashAlgo = std::move(hashAlgo), }; } } else { @@ -262,7 +315,7 @@ static DerivationOutput parseDerivationOutput( } static DerivationOutput parseDerivationOutput( - const Store & store, std::istringstream & str, + const StoreDirConfig & store, StringViewStream & str, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { expect(str, ","); const auto pathS = parseString(str); @@ -270,7 +323,7 @@ static DerivationOutput parseDerivationOutput( expect(str, ","); const auto hash = parseString(str); expect(str, ")"); - return parseDerivationOutput(store, pathS, hashAlgo, hash, xpSettings); + return parseDerivationOutput(store, *pathS, *hashAlgo, *hash, xpSettings); } /** @@ -291,8 +344,8 @@ enum struct DerivationATermVersion { }; static DerivedPathMap::ChildNode parseDerivedPathMapNode( - const Store & store, - std::istringstream & str, + const StoreDirConfig & store, + StringViewStream & str, DerivationATermVersion version) { DerivedPathMap::ChildNode node; @@ -318,7 +371,7 @@ static DerivedPathMap::ChildNode parseDerivedPathMapNode( expect(str, ",["); while (!endOfList(str)) { expect(str, "("); - auto outputName = parseString(str); + auto outputName = parseString(str).toOwned(); expect(str, ","); node.childMap.insert_or_assign(outputName, parseDerivedPathMapNode(store, str, version)); expect(str, ")"); @@ -338,13 +391,13 @@ static DerivedPathMap::ChildNode parseDerivedPathMapNode( Derivation parseDerivation( - const Store & store, std::string && s, std::string_view name, + const StoreDirConfig & store, std::string && s, std::string_view name, const ExperimentalFeatureSettings & xpSettings) { Derivation drv; drv.name = name; - std::istringstream str(std::move(s)); + StringViewStream str{s}; expect(str, "D"); DerivationATermVersion version; switch (str.peek()) { @@ -355,12 +408,12 @@ Derivation parseDerivation( case 'r': { expect(str, "rvWithVersion("); auto versionS = parseString(str); - if (versionS == "xp-dyn-drv") { + if 
(*versionS == "xp-dyn-drv") { // Only version we have so far version = DerivationATermVersion::DynamicDerivations; xpSettings.require(Xp::DynamicDerivations); } else { - throw FormatError("Unknown derivation ATerm format version '%s'", versionS); + throw FormatError("Unknown derivation ATerm format version '%s'", *versionS); } expect(str, ","); break; } @@ -372,7 +425,7 @@ Derivation parseDerivation( /* Parse the list of outputs. */ expect(str, "["); while (!endOfList(str)) { - expect(str, "("); std::string id = parseString(str); + expect(str, "("); std::string id = parseString(str).toOwned(); auto output = parseDerivationOutput(store, str, xpSettings); drv.outputs.emplace(std::move(id), std::move(output)); } @@ -381,28 +434,28 @@ expect(str, ",["); while (!endOfList(str)) { expect(str, "("); - Path drvPath = parsePath(str); + auto drvPath = parsePath(str); expect(str, ","); - drv.inputDrvs.map.insert_or_assign(store.parseStorePath(drvPath), parseDerivedPathMapNode(store, str, version)); + drv.inputDrvs.map.insert_or_assign(store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version)); expect(str, ")"); } expect(str, ","); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); - expect(str, ","); drv.platform = parseString(str); - expect(str, ","); drv.builder = parseString(str); + expect(str, ","); drv.platform = parseString(str).toOwned(); + expect(str, ","); drv.builder = parseString(str).toOwned(); /* Parse the builder arguments. */ expect(str, ",["); while (!endOfList(str)) - drv.args.push_back(parseString(str)); + drv.args.push_back(parseString(str).toOwned()); /* Parse the environment variables. */ expect(str, ",["); while (!endOfList(str)) { - expect(str, "("); auto name = parseString(str); - expect(str, ","); auto value = parseString(str); + expect(str, "("); auto name = parseString(str).toOwned(); + expect(str, ","); auto value = parseString(str).toOwned(); expect(str, ")"); - drv.env[name] = value; + drv.env.insert_or_assign(std::move(name), std::move(value)); } expect(str, ")"); @@ -471,7 +524,7 @@ static void printUnquotedStrings(std::string & res, ForwardIterator i, ForwardIt } -static void unparseDerivedPathMapNode(const Store & store, std::string & s, const DerivedPathMap<StringSet>::ChildNode & node) +static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string & s, const DerivedPathMap<StringSet>::ChildNode & node) { s += ','; if (node.childMap.empty()) { @@ -512,7 +565,7 @@ static bool hasDynamicDrvDep(const Derivation & drv) } -std::string Derivation::unparse(const Store & store, bool maskOutputs, +std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, DerivedPathMap<StringSet>::ChildNode::Map * actualInputs) const { std::string s; @@ -548,7 +601,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, }, [&](const DerivationOutput::CAFloating & dof) { s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType)); + s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo)); s += ','; printUnquotedString(s, ""); }, [&](const DerivationOutput::Deferred &) { @@ -559,7 +612,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, [&](const DerivationOutput::Impure & doi) { // FIXME s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType)); + s += ','; printUnquotedString(s, doi.method.renderPrefix() +
printHashAlgo(doi.hashAlgo)); s += ','; printUnquotedString(s, "impure"); } }, i.second.raw); @@ -632,7 +685,7 @@ DerivationType BasicDerivation::type() const floatingCAOutputs, deferredIAOutputs, impureOutputs; - std::optional floatingHashType; + std::optional floatingHashAlgo; for (auto & i : outputs) { std::visit(overloaded { @@ -644,10 +697,10 @@ DerivationType BasicDerivation::type() const }, [&](const DerivationOutput::CAFloating & dof) { floatingCAOutputs.insert(i.first); - if (!floatingHashType) { - floatingHashType = dof.hashType; + if (!floatingHashAlgo) { + floatingHashAlgo = dof.hashAlgo; } else { - if (*floatingHashType != dof.hashType) + if (*floatingHashAlgo != dof.hashAlgo) throw Error("all floating outputs must use the same hash type"); } }, @@ -775,7 +828,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut std::map outputHashes; for (const auto & i : drv.outputs) { auto & dof = std::get(i.second.raw); - auto hash = hashString(htSHA256, "fixed:out:" + auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:" + dof.ca.printMethodAlgo() + ":" + dof.ca.hash.to_string(HashFormat::Base16, false) + ":" + store.printStorePath(dof.path(store, drv.name, i.first))); @@ -826,7 +879,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut } } - auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2)); + auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2)); std::map outputHashes; for (const auto & [outputName, _] : drv.outputs) { @@ -846,7 +899,7 @@ std::map staticOutputHashes(Store & store, const Derivation & } -static DerivationOutput readDerivationOutput(Source & in, const Store & store) +static DerivationOutput readDerivationOutput(Source & in, const StoreDirConfig & store) { const auto pathS = readString(in); const auto hashAlgo = readString(in); @@ -863,7 +916,7 @@ StringSet BasicDerivation::outputNames() const return names; } -DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const +DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const StoreDirConfig & store) const { DerivationOutputsAndOptPaths outsAndOptPaths; for (auto & [outputName, output] : outputs) @@ -885,7 +938,7 @@ std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) } -Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name) +Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivation & drv, std::string_view name) { drv.name = name; @@ -913,7 +966,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, } -void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv) +void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDerivation & drv) { out << drv.outputs.size(); for (auto & i : drv.outputs) { @@ -931,7 +984,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, [&](const DerivationOutput::CAFloating & dof) { out << "" - << (dof.method.renderPrefix() + printHashType(dof.hashType)) + << (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo)) << ""; }, [&](const DerivationOutput::Deferred &) { @@ -941,7 +994,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, [&](const DerivationOutput::Impure & doi) { out << "" - << (doi.method.renderPrefix() + printHashType(doi.hashType)) + << (doi.method.renderPrefix() + 
printHashAlgo(doi.hashAlgo)) << "impure"; }, }, i.second.raw); @@ -959,7 +1012,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr std::string hashPlaceholder(const OutputNameView outputName) { // FIXME: memoize? - return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false); + return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false); } @@ -1003,13 +1056,13 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String } -std::optional Derivation::tryResolve(Store & store) const +std::optional Derivation::tryResolve(Store & store, Store * evalStore) const { std::map, StorePath> inputDrvOutputs; std::function::ChildNode &)> accum; accum = [&](auto & inputDrv, auto & node) { - for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv)) { + for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv, evalStore)) { if (outputPath) { inputDrvOutputs.insert_or_assign({inputDrv, outputName}, *outputPath); if (auto p = get(node.childMap, outputName)) @@ -1151,10 +1204,10 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const } -const Hash impureOutputHash = hashString(htSHA256, "impure"); +const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure"); nlohmann::json DerivationOutput::toJSON( - const Store & store, std::string_view drvName, OutputNameView outputName) const + const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const { nlohmann::json res = nlohmann::json::object(); std::visit(overloaded { @@ -1168,11 +1221,11 @@ nlohmann::json DerivationOutput::toJSON( // FIXME print refs? 
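// [Editorial note: not part of the patch.] For a CAFixed output, the JSON
// assembled above looks roughly like
//     { "path": "/nix/store/...-name", "hashAlgo": "r:sha256", "hash": "<base16>" }
// where the "r:" prefix comes from renderPrefix() and marks recursive (NAR)
// ingestion. The CAFloating case below records only "hashAlgo", because a
// floating output's path is not known until after the build.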
}, [&](const DerivationOutput::CAFloating & dof) { - res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType); + res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo); }, [&](const DerivationOutput::Deferred &) {}, [&](const DerivationOutput::Impure & doi) { - res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType); + res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo); res["impure"] = true; }, }, raw); @@ -1181,7 +1234,7 @@ nlohmann::json DerivationOutput::toJSON( DerivationOutput DerivationOutput::fromJSON( - const Store & store, std::string_view drvName, OutputNameView outputName, + const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName, const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) { @@ -1192,15 +1245,15 @@ DerivationOutput DerivationOutput::fromJSON( for (const auto & [key, _] : json) keys.insert(key); - auto methodAlgo = [&]() -> std::pair { - std::string hashAlgo = json["hashAlgo"]; + auto methodAlgo = [&]() -> std::pair { + std::string hashAlgoStr = json["hashAlgo"]; // remaining to parse, will be mutated by parsers - std::string_view s = hashAlgo; + std::string_view s = hashAlgoStr; ContentAddressMethod method = ContentAddressMethod::parsePrefix(s); if (method == TextIngestionMethod {}) xpSettings.require(Xp::DynamicDerivations); - auto hashType = parseHashType(s); - return { std::move(method), std::move(hashType) }; + auto hashAlgo = parseHashAlgo(s); + return { std::move(method), std::move(hashAlgo) }; }; if (keys == (std::set { "path" })) { @@ -1210,11 +1263,11 @@ DerivationOutput DerivationOutput::fromJSON( } else if (keys == (std::set { "path", "hashAlgo", "hash" })) { - auto [method, hashType] = methodAlgo(); + auto [method, hashAlgo] = methodAlgo(); auto dof = DerivationOutput::CAFixed { .ca = ContentAddress { .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType), + .hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashAlgo), }, }; if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"])) @@ -1224,10 +1277,10 @@ DerivationOutput DerivationOutput::fromJSON( else if (keys == (std::set { "hashAlgo" })) { xpSettings.require(Xp::CaDerivations); - auto [method, hashType] = methodAlgo(); + auto [method, hashAlgo] = methodAlgo(); return DerivationOutput::CAFloating { .method = std::move(method), - .hashType = std::move(hashType), + .hashAlgo = std::move(hashAlgo), }; } @@ -1237,10 +1290,10 @@ DerivationOutput DerivationOutput::fromJSON( else if (keys == (std::set { "hashAlgo", "impure" })) { xpSettings.require(Xp::ImpureDerivations); - auto [method, hashType] = methodAlgo(); + auto [method, hashAlgo] = methodAlgo(); return DerivationOutput::Impure { .method = std::move(method), - .hashType = hashType, + .hashAlgo = hashAlgo, }; } @@ -1250,7 +1303,7 @@ DerivationOutput DerivationOutput::fromJSON( } -nlohmann::json Derivation::toJSON(const Store & store) const +nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const { nlohmann::json res = nlohmann::json::object(); @@ -1303,7 +1356,7 @@ nlohmann::json Derivation::toJSON(const Store & store) const Derivation Derivation::fromJSON( - const Store & store, + const StoreDirConfig & store, const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings) { diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index fa14e7536..2a326b578 100644 --- 
a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -17,7 +17,7 @@ namespace nix { -class Store; +struct StoreDirConfig; /* Abstract syntax of derivations. */ @@ -55,7 +55,7 @@ struct DerivationOutput * @param drvName The name of the derivation this is an output of, without the `.drv`. * @param outputName The name of this output. */ - StorePath path(const Store & store, std::string_view drvName, OutputNameView outputName) const; + StorePath path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; GENERATE_CMP(CAFixed, me->ca); }; @@ -75,9 +75,9 @@ struct DerivationOutput /** * How the serialization will be hashed */ - HashType hashType; + HashAlgorithm hashAlgo; - GENERATE_CMP(CAFloating, me->method, me->hashType); + GENERATE_CMP(CAFloating, me->method, me->hashAlgo); }; /** @@ -102,9 +102,9 @@ struct DerivationOutput /** * How the serialization will be hashed */ - HashType hashType; + HashAlgorithm hashAlgo; - GENERATE_CMP(Impure, me->method, me->hashType); + GENERATE_CMP(Impure, me->method, me->hashAlgo); }; typedef std::variant< @@ -132,17 +132,17 @@ struct DerivationOutput * the safer interface provided by * BasicDerivation::outputsAndOptPaths */ - std::optional path(const Store & store, std::string_view drvName, OutputNameView outputName) const; + std::optional path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; nlohmann::json toJSON( - const Store & store, + const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const; /** * @param xpSettings Stop-gap to avoid globals during unit tests. */ static DerivationOutput fromJSON( - const Store & store, + const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName, const nlohmann::json & json, @@ -304,7 +304,7 @@ struct BasicDerivation * augmented with knowledge of the Store paths they would be written * into. */ - DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const; + DerivationOutputsAndOptPaths outputsAndOptPaths(const StoreDirConfig & store) const; static std::string_view nameFromPath(const StorePath & storePath); @@ -318,6 +318,8 @@ struct BasicDerivation me->name); }; +class Store; + struct Derivation : BasicDerivation { /** @@ -328,7 +330,7 @@ struct Derivation : BasicDerivation /** * Print a derivation. */ - std::string unparse(const Store & store, bool maskOutputs, + std::string unparse(const StoreDirConfig & store, bool maskOutputs, DerivedPathMap::ChildNode::Map * actualInputs = nullptr) const; /** @@ -340,7 +342,7 @@ struct Derivation : BasicDerivation * 2. Input placeholders are replaced with realized input store * paths. */ - std::optional tryResolve(Store & store) const; + std::optional tryResolve(Store & store, Store * evalStore = nullptr) const; /** * Like the above, but instead of querying the Nix database for @@ -365,9 +367,9 @@ struct Derivation : BasicDerivation Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { } Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { } - nlohmann::json toJSON(const Store & store) const; + nlohmann::json toJSON(const StoreDirConfig & store) const; static Derivation fromJSON( - const Store & store, + const StoreDirConfig & store, const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -391,7 +393,7 @@ StorePath writeDerivation(Store & store, * Read a derivation from a file. 
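 * [Editorial note: not part of the patch.] Typical use, where `path` is a
 * hypothetical local path to a `.drv` file and the name is taken from its
 * basename:
 *     auto drv = parseDerivation(store, readFile(path), "hello-2.12.1");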
*/ Derivation parseDerivation( - const Store & store, + const StoreDirConfig & store, std::string && s, std::string_view name, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -493,8 +495,8 @@ extern Sync drvHashes; struct Source; struct Sink; -Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name); -void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv); +Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivation & drv, std::string_view name); +void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDerivation & drv); /** * This creates an opaque and almost certainly unique string diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 47d784deb..a7b404321 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -1,4 +1,5 @@ #include "derived-path.hh" +#include "derivations.hh" #include "store-api.hh" #include @@ -11,9 +12,9 @@ namespace nix { bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ { \ const MY_TYPE* me = this; \ - auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields1 = std::tie(*me->drvPath, me->FIELD); \ me = &other; \ - auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields2 = std::tie(*me->drvPath, me->FIELD); \ return fields1 COMPARATOR fields2; \ } #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ @@ -21,18 +22,14 @@ namespace nix { CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \ CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <) -#define FIELD_TYPE std::string CMP(SingleDerivedPath, SingleDerivedPathBuilt, output) -#undef FIELD_TYPE -#define FIELD_TYPE OutputsSpec CMP(SingleDerivedPath, DerivedPathBuilt, outputs) -#undef FIELD_TYPE #undef CMP #undef CMP_ONE -nlohmann::json DerivedPath::Opaque::toJSON(const Store & store) const +nlohmann::json DerivedPath::Opaque::toJSON(const StoreDirConfig & store) const { return store.printStorePath(path); } @@ -86,50 +83,50 @@ nlohmann::json DerivedPath::toJSON(Store & store) const }, raw()); } -std::string DerivedPath::Opaque::to_string(const Store & store) const +std::string DerivedPath::Opaque::to_string(const StoreDirConfig & store) const { return store.printStorePath(path); } -std::string SingleDerivedPath::Built::to_string(const Store & store) const +std::string SingleDerivedPath::Built::to_string(const StoreDirConfig & store) const { return drvPath->to_string(store) + "^" + output; } -std::string SingleDerivedPath::Built::to_string_legacy(const Store & store) const +std::string SingleDerivedPath::Built::to_string_legacy(const StoreDirConfig & store) const { return drvPath->to_string(store) + "!" + output; } -std::string DerivedPath::Built::to_string(const Store & store) const +std::string DerivedPath::Built::to_string(const StoreDirConfig & store) const { return drvPath->to_string(store) + '^' + outputs.to_string(); } -std::string DerivedPath::Built::to_string_legacy(const Store & store) const +std::string DerivedPath::Built::to_string_legacy(const StoreDirConfig & store) const { return drvPath->to_string_legacy(store) + "!" 
+ outputs.to_string(); } -std::string SingleDerivedPath::to_string(const Store & store) const +std::string SingleDerivedPath::to_string(const StoreDirConfig & store) const { return std::visit( [&](const auto & req) { return req.to_string(store); }, raw()); } -std::string DerivedPath::to_string(const Store & store) const +std::string DerivedPath::to_string(const StoreDirConfig & store) const { return std::visit( [&](const auto & req) { return req.to_string(store); }, raw()); } -std::string SingleDerivedPath::to_string_legacy(const Store & store) const +std::string SingleDerivedPath::to_string_legacy(const StoreDirConfig & store) const { return std::visit(overloaded { [&](const SingleDerivedPath::Built & req) { return req.to_string_legacy(store); }, @@ -137,7 +134,7 @@ std::string SingleDerivedPath::to_string_legacy(const Store & store) const }, this->raw()); } -std::string DerivedPath::to_string_legacy(const Store & store) const +std::string DerivedPath::to_string_legacy(const StoreDirConfig & store) const { return std::visit(overloaded { [&](const DerivedPath::Built & req) { return req.to_string_legacy(store); }, @@ -146,7 +143,7 @@ std::string DerivedPath::to_string_legacy(const Store & store) const } -DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_view s) +DerivedPath::Opaque DerivedPath::Opaque::parse(const StoreDirConfig & store, std::string_view s) { return {store.parseStorePath(s)}; } @@ -166,7 +163,7 @@ void drvRequireExperiment( } SingleDerivedPath::Built SingleDerivedPath::Built::parse( - const Store & store, ref drv, + const StoreDirConfig & store, ref drv, OutputNameView output, const ExperimentalFeatureSettings & xpSettings) { @@ -178,7 +175,7 @@ SingleDerivedPath::Built SingleDerivedPath::Built::parse( } DerivedPath::Built DerivedPath::Built::parse( - const Store & store, ref drv, + const StoreDirConfig & store, ref drv, OutputNameView outputsS, const ExperimentalFeatureSettings & xpSettings) { @@ -190,7 +187,7 @@ DerivedPath::Built DerivedPath::Built::parse( } static SingleDerivedPath parseWithSingle( - const Store & store, std::string_view s, std::string_view separator, + const StoreDirConfig & store, std::string_view s, std::string_view separator, const ExperimentalFeatureSettings & xpSettings) { size_t n = s.rfind(separator); @@ -207,7 +204,7 @@ static SingleDerivedPath parseWithSingle( } SingleDerivedPath SingleDerivedPath::parse( - const Store & store, + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { @@ -215,7 +212,7 @@ SingleDerivedPath SingleDerivedPath::parse( } SingleDerivedPath SingleDerivedPath::parseLegacy( - const Store & store, + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { @@ -223,7 +220,7 @@ SingleDerivedPath SingleDerivedPath::parseLegacy( } static DerivedPath parseWith( - const Store & store, std::string_view s, std::string_view separator, + const StoreDirConfig & store, std::string_view s, std::string_view separator, const ExperimentalFeatureSettings & xpSettings) { size_t n = s.rfind(separator); @@ -240,7 +237,7 @@ static DerivedPath parseWith( } DerivedPath DerivedPath::parse( - const Store & store, + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { @@ -248,7 +245,7 @@ DerivedPath DerivedPath::parse( } DerivedPath DerivedPath::parseLegacy( - const Store & store, + const StoreDirConfig & store, std::string_view s, const ExperimentalFeatureSettings & xpSettings) { diff 
--git a/src/libstore/derived-path.hh b/src/libstore/derived-path.hh index 6c5dfeed9..b238f844e 100644 --- a/src/libstore/derived-path.hh +++ b/src/libstore/derived-path.hh @@ -12,6 +12,9 @@ namespace nix { +struct StoreDirConfig; + +// TODO stop needing this, `toJSON` below should be pure class Store; /** @@ -24,9 +27,9 @@ class Store; struct DerivedPathOpaque { StorePath path; - std::string to_string(const Store & store) const; - static DerivedPathOpaque parse(const Store & store, std::string_view); - nlohmann::json toJSON(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; + static DerivedPathOpaque parse(const StoreDirConfig & store, std::string_view); + nlohmann::json toJSON(const StoreDirConfig & store) const; GENERATE_CMP(DerivedPathOpaque, me->path); }; @@ -59,18 +62,18 @@ struct SingleDerivedPathBuilt { /** * Uses `^` as the separator */ - std::string to_string(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; /** * Uses `!` as the separator */ - std::string to_string_legacy(const Store & store) const; + std::string to_string_legacy(const StoreDirConfig & store) const; /** * The caller splits on the separator, so it works for both variants. * * @param xpSettings Stop-gap to avoid globals during unit tests. */ static SingleDerivedPathBuilt parse( - const Store & store, ref drvPath, + const StoreDirConfig & store, ref drvPath, OutputNameView outputs, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); nlohmann::json toJSON(Store & store) const; @@ -120,18 +123,18 @@ struct SingleDerivedPath : _SingleDerivedPathRaw { /** * Uses `^` as the separator */ - std::string to_string(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; /** * Uses `!` as the separator */ - std::string to_string_legacy(const Store & store) const; + std::string to_string_legacy(const StoreDirConfig & store) const; /** * Uses `^` as the separator * * @param xpSettings Stop-gap to avoid globals during unit tests. */ static SingleDerivedPath parse( - const Store & store, + const StoreDirConfig & store, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** @@ -140,7 +143,7 @@ struct SingleDerivedPath : _SingleDerivedPathRaw { * @param xpSettings Stop-gap to avoid globals during unit tests. */ static SingleDerivedPath parseLegacy( - const Store & store, + const StoreDirConfig & store, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); nlohmann::json toJSON(Store & store) const; @@ -182,18 +185,18 @@ struct DerivedPathBuilt { /** * Uses `^` as the separator */ - std::string to_string(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; /** * Uses `!` as the separator */ - std::string to_string_legacy(const Store & store) const; + std::string to_string_legacy(const StoreDirConfig & store) const; /** * The caller splits on the separator, so it works for both variants. * * @param xpSettings Stop-gap to avoid globals during unit tests. 
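 * [Editorial note: not part of the patch.] For example, the (hypothetical)
 * string "/nix/store/<hash>-hello.drv^out,dev" names the "out" and "dev"
 * outputs of that derivation; "^" is the current separator, "!" the legacy
 * one, and the caller passes the part after the separator here.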
*/ static DerivedPathBuilt parse( - const Store & store, ref, + const StoreDirConfig & store, ref, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); nlohmann::json toJSON(Store & store) const; @@ -242,18 +245,18 @@ struct DerivedPath : _DerivedPathRaw { /** * Uses `^` as the separator */ - std::string to_string(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; /** * Uses `!` as the separator */ - std::string to_string_legacy(const Store & store) const; + std::string to_string_legacy(const StoreDirConfig & store) const; /** * Uses `^` as the separator * * @param xpSettings Stop-gap to avoid globals during unit tests. */ static DerivedPath parse( - const Store & store, + const StoreDirConfig & store, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** @@ -262,7 +265,7 @@ struct DerivedPath : _DerivedPathRaw { * @param xpSettings Stop-gap to avoid globals during unit tests. */ static DerivedPath parseLegacy( - const Store & store, + const StoreDirConfig & store, std::string_view, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc index ca9f7476e..91d47f946 100644 --- a/src/libstore/downstream-placeholder.cc +++ b/src/libstore/downstream-placeholder.cc @@ -5,7 +5,7 @@ namespace nix { std::string DownstreamPlaceholder::render() const { - return "/" + hash.to_string(HashFormat::Base32, false); + return "/" + hash.to_string(HashFormat::Nix32, false); } @@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput( auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4); auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName); return DownstreamPlaceholder { - hashString(htSHA256, clearText) + hashString(HashAlgorithm::SHA256, clearText) }; } @@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( xpSettings.require(Xp::DynamicDerivations); auto compressed = compressHash(placeholder.hash, 20); auto clearText = "nix-computed-output:" - + compressed.to_string(HashFormat::Base32, false) + + compressed.to_string(HashFormat::Nix32, false) + ":" + std::string { outputName }; return DownstreamPlaceholder { - hashString(htSHA256, clearText) + hashString(HashAlgorithm::SHA256, clearText) }; } diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 821cda399..f52a309d1 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -58,13 +58,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store RepairFlag repair, CheckSigsFlag checkSigs) override { unsupported("addToStore"); } - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override - { unsupported("addTextToStore"); } - void narFromPath(const StorePath & path, Sink & sink) override { unsupported("narFromPath"); } diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 52130f8f6..d57b25bd7 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) { auto info = queryPathInfo(path); - HashSink hashSink(htSHA256); + HashSink hashSink(HashAlgorithm::SHA256); TeeSink teeSink(sink, hashSink); narFromPath(path, teeSink); 
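// [Editorial sketch: not part of the patch.] The TeeSink above fans a single
// NAR stream out to two sinks, so the export payload and its SHA-256 are
// produced in one pass rather than serialising the path twice. The same
// pattern in isolation, assuming only the API used in this hunk:
//
//     Hash narHashWhileCopying(Store & store, const StorePath & path, Sink & out)
//     {
//         HashSink hashSink(HashAlgorithm::SHA256);
//         TeeSink tee(out, hashSink);   // every write is duplicated to both sinks
//         store.narFromPath(path, tee); // stream the NAR exactly once
//         return hashSink.currentHash().first;
//     }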
@@ -39,9 +39,9 @@ void Store::exportPath(const StorePath & path, Sink & sink) filesystem corruption from spreading to other machines. Don't complain if the stored hash is zero (unknown). */ Hash hash = hashSink.currentHash().first; - if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) + if (hash != info->narHash && info->narHash != Hash(info->narHash.algo)) throw Error("hash of path '%s' has changed from '%s' to '%s'!", - printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)); + printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)); teeSink << exportMagic @@ -79,7 +79,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) auto references = CommonProto::Serialise::read(*this, CommonProto::ReadConn { .from = source }); auto deriver = readString(source); - auto narHash = hashString(htSHA256, saved.s); + auto narHash = hashString(HashAlgorithm::SHA256, saved.s); ValidPathInfo info { path, narHash }; if (deriver != "") diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 8d05ae4bd..2bd3a2edc 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -50,7 +50,7 @@ static void makeSymlink(const Path & link, const Path & target) void LocalStore::addIndirectRoot(const Path & path) { - std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false); + std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false); Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash)); makeSymlink(realRoot, path); } @@ -330,9 +330,7 @@ typedef std::unordered_map> UncheckedRoots static void readProcLink(const std::string & file, UncheckedRoots & roots) { - /* 64 is the starting buffer size gnu readlink uses... */ - auto bufsiz = ssize_t{64}; -try_again: + constexpr auto bufsiz = PATH_MAX; char buf[bufsiz]; auto res = readlink(file.c_str(), buf, bufsiz); if (res == -1) { @@ -341,10 +339,7 @@ try_again: throw SysError("reading symlink"); } if (res == bufsiz) { - if (SSIZE_MAX / 2 < bufsiz) - throw Error("stupidly long symlink"); - bufsiz *= 2; - goto try_again; + throw Error("overly long symlink starting with '%1%'", std::string_view(buf, bufsiz)); } if (res > 0 && buf[0] == '/') roots[std::string(static_cast(buf), res)] diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index cc416a4d6..50584e06c 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -15,8 +15,6 @@ #include -#include - #ifdef __GLIBC__ # include # include @@ -116,7 +114,14 @@ Settings::Settings() void loadConfFile() { - globalConfig.applyConfigFile(settings.nixConfDir + "/nix.conf"); + auto applyConfigFile = [&](const Path & path) { + try { + std::string contents = readFile(path); + globalConfig.applyConfig(contents, path); + } catch (SysError &) { } + }; + + applyConfigFile(settings.nixConfDir + "/nix.conf"); /* We only want to send overrides to the daemon, i.e. stuff from ~/.nix/nix.conf or the command line. 
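   [Editorial note: not part of the patch.] The applyConfigFile lambda above
   replaces the old Config::applyConfigFile call; it reads each file itself
   and silently skips unreadable ones (SysError), so a missing nix.conf is
   not an error.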
*/ @@ -124,7 +129,7 @@ void loadConfFile() auto files = settings.nixUserConfFiles; for (auto file = files.rbegin(); file != files.rend(); file++) { - globalConfig.applyConfigFile(*file); + applyConfigFile(*file); } auto nixConfEnv = getEnv("NIX_CONFIG"); @@ -402,9 +407,6 @@ void initLibStore() { initLibUtil(); - if (sodium_init() == -1) - throw Error("could not initialise libsodium"); - loadConfFile(); preloadNSS(); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 27caf42c4..49a4c1f2a 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -117,10 +117,11 @@ public: Setting storeUri{this, getEnv("NIX_REMOTE").value_or("auto"), "store", R"( - The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) + The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format) to use for most operations. - See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) - for supported store types and settings. + See the + [Store Types](@docroot@/store/types/index.md) + section of the manual for supported store types and settings. )"}; Setting keepFailed{this, false, "keep-failed", @@ -143,7 +144,7 @@ public: */ bool verboseBuild = true; - Setting logLines{this, 10, "log-lines", + Setting logLines{this, 25, "log-lines", "The number of lines of the tail of " "the log to show if a build fails."}; @@ -183,7 +184,9 @@ public: command line switch and defaults to `1`. The value `0` means that the builder should use all available CPU cores in the system. )", - {"build-cores"}, false}; + {"build-cores"}, + // Don't document the machine-specific default value + false}; /** * Read-only mode. Don't copy stuff to the store, don't change @@ -198,7 +201,7 @@ public: Nix will only build a given [derivation](@docroot@/language/derivations.md) locally when its `system` attribute equals any of the values specified here or in [`extra-platforms`](#conf-extra-platforms). The default value is set when Nix itself is compiled for the system it will run on. - The following system types are widely used, as [Nix is actively supported on these platforms](@docroot@/contributing/hacking.md#platforms): + The following system types are widely used, as Nix is actively supported on these platforms: - `x86_64-linux` - `x86_64-darwin` @@ -211,7 +214,11 @@ public: In general, you do not have to modify this setting. While you can force Nix to run a Darwin-specific `builder` executable on a Linux machine, the result would obviously be wrong. - This value is available in the Nix language as [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem). + This value is available in the Nix language as + [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) + if the + [`eval-system`](#conf-eval-system) + configuration option is set as the empty string. )"}; Setting maxSilentTime{ @@ -265,21 +272,16 @@ public: Setting alwaysAllowSubstitutes{ this, false, "always-allow-substitutes", R"( - If set to `true`, Nix will ignore the `allowSubstitutes` attribute in - derivations and always attempt to use available substituters. - For more information on `allowSubstitutes`, see [the manual chapter on advanced attributes](../language/advanced-attributes.md). + If set to `true`, Nix will ignore the [`allowSubstitutes`](@docroot@/language/advanced-attributes.md) attribute in derivations and always attempt to use [available substituters](#conf-substituters). 
)"}; Setting buildersUseSubstitutes{ this, false, "builders-use-substitutes", R"( - If set to `true`, Nix will instruct remote build machines to use - their own binary substitutes if available. In practical terms, this - means that remote hosts will fetch as many build dependencies as - possible from their own substitutes (e.g, from `cache.nixos.org`), - instead of waiting for this host to upload them all. This can - drastically reduce build times if the network connection between - this computer and the remote build host is slow. + If set to `true`, Nix will instruct [remote build machines](#conf-builders) to use their own [`substituters`](#conf-substituters) if available. + + It means that remote build hosts will fetch as many dependencies as possible from their own substituters (e.g, from `cache.nixos.org`) instead of waiting for the local machine to upload them all. + This can drastically reduce build times if the network connection between the local machine and the remote build host is slow. )"}; Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", @@ -629,7 +631,7 @@ public: At least one of the following condition must be met for Nix to accept copying a store object from another - Nix store (such as a substituter): + Nix store (such as a [substituter](#conf-substituters)): - the store object has been signed using a key in the trusted keys list - the [`require-sigs`](#conf-require-sigs) option has been set to `false` @@ -699,7 +701,10 @@ public: Build systems will usually detect the target platform to be the current physical system and therefore produce machine code incompatible with what may be intended in the derivation. You should design your derivation's `builder` accordingly and cross-check the results when using this option against natively-built versions of your derivation. - )", {}, false}; + )", + {}, + // Don't document the machine-specific default value + false}; Setting systemFeatures{ this, @@ -744,15 +749,18 @@ public: [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix. Included by default on Linux if the [`auto-allocate-uids`](#conf-auto-allocate-uids) setting is enabled. - )", {}, false}; + )", + {}, + // Don't document the machine-specific default value + false}; Setting substituters{ this, Strings{"https://cache.nixos.org/"}, "substituters", R"( - A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) to be used as substituters, separated by whitespace. - A substituter is an additional [store]{@docroot@/glossary.md##gloss-store} from which Nix can obtain [store objects](@docroot@/glossary.md#gloss-store-object) instead of building them. + A list of [URLs of Nix stores](@docroot@/store/types/index.md#store-url-format) to be used as substituters, separated by whitespace. + A substituter is an additional [store](@docroot@/glossary.md#gloss-store) from which Nix can obtain [store objects](@docroot@/glossary.md#gloss-store-object) instead of building them. Substituters are tried based on their priority value, which each substituter can set independently. Lower value means higher priority. @@ -770,7 +778,7 @@ public: Setting trustedSubstituters{ this, {}, "trusted-substituters", R"( - A list of [Nix store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format), separated by whitespace. + A list of [Nix store URLs](@docroot@/store/types/index.md#store-url-format), separated by whitespace. 
These are not used by default, but users of the Nix daemon can enable them by specifying [`substituters`](#conf-substituters). Unprivileged users (those set only in [`allowed-users`](#conf-allowed-users) but not [`trusted-users`](#conf-trusted-users)) can pass as `substituters` only those URLs listed in `trusted-substituters`. @@ -938,7 +946,9 @@ public: may be useful in certain scenarios (e.g. to spin up containers or set up userspace network interfaces in tests). )"}; +#endif +#if HAVE_ACL_SUPPORT Setting ignoredAcls{ this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls", R"( diff --git a/src/libstore/indirect-root-store.hh b/src/libstore/indirect-root-store.hh index 59e45af45..c11679fe8 100644 --- a/src/libstore/indirect-root-store.hh +++ b/src/libstore/indirect-root-store.hh @@ -11,6 +11,30 @@ namespace nix { * reference. * * See methods for details on the operations it represents. + * + * @note + * To understand the purpose of this class, it might help to do some + * "closed-world" rather than "open-world" reasoning, and consider the + * problem it solved for us. This class was factored out from + * `LocalFSStore` in order to support the following table, which + * contains 4 concrete store types (non-abstract classes, exposed to the + * user), and how they implemented the two GC root methods: + * + * @note + * | | `addPermRoot()` | `addIndirectRoot()` | + * |-------------------|-----------------|---------------------| + * | `LocalStore` | local | local | + * | `UDSRemoteStore` | local | remote | + * | `SSHStore` | doesn't have | doesn't have | + * | `MountedSSHStore` | remote | doesn't have | + * + * @note + * Note how only the local implementations of `addPermRoot()` need + * `addIndirectRoot()`; that is what this class enforces. Without it, + * and with `addPermRoot()` and `addIndirectRoot()` both `virtual`, we + * would accidentally be allowing for a combinatorial explosion of + * possible implementations, many of which make no sense. Enforcing + * this invariant cuts down that space. */ struct IndirectRootStore : public virtual LocalFSStore { diff --git a/src/libstore/keys.cc b/src/libstore/keys.cc new file mode 100644 index 000000000..2cc50970f --- /dev/null +++ b/src/libstore/keys.cc @@ -0,0 +1,31 @@ +#include "file-system.hh" +#include "globals.hh" +#include "keys.hh" + +namespace nix { + +PublicKeys getDefaultPublicKeys() +{ + PublicKeys publicKeys; + + // FIXME: filter duplicates + + for (auto s : settings.trustedPublicKeys.get()) { + PublicKey key(s); + publicKeys.emplace(key.name, key); + } + + for (auto secretKeyFile : settings.secretKeyFiles.get()) { + try { + SecretKey secretKey(readFile(secretKeyFile)); + publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); + } catch (SysError & e) { + /* Ignore unreadable key files. That's normal in a + multi-user installation.
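+ The keys already collected from `trusted-public-keys` above are
+ still returned, so signature verification keeps working for
+ users who cannot read the secret key files.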
*/ + } + } + + return publicKeys; +} + +} diff --git a/src/libstore/keys.hh b/src/libstore/keys.hh new file mode 100644 index 000000000..3da19493f --- /dev/null +++ b/src/libstore/keys.hh @@ -0,0 +1,10 @@ +#pragma once +///@file + +#include "signature/local-keys.hh" + +namespace nix { + +PublicKeys getDefaultPublicKeys(); + +} diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 731457354..06bef9d08 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -1,3 +1,4 @@ +#include "legacy-ssh-store.hh" #include "ssh-store-config.hh" #include "archive.hh" #include "pool.hh" @@ -13,432 +14,355 @@ namespace nix { -struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig +std::string LegacySSHStoreConfig::doc() { - using CommonSSHStoreConfig::CommonSSHStoreConfig; + return + #include "legacy-ssh-store.md" + ; +} - const Setting remoteProgram{this, "nix-store", "remote-program", - "Path to the `nix-store` executable on the remote machine."}; - const Setting maxConnections{this, 1, "max-connections", - "Maximum number of concurrent SSH connections."}; - - const std::string name() override { return "SSH Store"; } - - std::string doc() override - { - return - #include "legacy-ssh-store.md" - ; - } -}; - -struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store +struct LegacySSHStore::Connection { - // Hack for getting remote build log output. - // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in - // the documentation - const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; - - struct Connection - { - std::unique_ptr sshConn; - FdSink to; - FdSource from; - ServeProto::Version remoteVersion; - bool good = true; - - /** - * Coercion to `ServeProto::ReadConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::ReadConn () - { - return ServeProto::ReadConn { - .from = from, - .version = remoteVersion, - }; - } - - /* - * Coercion to `ServeProto::WriteConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::WriteConn () - { - return ServeProto::WriteConn { - .to = to, - .version = remoteVersion, - }; - } - }; - - std::string host; - - ref> connections; - - SSHMaster master; - - static std::set uriSchemes() { return {"ssh"}; } - - LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params) - : StoreConfig(params) - , CommonSSHStoreConfig(params) - , LegacySSHStoreConfig(params) - , Store(params) - , host(host) - , connections(make_ref>( - std::max(1, (int) maxConnections), - [this]() { return openConnection(); }, - [](const ref & r) { return r->good; } - )) - , master( - host, - sshKey, - sshPublicHostKey, - // Use SSH master only if using more than 1 connection. - connections->capacity() > 1, - compress, - logFD) - { - } - - ref openConnection() - { - auto conn = make_ref(); - conn->sshConn = master.startCommand( - fmt("%s --serve --write", remoteProgram) - + (remoteStore.get() == "" ? 
"" : " --store " + shellEscape(remoteStore.get()))); - conn->to = FdSink(conn->sshConn->in.get()); - conn->from = FdSource(conn->sshConn->out.get()); - - try { - conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; - conn->to.flush(); - - StringSink saved; - try { - TeeSource tee(conn->from, saved); - unsigned int magic = readInt(tee); - if (magic != SERVE_MAGIC_2) - throw Error("'nix-store --serve' protocol mismatch from '%s'", host); - } catch (SerialisationError & e) { - /* In case the other side is waiting for our input, - close it. */ - conn->sshConn->in.close(); - auto msg = conn->from.drain(); - throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", - host, chomp(saved.s + msg)); - } - conn->remoteVersion = readInt(conn->from); - if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) - throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); - - } catch (EndOfFile & e) { - throw Error("cannot connect to '%1%'", host); - } - - return conn; - }; - - std::string getUri() override - { - return *uriSchemes().begin() + "://" + host; - } - - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override - { - try { - auto conn(connections->get()); - - /* No longer support missing NAR hash */ - assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); - - debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); - - conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)}; - conn->to.flush(); - - auto p = readString(conn->from); - if (p.empty()) return callback(nullptr); - auto path2 = parseStorePath(p); - assert(path == path2); - /* Hash will be set below. FIXME construct ValidPathInfo at end. */ - auto info = std::make_shared(path, Hash::dummy); - - auto deriver = readString(conn->from); - if (deriver != "") - info->deriver = parseStorePath(deriver); - info->references = ServeProto::Serialise::read(*this, *conn); - readLongLong(conn->from); // download size - info->narSize = readLongLong(conn->from); - - { - auto s = readString(conn->from); - if (s == "") - throw Error("NAR hash is now mandatory"); - info->narHash = Hash::parseAnyPrefixed(s); - } - info->ca = ContentAddress::parseOpt(readString(conn->from)); - info->sigs = readStrings(conn->from); - - auto s = readString(conn->from); - assert(s == ""); - - callback(std::move(info)); - } catch (...) { callback.rethrow(); } - } - - void addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) override - { - debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host); - - auto conn(connections->get()); - - if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { - - conn->to - << ServeProto::Command::AddToStoreNar - << printStorePath(info.path) - << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(HashFormat::Base16, false); - ServeProto::write(*this, *conn, info.references); - conn->to - << info.registrationTime - << info.narSize - << info.ultimate - << info.sigs - << renderContentAddress(info.ca); - try { - copyNAR(source, conn->to); - } catch (...) { - conn->good = false; - throw; - } - conn->to.flush(); - - } else { - - conn->to - << ServeProto::Command::ImportPaths - << 1; - try { - copyNAR(source, conn->to); - } catch (...) { - conn->good = false; - throw; - } - conn->to - << exportMagic - << printStorePath(info.path); - ServeProto::write(*this, *conn, info.references); - conn->to - << (info.deriver ? 
printStorePath(*info.deriver) : "") - << 0 - << 0; - conn->to.flush(); - - } - - if (readInt(conn->from) != 1) - throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host); - } - - void narFromPath(const StorePath & path, Sink & sink) override - { - auto conn(connections->get()); - - conn->to << ServeProto::Command::DumpStorePath << printStorePath(path); - conn->to.flush(); - copyNAR(conn->from, sink); - } - - std::optional queryPathFromHashPart(const std::string & hashPart) override - { unsupported("queryPathFromHashPart"); } - - StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override - { unsupported("addToStore"); } - - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override - { unsupported("addTextToStore"); } - -private: - - void putBuildSettings(Connection & conn) - { - conn.to - << settings.maxSilentTime - << settings.buildTimeout; - if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2) - conn.to - << settings.maxLogSize; - if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3) - conn.to - << 0 // buildRepeat hasn't worked for ages anyway - << 0; - - if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) { - conn.to << ((int) settings.keepFailed); - } - } - -public: - - BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) override - { - auto conn(connections->get()); - - conn->to - << ServeProto::Command::BuildDerivation - << printStorePath(drvPath); - writeDerivation(conn->to, *this, drv); - - putBuildSettings(*conn); - - conn->to.flush(); - - return ServeProto::Serialise::read(*this, *conn); - } - - void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override - { - if (evalStore && evalStore.get() != this) - throw Error("building on an SSH store is incompatible with '--eval-store'"); - - auto conn(connections->get()); - - conn->to << ServeProto::Command::BuildPaths; - Strings ss; - for (auto & p : drvPaths) { - auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); - std::visit(overloaded { - [&](const StorePathWithOutputs & s) { - ss.push_back(s.to_string(*this)); - }, - [&](const StorePath & drvPath) { - throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); - }, - [&](std::monostate) { - throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. 
Try using ssh-ng://"); - }, - }, sOrDrvPath); - } - conn->to << ss; - - putBuildSettings(*conn); - - conn->to.flush(); - - BuildResult result; - result.status = (BuildResult::Status) readInt(conn->from); - - if (!result.success()) { - conn->from >> result.errorMsg; - throw Error(result.status, result.errorMsg); - } - } - - void ensurePath(const StorePath & path) override - { unsupported("ensurePath"); } - - virtual ref getFSAccessor(bool requireValidPath) override - { unsupported("getFSAccessor"); } + std::unique_ptr sshConn; + FdSink to; + FdSource from; + ServeProto::Version remoteVersion; + bool good = true; /** - * The default instance would schedule the work on the client side, but - * for consistency with `buildPaths` and `buildDerivation` it should happen - * on the remote side. + * Coercion to `ServeProto::ReadConn`. This makes it easy to use the + * factored out serve protocol searlizers with a + * `LegacySSHStore::Connection`. * - * We make this fail for now so we can add implement this properly later - * without it being a breaking change. + * The serve protocol connection types are unidirectional, unlike + * this type. */ - void repairPath(const StorePath & path) override - { unsupported("repairPath"); } - - void computeFSClosure(const StorePathSet & paths, - StorePathSet & out, bool flipDirection = false, - bool includeOutputs = false, bool includeDerivers = false) override + operator ServeProto::ReadConn () { - if (flipDirection || includeDerivers) { - Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers); - return; - } - - auto conn(connections->get()); - - conn->to - << ServeProto::Command::QueryClosure - << includeOutputs; - ServeProto::write(*this, *conn, paths); - conn->to.flush(); - - for (auto & i : ServeProto::Serialise::read(*this, *conn)) - out.insert(i); + return ServeProto::ReadConn { + .from = from, + .version = remoteVersion, + }; } - StorePathSet queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute = NoSubstitute) override - { - auto conn(connections->get()); - - conn->to - << ServeProto::Command::QueryValidPaths - << false // lock - << maybeSubstitute; - ServeProto::write(*this, *conn, paths); - conn->to.flush(); - - return ServeProto::Serialise::read(*this, *conn); - } - - void connect() override - { - auto conn(connections->get()); - } - - unsigned int getProtocol() override - { - auto conn(connections->get()); - return conn->remoteVersion; - } - - /** - * The legacy ssh protocol doesn't support checking for trusted-user. - * Try using ssh-ng:// instead if you want to know. + /* + * Coercion to `ServeProto::WriteConn`. This makes it easy to use the + * factored out serve protocol searlizers with a + * `LegacySSHStore::Connection`. + * + * The serve protocol connection types are unidirectional, unlike + * this type. 
*/ - std::optional isTrustedClient() override + operator ServeProto::WriteConn () { - return std::nullopt; + return ServeProto::WriteConn { + .to = to, + .version = remoteVersion, + }; } - - void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept override - // TODO: Implement - { unsupported("queryRealisation"); } }; + +LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params) + : StoreConfig(params) + , CommonSSHStoreConfig(params) + , LegacySSHStoreConfig(params) + , Store(params) + , host(host) + , connections(make_ref>( + std::max(1, (int) maxConnections), + [this]() { return openConnection(); }, + [](const ref & r) { return r->good; } + )) + , master( + host, + sshKey, + sshPublicHostKey, + // Use SSH master only if using more than 1 connection. + connections->capacity() > 1, + compress, + logFD) +{ +} + + +ref LegacySSHStore::openConnection() +{ + auto conn = make_ref(); + conn->sshConn = master.startCommand( + fmt("%s --serve --write", remoteProgram) + + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); + conn->to = FdSink(conn->sshConn->in.get()); + conn->from = FdSource(conn->sshConn->out.get()); + + try { + conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; + conn->to.flush(); + + StringSink saved; + try { + TeeSource tee(conn->from, saved); + unsigned int magic = readInt(tee); + if (magic != SERVE_MAGIC_2) + throw Error("'nix-store --serve' protocol mismatch from '%s'", host); + } catch (SerialisationError & e) { + /* In case the other side is waiting for our input, + close it. */ + conn->sshConn->in.close(); + auto msg = conn->from.drain(); + throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", + host, chomp(saved.s + msg)); + } + conn->remoteVersion = readInt(conn->from); + if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) + throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); + + } catch (EndOfFile & e) { + throw Error("cannot connect to '%1%'", host); + } + + return conn; +}; + + +std::string LegacySSHStore::getUri() +{ + return *uriSchemes().begin() + "://" + host; +} + + +void LegacySSHStore::queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept +{ + try { + auto conn(connections->get()); + + /* No longer support missing NAR hash */ + assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); + + debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); + + conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)}; + conn->to.flush(); + + auto p = readString(conn->from); + if (p.empty()) return callback(nullptr); + auto path2 = parseStorePath(p); + assert(path == path2); + auto info = std::make_shared( + path, + ServeProto::Serialise::read(*this, *conn)); + + if (info->narHash == Hash::dummy) + throw Error("NAR hash is now mandatory"); + + auto s = readString(conn->from); + assert(s == ""); + + callback(std::move(info)); + } catch (...) { callback.rethrow(); } +} + + +void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, + RepairFlag repair, CheckSigsFlag checkSigs) +{ + debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host); + + auto conn(connections->get()); + + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { + + conn->to + << ServeProto::Command::AddToStoreNar + << printStorePath(info.path) + << (info.deriver ? 
printStorePath(*info.deriver) : "") + << info.narHash.to_string(HashFormat::Base16, false); + ServeProto::write(*this, *conn, info.references); + conn->to + << info.registrationTime + << info.narSize + << info.ultimate + << info.sigs + << renderContentAddress(info.ca); + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to.flush(); + + } else { + + conn->to + << ServeProto::Command::ImportPaths + << 1; + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to + << exportMagic + << printStorePath(info.path); + ServeProto::write(*this, *conn, info.references); + conn->to + << (info.deriver ? printStorePath(*info.deriver) : "") + << 0 + << 0; + conn->to.flush(); + + } + + if (readInt(conn->from) != 1) + throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host); +} + + +void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) +{ + auto conn(connections->get()); + + conn->to << ServeProto::Command::DumpStorePath << printStorePath(path); + conn->to.flush(); + copyNAR(conn->from, sink); +} + + +void LegacySSHStore::putBuildSettings(Connection & conn) +{ + ServeProto::write(*this, conn, ServeProto::BuildOptions { + .maxSilentTime = settings.maxSilentTime, + .buildTimeout = settings.buildTimeout, + .maxLogSize = settings.maxLogSize, + .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway + .enforceDeterminism = 0, + .keepFailed = settings.keepFailed, + }); +} + + +BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, + BuildMode buildMode) +{ + auto conn(connections->get()); + + conn->to + << ServeProto::Command::BuildDerivation + << printStorePath(drvPath); + writeDerivation(conn->to, *this, drv); + + putBuildSettings(*conn); + + conn->to.flush(); + + return ServeProto::Serialise::read(*this, *conn); +} + + +void LegacySSHStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) +{ + if (evalStore && evalStore.get() != this) + throw Error("building on an SSH store is incompatible with '--eval-store'"); + + auto conn(connections->get()); + + conn->to << ServeProto::Command::BuildPaths; + Strings ss; + for (auto & p : drvPaths) { + auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); + std::visit(overloaded { + [&](const StorePathWithOutputs & s) { + ss.push_back(s.to_string(*this)); + }, + [&](const StorePath & drvPath) { + throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); + }, + [&](std::monostate) { + throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. 
Try using ssh-ng://"); + }, + }, sOrDrvPath); + } + conn->to << ss; + + putBuildSettings(*conn); + + conn->to.flush(); + + BuildResult result; + result.status = (BuildResult::Status) readInt(conn->from); + + if (!result.success()) { + conn->from >> result.errorMsg; + throw Error(result.status, result.errorMsg); + } +} + + +void LegacySSHStore::computeFSClosure(const StorePathSet & paths, + StorePathSet & out, bool flipDirection, + bool includeOutputs, bool includeDerivers) +{ + if (flipDirection || includeDerivers) { + Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers); + return; + } + + auto conn(connections->get()); + + conn->to + << ServeProto::Command::QueryClosure + << includeOutputs; + ServeProto::write(*this, *conn, paths); + conn->to.flush(); + + for (auto & i : ServeProto::Serialise::read(*this, *conn)) + out.insert(i); +} + + +StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, + SubstituteFlag maybeSubstitute) +{ + auto conn(connections->get()); + + conn->to + << ServeProto::Command::QueryValidPaths + << false // lock + << maybeSubstitute; + ServeProto::write(*this, *conn, paths); + conn->to.flush(); + + return ServeProto::Serialise::read(*this, *conn); +} + + +void LegacySSHStore::connect() +{ + auto conn(connections->get()); +} + + +unsigned int LegacySSHStore::getProtocol() +{ + auto conn(connections->get()); + return conn->remoteVersion; +} + + +/** + * The legacy ssh protocol doesn't support checking for trusted-user. + * Try using ssh-ng:// instead if you want to know. + */ +std::optional isTrustedClient() +{ + return std::nullopt; +} + + static RegisterStoreImplementation regLegacySSHStore; } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh new file mode 100644 index 000000000..c5a3ce677 --- /dev/null +++ b/src/libstore/legacy-ssh-store.hh @@ -0,0 +1,126 @@ +#pragma once +///@file + +#include "ssh-store-config.hh" +#include "store-api.hh" +#include "ssh.hh" +#include "callback.hh" +#include "pool.hh" + +namespace nix { + +struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig +{ + using CommonSSHStoreConfig::CommonSSHStoreConfig; + + const Setting remoteProgram{this, "nix-store", "remote-program", + "Path to the `nix-store` executable on the remote machine."}; + + const Setting maxConnections{this, 1, "max-connections", + "Maximum number of concurrent SSH connections."}; + + const std::string name() override { return "SSH Store"; } + + std::string doc() override; +}; + +struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store +{ + // Hack for getting remote build log output. 
+ // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in + // the documentation + const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; + + struct Connection; + + std::string host; + + ref> connections; + + SSHMaster master; + + static std::set uriSchemes() { return {"ssh"}; } + + LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params); + + ref openConnection(); + + std::string getUri() override; + + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override; + + void addToStore(const ValidPathInfo & info, Source & source, + RepairFlag repair, CheckSigsFlag checkSigs) override; + + void narFromPath(const StorePath & path, Sink & sink) override; + + std::optional queryPathFromHashPart(const std::string & hashPart) override + { unsupported("queryPathFromHashPart"); } + + StorePath addToStore( + std::string_view name, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter, + RepairFlag repair) override + { unsupported("addToStore"); } + +private: + + void putBuildSettings(Connection & conn); + +public: + + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, + BuildMode buildMode) override; + + void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override; + + void ensurePath(const StorePath & path) override + { unsupported("ensurePath"); } + + virtual ref getFSAccessor(bool requireValidPath) override + { unsupported("getFSAccessor"); } + + /** + * The default instance would schedule the work on the client side, but + * for consistency with `buildPaths` and `buildDerivation` it should happen + * on the remote side. + * + * We make this fail for now so we can implement this properly later + * without it being a breaking change. + */ + void repairPath(const StorePath & path) override + { unsupported("repairPath"); } + + void computeFSClosure(const StorePathSet & paths, + StorePathSet & out, bool flipDirection = false, + bool includeOutputs = false, bool includeDerivers = false) override; + + StorePathSet queryValidPaths(const StorePathSet & paths, + SubstituteFlag maybeSubstitute = NoSubstitute) override; + + void connect() override; + + unsigned int getProtocol() override; + + /** + * The legacy ssh protocol doesn't support checking for trusted-user. + * Try using ssh-ng:// instead if you want to know.
+ */ + std::optional isTrustedClient() override + { + return std::nullopt; + } + + void queryRealisationUncached(const DrvOutput &, + Callback> callback) noexcept override + // TODO: Implement + { unsupported("queryRealisation"); } +}; + +} diff --git a/src/libstore/length-prefixed-protocol-helper.hh b/src/libstore/length-prefixed-protocol-helper.hh index 4061b0cd6..0cf950a47 100644 --- a/src/libstore/length-prefixed-protocol-helper.hh +++ b/src/libstore/length-prefixed-protocol-helper.hh @@ -10,7 +10,7 @@ namespace nix { -class Store; +struct StoreDirConfig; /** * Reusable serialisers for serialization container types in a @@ -44,8 +44,8 @@ struct LengthPrefixedProtoHelper; #define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \ struct LengthPrefixedProtoHelper< Inner, T > \ { \ - static T read(const Store & store, typename Inner::ReadConn conn); \ - static void write(const Store & store, typename Inner::WriteConn conn, const T & str); \ + static T read(const StoreDirConfig & store, typename Inner::ReadConn conn); \ + static void write(const StoreDirConfig & store, typename Inner::WriteConn conn, const T & str); \ private: \ template using S = typename Inner::template Serialise; \ } @@ -67,7 +67,7 @@ LENGTH_PREFIXED_PROTO_HELPER(Inner, _X); template std::vector LengthPrefixedProtoHelper>::read( - const Store & store, typename Inner::ReadConn conn) + const StoreDirConfig & store, typename Inner::ReadConn conn) { std::vector resSet; auto size = readNum(conn.from); @@ -80,7 +80,7 @@ LengthPrefixedProtoHelper>::read( template void LengthPrefixedProtoHelper>::write( - const Store & store, typename Inner::WriteConn conn, const std::vector & resSet) + const StoreDirConfig & store, typename Inner::WriteConn conn, const std::vector & resSet) { conn.to << resSet.size(); for (auto & key : resSet) { @@ -91,7 +91,7 @@ LengthPrefixedProtoHelper>::write( template std::set LengthPrefixedProtoHelper>::read( - const Store & store, typename Inner::ReadConn conn) + const StoreDirConfig & store, typename Inner::ReadConn conn) { std::set resSet; auto size = readNum(conn.from); @@ -104,7 +104,7 @@ LengthPrefixedProtoHelper>::read( template void LengthPrefixedProtoHelper>::write( - const Store & store, typename Inner::WriteConn conn, const std::set & resSet) + const StoreDirConfig & store, typename Inner::WriteConn conn, const std::set & resSet) { conn.to << resSet.size(); for (auto & key : resSet) { @@ -115,7 +115,7 @@ LengthPrefixedProtoHelper>::write( template std::map LengthPrefixedProtoHelper>::read( - const Store & store, typename Inner::ReadConn conn) + const StoreDirConfig & store, typename Inner::ReadConn conn) { std::map resMap; auto size = readNum(conn.from); @@ -130,7 +130,7 @@ LengthPrefixedProtoHelper>::read( template void LengthPrefixedProtoHelper>::write( - const Store & store, typename Inner::WriteConn conn, const std::map & resMap) + const StoreDirConfig & store, typename Inner::WriteConn conn, const std::map & resMap) { conn.to << resMap.size(); for (auto & i : resMap) { @@ -142,7 +142,7 @@ LengthPrefixedProtoHelper>::write( template std::tuple LengthPrefixedProtoHelper>::read( - const Store & store, typename Inner::ReadConn conn) + const StoreDirConfig & store, typename Inner::ReadConn conn) { return std::tuple { S::read(store, conn)..., @@ -152,7 +152,7 @@ LengthPrefixedProtoHelper>::read( template void LengthPrefixedProtoHelper>::write( - const Store & store, typename Inner::WriteConn conn, const std::tuple & res) + const StoreDirConfig & store, typename Inner::WriteConn conn, const std::tuple & 
res) { std::apply([&](const Us &... args) { (S::write(store, conn, args), ...); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 2a3582ad8..0f3c37c8a 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -4,6 +4,7 @@ #include "pathlocks.hh" #include "worker-protocol.hh" #include "derivations.hh" +#include "realisation.hh" #include "nar-info.hh" #include "references.hh" #include "callback.hh" @@ -11,11 +12,16 @@ #include "finally.hh" #include "compression.hh" #include "signals.hh" +#include "posix-fs-canonicalise.hh" +#include "posix-source-accessor.hh" +#include "keys.hh" #include #include #include +#include +#include #include #include #include @@ -33,7 +39,6 @@ #include #include #include -#include #endif #ifdef __CYGWIN__ @@ -581,164 +586,6 @@ void LocalStore::makeStoreWritable() } -const time_t mtimeStore = 1; /* 1 second into the epoch */ - - -static void canonicaliseTimestampAndPermissions(const Path & path, const struct stat & st) -{ - if (!S_ISLNK(st.st_mode)) { - - /* Mask out all type related bits. */ - mode_t mode = st.st_mode & ~S_IFMT; - - if (mode != 0444 && mode != 0555) { - mode = (st.st_mode & S_IFMT) - | 0444 - | (st.st_mode & S_IXUSR ? 0111 : 0); - if (chmod(path.c_str(), mode) == -1) - throw SysError("changing mode of '%1%' to %2$o", path, mode); - } - - } - - if (st.st_mtime != mtimeStore) { - struct timeval times[2]; - times[0].tv_sec = st.st_atime; - times[0].tv_usec = 0; - times[1].tv_sec = mtimeStore; - times[1].tv_usec = 0; -#if HAVE_LUTIMES - if (lutimes(path.c_str(), times) == -1) - if (errno != ENOSYS || - (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)) -#else - if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1) -#endif - throw SysError("changing modification time of '%1%'", path); - } -} - - -void canonicaliseTimestampAndPermissions(const Path & path) -{ - canonicaliseTimestampAndPermissions(path, lstat(path)); -} - - -static void canonicalisePathMetaData_( - const Path & path, - std::optional> uidRange, - InodesSeen & inodesSeen) -{ - checkInterrupt(); - -#if __APPLE__ - /* Remove flags, in particular UF_IMMUTABLE which would prevent - the file from being garbage-collected. FIXME: Use - setattrlist() to remove other attributes as well. */ - if (lchflags(path.c_str(), 0)) { - if (errno != ENOTSUP) - throw SysError("clearing flags of path '%1%'", path); - } -#endif - - auto st = lstat(path); - - /* Really make sure that the path is of a supported type. */ - if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) - throw Error("file '%1%' has an unsupported type", path); - -#if __linux__ - /* Remove extended attributes / ACLs. */ - ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); - - if (eaSize < 0) { - if (errno != ENOTSUP && errno != ENODATA) - throw SysError("querying extended attributes of '%s'", path); - } else if (eaSize > 0) { - std::vector eaBuf(eaSize); - - if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0) - throw SysError("querying extended attributes of '%s'", path); - - for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) { - if (settings.ignoredAcls.get().count(eaName)) continue; - if (lremovexattr(path.c_str(), eaName.c_str()) == -1) - throw SysError("removing extended attribute '%s' from '%s'", eaName, path); - } - } -#endif - - /* Fail if the file is not owned by the build user. This prevents - us from messing up the ownership/permissions of files - hard-linked into the output (e.g. 
"ln /etc/shadow $out/foo"). - However, ignore files that we chown'ed ourselves previously to - ensure that we don't fail on hard links within the same build - (i.e. "touch $out/foo; ln $out/foo $out/bar"). */ - if (uidRange && (st.st_uid < uidRange->first || st.st_uid > uidRange->second)) { - if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino))) - throw BuildError("invalid ownership on file '%1%'", path); - mode_t mode = st.st_mode & ~S_IFMT; - assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); - return; - } - - inodesSeen.insert(Inode(st.st_dev, st.st_ino)); - - canonicaliseTimestampAndPermissions(path, st); - - /* Change ownership to the current uid. If it's a symlink, use - lchown if available, otherwise don't bother. Wrong ownership - of a symlink doesn't matter, since the owning user can't change - the symlink and can't delete it because the directory is not - writable. The only exception is top-level paths in the Nix - store (since that directory is group-writable for the Nix build - users group); we check for this case below. */ - if (st.st_uid != geteuid()) { -#if HAVE_LCHOWN - if (lchown(path.c_str(), geteuid(), getegid()) == -1) -#else - if (!S_ISLNK(st.st_mode) && - chown(path.c_str(), geteuid(), getegid()) == -1) -#endif - throw SysError("changing owner of '%1%' to %2%", - path, geteuid()); - } - - if (S_ISDIR(st.st_mode)) { - DirEntries entries = readDirectory(path); - for (auto & i : entries) - canonicalisePathMetaData_(path + "/" + i.name, uidRange, inodesSeen); - } -} - - -void canonicalisePathMetaData( - const Path & path, - std::optional> uidRange, - InodesSeen & inodesSeen) -{ - canonicalisePathMetaData_(path, uidRange, inodesSeen); - - /* On platforms that don't have lchown(), the top-level path can't - be a symlink, since we can't change its ownership. */ - auto st = lstat(path); - - if (st.st_uid != geteuid()) { - assert(S_ISLNK(st.st_mode)); - throw Error("wrong ownership of top-level store path '%1%'", path); - } -} - - -void canonicalisePathMetaData(const Path & path, - std::optional> uidRange) -{ - InodesSeen inodesSeen; - canonicalisePathMetaData(path, uidRange, inodesSeen); -} - - void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) { experimentalFeatureSettings.require(Xp::CaDerivations); @@ -1112,7 +959,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) StorePathSet paths; for (auto & [_, i] : infos) { - assert(i.narHash.type == htSHA256); + assert(i.narHash.algo == HashAlgorithm::SHA256); if (isValidPath_(*state, i.path)) updatePathInfo(*state, i); else @@ -1226,7 +1073,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, /* While restoring the path from the NAR, compute the hash of the NAR. 
*/ - HashSink hashSink(htSHA256); + HashSink hashSink(HashAlgorithm::SHA256); TeeSource wrapperSource { source, hashSink }; @@ -1237,7 +1084,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (hashResult.first != info.narHash) throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true)); + printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true)); if (hashResult.second != info.narSize) throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s", @@ -1245,16 +1092,27 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (info.ca) { auto & specified = *info.ca; - auto actualHash = hashCAPath( - specified.method, - specified.hash.type, - info.path - ); + auto actualHash = ({ + HashModuloSink caSink { + specified.hash.algo, + std::string { info.path.hashPart() }, + }; + PosixSourceAccessor accessor; + dumpPath( + *getFSAccessor(false), + CanonPath { printStorePath(info.path) }, + caSink, + specified.method.getFileIngestionMethod()); + ContentAddress { + .method = specified.method, + .hash = caSink.finish().first, + }; + }); if (specified.hash != actualHash.hash) { throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), - specified.hash.to_string(HashFormat::Base32, true), - actualHash.hash.to_string(HashFormat::Base32, true)); + specified.hash.to_string(HashFormat::Nix32, true), + actualHash.hash.to_string(HashFormat::Nix32, true)); } } @@ -1272,8 +1130,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, } -StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) +StorePath LocalStore::addToStoreFromDump( + Source & source0, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { /* For computing the store path. */ auto hashSink = std::make_unique(hashAlgo); @@ -1287,7 +1150,11 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name path. 
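(The in-memory copy lives in a plain `malloc`-ed buffer grown with `realloc` below, so newly reserved bytes are not zero-initialised the way a `std::string` resize would initialise them.)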
*/ bool inMemory = false; - std::string dump; + struct Free { + void operator()(void* v) { free(v); } + }; + std::unique_ptr dumpBuffer(nullptr); + std::string_view dump; /* Fill out buffer, and decide whether we are working strictly in memory based on whether we break out because the buffer is full @@ -1296,13 +1163,18 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name auto oldSize = dump.size(); constexpr size_t chunkSize = 65536; auto want = std::min(chunkSize, settings.narBufferSize - oldSize); - dump.resize(oldSize + want); + if (auto tmp = realloc(dumpBuffer.get(), oldSize + want)) { + dumpBuffer.release(); + dumpBuffer.reset((char*) tmp); + } else { + throw std::bad_alloc(); + } auto got = 0; Finally cleanup([&]() { - dump.resize(oldSize + got); + dump = {dumpBuffer.get(), dump.size() + got}; }); try { - got = source.read(dump.data() + oldSize, want); + got = source.read(dumpBuffer.get() + oldSize, want); } catch (EndOfFile &) { inMemory = true; break; @@ -1323,25 +1195,22 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name delTempDir = std::make_unique(tempDir); tempPath = tempDir + "/x"; - if (method == FileIngestionMethod::Recursive) - restorePath(tempPath, bothSource); - else - writeFile(tempPath, bothSource); + restorePath(tempPath, bothSource, method.getFileIngestionMethod()); - dump.clear(); + dumpBuffer.reset(); + dump = {}; } auto [hash, size] = hashSink->finish(); - ContentAddressWithReferences desc = FixedOutputInfo { - .method = method, - .hash = hash, - .references = { + auto desc = ContentAddressWithReferences::fromParts( + method, + hash, + { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus .self = false, - }, - }; + }); auto dstPath = makeFixedOutputPathFromCA(name, desc); @@ -1364,11 +1233,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name if (inMemory) { StringSource dumpSource { dump }; - /* Restore from the NAR in memory. */ - if (method == FileIngestionMethod::Recursive) - restorePath(realPath, dumpSource); - else - writeFile(realPath, dumpSource); + /* Restore from the buffer in memory. */ + restorePath(realPath, dumpSource, method.getFileIngestionMethod()); } else { /* Move the temporary path we restored above. */ moveFile(tempPath, realPath); @@ -1377,8 +1243,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name /* For computing the nar hash. In recursive SHA-256 mode, this is the same as the store hash, so no need to do it again. 
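(If the ingestion method is not NAR-based or the hash algorithm is not SHA-256, the restored path is re-serialised as a NAR and hashed a second time below.)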
*/ auto narHash = std::pair { hash, size }; - if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) { - HashSink narSink { htSHA256 }; + if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) { + HashSink narSink { HashAlgorithm::SHA256 }; dumpPath(realPath, narSink); narHash = narSink.finish(); } @@ -1404,58 +1270,6 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name } -StorePath LocalStore::addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, RepairFlag repair) -{ - auto hash = hashString(htSHA256, s); - auto dstPath = makeTextPath(name, TextInfo { - .hash = hash, - .references = references, - }); - - addTempRoot(dstPath); - - if (repair || !isValidPath(dstPath)) { - - auto realPath = Store::toRealPath(dstPath); - - PathLocks outputLock({realPath}); - - if (repair || !isValidPath(dstPath)) { - - deletePath(realPath); - - autoGC(); - - writeFile(realPath, s); - - canonicalisePathMetaData(realPath, {}); - - StringSink sink; - dumpString(s, sink); - auto narHash = hashString(htSHA256, sink.s); - - optimisePath(realPath, repair); - - ValidPathInfo info { dstPath, narHash }; - info.narSize = sink.s.size(); - info.references = references; - info.ca = { - .method = TextIngestionMethod {}, - .hash = hash, - }; - registerValidPath(info); - } - - outputLock.setDeletion(true); - } - - return dstPath; -} - - /* Create a temporary directory in the store that won't be garbage-collected until the returned FD is closed. */ std::pair LocalStore::createTempDirInStore() @@ -1546,7 +1360,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) for (auto & link : readDirectory(linksDir)) { printMsg(lvlTalkative, "checking contents of '%s'", link.name); Path linkPath = linksDir + "/" + link.name; - std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false); + PosixSourceAccessor accessor; + std::string hash = hashPath( + accessor, CanonPath { linkPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false); if (hash != link.name) { printError("link '%s' was modified! expected hash '%s', got '%s'", linkPath, link.name, hash); @@ -1563,7 +1380,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) printInfo("checking store hashes..."); - Hash nullHash(htSHA256); + Hash nullHash(HashAlgorithm::SHA256); for (auto & i : validPaths) { try { @@ -1572,14 +1389,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) /* Check the content hash (optionally - slow). */ printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); - auto hashSink = HashSink(info->narHash.type); + auto hashSink = HashSink(info->narHash.algo); dumpPath(Store::toRealPath(i), hashSink); auto current = hashSink.finish(); if (info->narHash != nullHash && info->narHash != current.first) { printError("path '%s' was modified! 
expected hash '%s', got '%s'", - printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true)); + printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true)); if (repair) repairPath(i); else errors = true; } else { @@ -1762,7 +1579,8 @@ void LocalStore::signRealisation(Realisation & realisation) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - realisation.sign(secretKey); + LocalSigner signer(std::move(secretKey)); + realisation.sign(signer); } } @@ -1774,7 +1592,8 @@ void LocalStore::signPathInfo(ValidPathInfo & info) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - info.sign(*this, secretKey); + LocalSigner signer(std::move(secretKey)); + info.sign(*this, signer); } } @@ -1853,42 +1672,6 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id, } } -ContentAddress LocalStore::hashCAPath( - const ContentAddressMethod & method, const HashType & hashType, - const StorePath & path) -{ - return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart()); -} - -ContentAddress LocalStore::hashCAPath( - const ContentAddressMethod & method, - const HashType & hashType, - const Path & path, - const std::string_view pathHash -) -{ - HashModuloSink caSink ( hashType, std::string(pathHash) ); - std::visit(overloaded { - [&](const TextIngestionMethod &) { - readFile(path, caSink); - }, - [&](const FileIngestionMethod & m2) { - switch (m2) { - case FileIngestionMethod::Recursive: - dumpPath(path, caSink); - break; - case FileIngestionMethod::Flat: - readFile(path, caSink); - break; - } - }, - }, method.raw); - return ContentAddress { - .method = method, - .hash = caSink.finish().first, - }; -} - void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log) { assert(drvPath.isDerivation()); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 6d589bee5..ba56d3ead 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -177,12 +177,11 @@ public: void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; - StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override; - - StorePath addTextToStore( + StorePath addToStoreFromDump( + Source & dump, std::string_view name, - std::string_view s, + ContentAddressMethod method, + HashAlgorithm hashAlgo, const StorePathSet & references, RepairFlag repair) override; @@ -350,19 +349,6 @@ private: void signPathInfo(ValidPathInfo & info); void signRealisation(Realisation &); - // XXX: Make a generic `Store` method - ContentAddress hashCAPath( - const ContentAddressMethod & method, - const HashType & hashType, - const StorePath & path); - - ContentAddress hashCAPath( - const ContentAddressMethod & method, - const HashType & hashType, - const Path & path, - const std::string_view pathHash - ); - void addBuildLog(const StorePath & drvPath, std::string_view log) override; friend struct LocalDerivationGoal; @@ -371,38 +357,4 @@ private: friend struct DerivationGoal; }; - -typedef std::pair Inode; -typedef std::set InodesSeen; - - -/** - * "Fix", or canonicalise, the meta-data of the files in a store path - * after it has been built. 
In particular: - * - * - the last modification date on each file is set to 1 (i.e., - * 00:00:01 1/1/1970 UTC) - * - * - the permissions are set of 444 or 555 (i.e., read-only with or - * without execute permission; setuid bits etc. are cleared) - * - * - the owner and group are set to the Nix user and group, if we're - * running as root. - * - * If uidRange is not empty, this function will throw an error if it - * encounters files owned by a user outside of the closed interval - * [uidRange->first, uidRange->second]. - */ -void canonicalisePathMetaData( - const Path & path, - std::optional> uidRange, - InodesSeen & inodesSeen); -void canonicalisePathMetaData( - const Path & path, - std::optional> uidRange); - -void canonicaliseTimestampAndPermissions(const Path & path); - -MakeError(PathInUse, Error); - } diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 0be0bf310..f86643849 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc) libstore_LIBS = libutil -libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread +libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS) ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif @@ -16,15 +16,15 @@ endif $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox))) ifeq ($(ENABLE_S3), 1) - libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp + libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp endif ifdef HOST_SOLARIS - libstore_LDFLAGS += -lsocket + libstore_LDFLAGS += -lsocket endif ifeq ($(HAVE_SECCOMP), 1) - libstore_LDFLAGS += $(LIBSECCOMP_LIBS) + libstore_LDFLAGS += $(LIBSECCOMP_LIBS) endif libstore_CXXFLAGS += \ @@ -48,9 +48,9 @@ $(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell) $(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp @mv $@.tmp $@ else -ifneq ($(sandbox_shell),) -libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" -endif + ifneq ($(sandbox_shell),) + libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" + endif endif $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh @@ -59,7 +59,7 @@ $(d)/build.cc: clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh -$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644)) +$(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644)) $(foreach i, $(wildcard src/libstore/builtins/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644))) diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc index 253609ed2..170fe67b9 100644 --- a/src/libstore/make-content-addressed.cc +++ b/src/libstore/make-content-addressed.cc @@ -43,7 +43,7 @@ std::map makeContentAddressed( sink.s = rewriteStrings(sink.s, rewrites); - HashModuloSink hashModuloSink(htSHA256, oldHashPart); + HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart); hashModuloSink(sink.s); auto narModuloHash = hashModuloSink.finish().first; @@ -66,7 +66,7 @@ std::map makeContentAddressed( rsink2(sink.s); rsink2.flush(); - info.narHash = hashString(htSHA256, sink2.s); + info.narHash = hashString(HashAlgorithm::SHA256, sink2.s); info.narSize = sink.s.size(); StringSource source(sink2.s); diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 1035691c7..cc8ad3d02 100644 --- 
a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -4,6 +4,7 @@ #include "local-store.hh" #include "store-api.hh" #include "thread-pool.hh" +#include "realisation.hh" #include "topo-sort.hh" #include "callback.hh" #include "closure.hh" @@ -330,8 +331,11 @@ std::map drvOutputReferences( std::map drvOutputReferences( Store & store, const Derivation & drv, - const StorePath & outputPath) + const StorePath & outputPath, + Store * evalStore_) { + auto & evalStore = evalStore_ ? *evalStore_ : store; + std::set inputRealisations; std::function::ChildNode &)> accumRealisations; @@ -339,7 +343,7 @@ std::map drvOutputReferences( accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { if (!inputNode.value.empty()) { auto outputHashes = - staticOutputHashes(store, store.readDerivation(inputDrv)); + staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv)); for (const auto & outputName : inputNode.value) { auto outputHash = get(outputHashes, outputName); if (!outputHash) @@ -361,7 +365,7 @@ std::map drvOutputReferences( SingleDerivedPath next = SingleDerivedPath::Built { d, outputName }; accumRealisations( // TODO deep resolutions for dynamic derivations, issue #8947, would go here. - resolveDerivedPath(store, next), + resolveDerivedPath(store, next, evalStore_), childNode); } } diff --git a/src/libstore/mounted-ssh-store.md b/src/libstore/mounted-ssh-store.md new file mode 100644 index 000000000..1ebfe3081 --- /dev/null +++ b/src/libstore/mounted-ssh-store.md @@ -0,0 +1,18 @@ +R"( + +**Store URL format**: `mounted-ssh-ng://[username@]hostname` + +Experimental store type that allows full access to a Nix store on a remote machine, +and additionally requires that the store be mounted in the local file system. + +The mounting of that store is not managed by Nix, and must be managed manually. +It could be accomplished with SSHFS or NFS, for example. + +The local file system is used to optimize certain operations. +For example, rather than serializing Nix archives and sending them over the Nix channel, +we can directly access the file system data via the mount-point. + +The local file system is also used to make certain operations possible that wouldn't otherwise be. +For example, persistent GC roots can be created if they reside on the same file system as the remote store: +the remote side will create the symlinks necessary to avoid race conditions. +)" diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index e50c15939..310105c75 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -333,9 +333,9 @@ public: (std::string(info->path.name())) (narInfo ? narInfo->url : "", narInfo != 0) (narInfo ? narInfo->compression : "", narInfo != 0) - (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash) + (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash) (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) - (info->narHash.to_string(HashFormat::Base32, true)) + (info->narHash.to_string(HashFormat::Nix32, true)) (info->narSize) (concatStringsSep(" ", info->shortRefs())) (info->deriver ?
std::string(info->deriver->to_string()) : "", (bool) info->deriver) diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 1060a6c8b..d9618d04c 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -113,11 +113,11 @@ std::string NarInfo::to_string(const Store & store) const res += "URL: " + url + "\n"; assert(compression != ""); res += "Compression: " + compression + "\n"; - assert(fileHash && fileHash->type == htSHA256); - res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n"; + assert(fileHash && fileHash->algo == HashAlgorithm::SHA256); + res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; - assert(narHash.type == htSHA256); - res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n"; + assert(narHash.algo == HashAlgorithm::SHA256); + res += "NarHash: " + narHash.to_string(HashFormat::Nix32, true) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index a4ac413b3..a494e6ecc 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -1,6 +1,8 @@ #include "local-store.hh" #include "globals.hh" #include "signals.hh" +#include "posix-fs-canonicalise.hh" +#include "posix-source-accessor.hh" #include #include @@ -145,17 +147,27 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, Also note that if `path' is a symlink, then we're hashing the contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ - Hash hash = hashPath(htSHA256, path).first; - debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true)); + Hash hash = ({ + PosixSourceAccessor accessor; + hashPath( + accessor, CanonPath { path }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first; + }); + debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); /* Check if this is a known hash. */ - Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false); + Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false); /* Maybe delete the link, if it has been corrupted. */ if (pathExists(linkPath)) { auto stLink = lstat(linkPath); if (st.st_size != stLink.st_size - || (repair && hash != hashPath(htSHA256, linkPath).first)) + || (repair && hash != ({ + PosixSourceAccessor accessor; + hashPath( + accessor, CanonPath { linkPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first; + }))) { // XXX: Consider overwriting linkPath with our valid version. warn("removing corrupted link '%s'", linkPath); diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 73e55a96c..72f45143d 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -146,7 +146,7 @@ static nlohmann::json pathInfoToJSON( auto info = store.queryPathInfo(storePath); auto & jsonPath = jsonList.emplace_back( - info->toJSON(store, false, HashFormat::Base32)); + info->toJSON(store, false, HashFormat::Nix32)); // Add the path to the object whose metadata we are including. 
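For context on the two hunks above: `HashFormat::Nix32` is the new name for Nix's custom base-32 encoding (formerly `HashFormat::Base32`); only the enumerator is renamed, the alphabet and encoding are unchanged. As a reminder of what `NarInfo::to_string` is assembling, here is a minimal, self-contained sketch of the same key/value layout; every value below is a hypothetical placeholder, not real store data.

```cpp
#include <iostream>
#include <string>

// Emit a narinfo-style record following the field order in
// NarInfo::to_string above. All values are hypothetical placeholders.
int main()
{
    std::string res;
    res += "URL: nar/00000000000000000000000000000000.nar.xz\n";  // hypothetical
    res += "Compression: xz\n";
    res += "FileHash: sha256:00000000000000000000000000000000\n"; // hypothetical (Nix32 digest)
    res += "FileSize: 1234\n";
    res += "NarHash: sha256:00000000000000000000000000000000\n";  // hypothetical (Nix32 digest)
    res += "NarSize: 5678\n";
    res += "References: \n";                                      // empty reference list
    std::cout << res;
    return 0;
}
```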
jsonPath["path"] = store.printStorePath(storePath); diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 2d7dc972f..d82ccd0c9 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -31,16 +31,16 @@ std::string ValidPathInfo::fingerprint(const Store & store) const throw Error("cannot calculate fingerprint of path '%s' because its size is not known", store.printStorePath(path)); return - "1;" + store.printStorePath(path) + ";" - + narHash.to_string(HashFormat::Base32, true) + ";" - + std::to_string(narSize) + ";" + "1;" + store.printStorePath(path) + ";" + + narHash.to_string(HashFormat::Nix32, true) + ";" + + std::to_string(narSize) + ";" + concatStringsSep(",", store.printStorePathSet(references)); } -void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey) +void ValidPathInfo::sign(const Store & store, const Signer & signer) { - sigs.insert(secretKey.signDetached(fingerprint(store))); + sigs.insert(signer.signDetached(fingerprint(store))); } std::optional ValidPathInfo::contentAddressWithReferences() const diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh index 077abc7e1..b6dc0855d 100644 --- a/src/libstore/path-info.hh +++ b/src/libstore/path-info.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "crypto.hh" +#include "signature/signer.hh" #include "path.hh" #include "hash.hh" #include "content-address.hh" @@ -107,7 +107,7 @@ struct ValidPathInfo : UnkeyedValidPathInfo { */ std::string fingerprint(const Store & store) const; - void sign(const Store & store, const SecretKey & secretKey); + void sign(const Store & store, const Signer & signer); /** * @return The `ContentAddressWithReferences` that determines the diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc index 274b596c0..15f52ec9d 100644 --- a/src/libstore/path-references.cc +++ b/src/libstore/path-references.cc @@ -49,7 +49,7 @@ std::pair scanForReferences( const std::string & path, const StorePathSet & refs) { - HashSink hashSink { htSHA256 }; + HashSink hashSink { HashAlgorithm::SHA256 }; auto found = scanForReferences(hashSink, path, refs); auto hash = hashSink.finish(); return std::pair(found, hash); diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index af6837370..026e37647 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -5,7 +5,7 @@ namespace nix { -std::string StorePathWithOutputs::to_string(const Store & store) const +std::string StorePathWithOutputs::to_string(const StoreDirConfig & store) const { return outputs.empty() ? store.printStorePath(path) @@ -85,7 +85,7 @@ std::pair parsePathWithOutputs(std::string_view s) } -StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view pathWithOutputs) +StorePathWithOutputs parsePathWithOutputs(const StoreDirConfig & store, std::string_view pathWithOutputs) { auto [path, outputs] = parsePathWithOutputs(pathWithOutputs); return StorePathWithOutputs { store.parseStorePath(path), std::move(outputs) }; diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index 57e03252d..5f76a583a 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -6,6 +6,8 @@ namespace nix { +struct StoreDirConfig; + /** * This is a deprecated old type just for use by the old CLI, and older * versions of the RPC protocols. 
In new code don't use it; you want @@ -19,7 +21,7 @@ struct StorePathWithOutputs StorePath path; std::set outputs; - std::string to_string(const Store & store) const; + std::string to_string(const StoreDirConfig & store) const; DerivedPath toDerivedPath() const; @@ -32,14 +34,14 @@ std::vector toDerivedPaths(const std::vector) std::pair parsePathWithOutputs(std::string_view s); -class Store; - /** * Split a string specifying a derivation and a set of outputs * (/nix/store/hash-foo!out1,out2,...) into the derivation path * and the outputs. */ -StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view pathWithOutputs); +StorePathWithOutputs parsePathWithOutputs(const StoreDirConfig & store, std::string_view pathWithOutputs); + +class Store; StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs); diff --git a/src/libstore/path.cc b/src/libstore/path.cc index ec3e53232..a15a78545 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -1,6 +1,4 @@ -#include "store-api.hh" - -#include +#include "store-dir-config.hh" namespace nix { @@ -35,7 +33,7 @@ StorePath::StorePath(std::string_view _baseName) } StorePath::StorePath(const Hash & hash, std::string_view _name) - : baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name))) + : baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name))) { checkName(baseName, name()); } @@ -49,12 +47,10 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x"); StorePath StorePath::random(std::string_view name) { - Hash hash(htSHA1); - randombytes_buf(hash.hash, hash.hashSize); - return StorePath(hash, name); + return StorePath(Hash::random(HashAlgorithm::SHA1), name); } -StorePath Store::parseStorePath(std::string_view path) const +StorePath StoreDirConfig::parseStorePath(std::string_view path) const { auto p = canonPath(std::string(path)); if (dirOf(p) != storeDir) @@ -62,7 +58,7 @@ StorePath Store::parseStorePath(std::string_view path) const return StorePath(baseNameOf(p)); } -std::optional Store::maybeParseStorePath(std::string_view path) const +std::optional StoreDirConfig::maybeParseStorePath(std::string_view path) const { try { return parseStorePath(path); @@ -71,24 +67,24 @@ std::optional Store::maybeParseStorePath(std::string_view path) const } } -bool Store::isStorePath(std::string_view path) const +bool StoreDirConfig::isStorePath(std::string_view path) const { return (bool) maybeParseStorePath(path); } -StorePathSet Store::parseStorePathSet(const PathSet & paths) const +StorePathSet StoreDirConfig::parseStorePathSet(const PathSet & paths) const { StorePathSet res; for (auto & i : paths) res.insert(parseStorePath(i)); return res; } -std::string Store::printStorePath(const StorePath & path) const +std::string StoreDirConfig::printStorePath(const StorePath & path) const { return (storeDir + "/").append(path.to_string()); } -PathSet Store::printStorePathSet(const StorePathSet & paths) const +PathSet StoreDirConfig::printStorePathSet(const StorePathSet & paths) const { PathSet res; for (auto & i : paths) res.insert(printStorePath(i)); diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc new file mode 100644 index 000000000..8b29e90d4 --- /dev/null +++ b/src/libstore/posix-fs-canonicalise.cc @@ -0,0 +1,171 @@ +#if HAVE_ACL_SUPPORT +# include +#endif + +#include "posix-fs-canonicalise.hh" +#include "file-system.hh" +#include "signals.hh" +#include "util.hh" +#include 
"globals.hh" +#include "store-api.hh" + +namespace nix { + +const time_t mtimeStore = 1; /* 1 second into the epoch */ + + +static void canonicaliseTimestampAndPermissions(const Path & path, const struct stat & st) +{ + if (!S_ISLNK(st.st_mode)) { + + /* Mask out all type related bits. */ + mode_t mode = st.st_mode & ~S_IFMT; + + if (mode != 0444 && mode != 0555) { + mode = (st.st_mode & S_IFMT) + | 0444 + | (st.st_mode & S_IXUSR ? 0111 : 0); + if (chmod(path.c_str(), mode) == -1) + throw SysError("changing mode of '%1%' to %2$o", path, mode); + } + + } + + if (st.st_mtime != mtimeStore) { + struct timeval times[2]; + times[0].tv_sec = st.st_atime; + times[0].tv_usec = 0; + times[1].tv_sec = mtimeStore; + times[1].tv_usec = 0; +#if HAVE_LUTIMES + if (lutimes(path.c_str(), times) == -1) + if (errno != ENOSYS || + (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)) +#else + if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1) +#endif + throw SysError("changing modification time of '%1%'", path); + } +} + + +void canonicaliseTimestampAndPermissions(const Path & path) +{ + canonicaliseTimestampAndPermissions(path, lstat(path)); +} + + +static void canonicalisePathMetaData_( + const Path & path, + std::optional> uidRange, + InodesSeen & inodesSeen) +{ + checkInterrupt(); + +#if __APPLE__ + /* Remove flags, in particular UF_IMMUTABLE which would prevent + the file from being garbage-collected. FIXME: Use + setattrlist() to remove other attributes as well. */ + if (lchflags(path.c_str(), 0)) { + if (errno != ENOTSUP) + throw SysError("clearing flags of path '%1%'", path); + } +#endif + + auto st = lstat(path); + + /* Really make sure that the path is of a supported type. */ + if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) + throw Error("file '%1%' has an unsupported type", path); + +#if HAVE_ACL_SUPPORT + /* Remove extended attributes / ACLs. */ + ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); + + if (eaSize < 0) { + if (errno != ENOTSUP && errno != ENODATA) + throw SysError("querying extended attributes of '%s'", path); + } else if (eaSize > 0) { + std::vector eaBuf(eaSize); + + if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0) + throw SysError("querying extended attributes of '%s'", path); + + for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) { + if (settings.ignoredAcls.get().count(eaName)) continue; + if (lremovexattr(path.c_str(), eaName.c_str()) == -1) + throw SysError("removing extended attribute '%s' from '%s'", eaName, path); + } + } +#endif + + /* Fail if the file is not owned by the build user. This prevents + us from messing up the ownership/permissions of files + hard-linked into the output (e.g. "ln /etc/shadow $out/foo"). + However, ignore files that we chown'ed ourselves previously to + ensure that we don't fail on hard links within the same build + (i.e. "touch $out/foo; ln $out/foo $out/bar"). */ + if (uidRange && (st.st_uid < uidRange->first || st.st_uid > uidRange->second)) { + if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino))) + throw BuildError("invalid ownership on file '%1%'", path); + mode_t mode = st.st_mode & ~S_IFMT; + assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); + return; + } + + inodesSeen.insert(Inode(st.st_dev, st.st_ino)); + + canonicaliseTimestampAndPermissions(path, st); + + /* Change ownership to the current uid. 
If it's a symlink, use + lchown if available, otherwise don't bother. Wrong ownership + of a symlink doesn't matter, since the owning user can't change + the symlink and can't delete it because the directory is not + writable. The only exception is top-level paths in the Nix + store (since that directory is group-writable for the Nix build + users group); we check for this case below. */ + if (st.st_uid != geteuid()) { +#if HAVE_LCHOWN + if (lchown(path.c_str(), geteuid(), getegid()) == -1) +#else + if (!S_ISLNK(st.st_mode) && + chown(path.c_str(), geteuid(), getegid()) == -1) +#endif + throw SysError("changing owner of '%1%' to %2%", + path, geteuid()); + } + + if (S_ISDIR(st.st_mode)) { + DirEntries entries = readDirectory(path); + for (auto & i : entries) + canonicalisePathMetaData_(path + "/" + i.name, uidRange, inodesSeen); + } +} + + +void canonicalisePathMetaData( + const Path & path, + std::optional> uidRange, + InodesSeen & inodesSeen) +{ + canonicalisePathMetaData_(path, uidRange, inodesSeen); + + /* On platforms that don't have lchown(), the top-level path can't + be a symlink, since we can't change its ownership. */ + auto st = lstat(path); + + if (st.st_uid != geteuid()) { + assert(S_ISLNK(st.st_mode)); + throw Error("wrong ownership of top-level store path '%1%'", path); + } +} + + +void canonicalisePathMetaData(const Path & path, + std::optional> uidRange) +{ + InodesSeen inodesSeen; + canonicalisePathMetaData(path, uidRange, inodesSeen); +} + +} diff --git a/src/libstore/posix-fs-canonicalise.hh b/src/libstore/posix-fs-canonicalise.hh new file mode 100644 index 000000000..35644af12 --- /dev/null +++ b/src/libstore/posix-fs-canonicalise.hh @@ -0,0 +1,45 @@ +#pragma once +///@file + +#include +#include + +#include "types.hh" +#include "error.hh" + +namespace nix { + +typedef std::pair Inode; +typedef std::set InodesSeen; + + +/** + * "Fix", or canonicalise, the meta-data of the files in a store path + * after it has been built. In particular: + * + * - the last modification date on each file is set to 1 (i.e., + * 00:00:01 1/1/1970 UTC) + * + * - the permissions are set to 444 or 555 (i.e., read-only with or + * without execute permission; setuid bits etc. are cleared) + * + * - the owner and group are set to the Nix user and group, if we're + * running as root. + * + * If uidRange is not empty, this function will throw an error if it + * encounters files owned by a user outside of the closed interval + * [uidRange->first, uidRange->second]. 
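Restated as a standalone POSIX sketch, the two per-file rules documented above look as follows (error handling reduced to return codes; the real functions additionally clear ACLs, fix ownership, and recurse into directories):

```cpp
#include <sys/stat.h>
#include <sys/time.h>

// Apply the two canonicalisation rules above to a single file:
// permissions forced to 0444/0555, and the mtime set to 1 (mtimeStore).
int canonicaliseOne(const char * path)
{
    struct stat st;
    if (lstat(path, &st) == -1) return -1;
    if (S_ISLNK(st.st_mode)) return 0; // a symlink's mode is left alone

    mode_t mode = st.st_mode & ~S_IFMT;
    if (mode != 0444 && mode != 0555) {
        // read-only, keeping an execute bit if the owner had one
        mode = 0444 | (st.st_mode & S_IXUSR ? 0111 : 0);
        if (chmod(path, mode) == -1) return -1;
    }

    struct timeval times[2];
    times[0].tv_sec = st.st_atime; times[0].tv_usec = 0; // atime unchanged
    times[1].tv_sec = 1;           times[1].tv_usec = 0; // 00:00:01 1/1/1970 UTC
    return utimes(path, times);
}
```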
+ */ +void canonicalisePathMetaData( + const Path & path, + std::optional> uidRange, + InodesSeen & inodesSeen); +void canonicalisePathMetaData( + const Path & path, + std::optional> uidRange); + +void canonicaliseTimestampAndPermissions(const Path & path); + +MakeError(PathInUse, Error); + +} diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index 93ddb5b20..86bfdd1a8 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -1,6 +1,7 @@ #include "realisation.hh" #include "store-api.hh" #include "closure.hh" +#include "signature/local-keys.hh" #include namespace nix { @@ -113,9 +114,9 @@ std::string Realisation::fingerprint() const return serialized.dump(); } -void Realisation::sign(const SecretKey & secretKey) +void Realisation::sign(const Signer &signer) { - signatures.insert(secretKey.signDetached(fingerprint())); + signatures.insert(signer.signDetached(fingerprint())); } bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh index 4ba2123d8..ddb4af770 100644 --- a/src/libstore/realisation.hh +++ b/src/libstore/realisation.hh @@ -8,7 +8,7 @@ #include "derived-path.hh" #include #include "comparator.hh" -#include "crypto.hh" +#include "signature/signer.hh" namespace nix { @@ -64,7 +64,7 @@ struct Realisation { static Realisation fromJSON(const nlohmann::json& json, const std::string& whence); std::string fingerprint() const; - void sign(const SecretKey &); + void sign(const Signer &); bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; size_t checkSignatures(const PublicKeys & publicKeys) const; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index f16949f42..078b9fe00 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -16,11 +16,13 @@ #include "logging.hh" #include "callback.hh" #include "filetransfer.hh" +#include "signals.hh" + #include namespace nix { -/* TODO: Separate these store impls into different files, give them better names */ +/* TODO: Separate these store types into different files, give them better names */ RemoteStore::RemoteStore(const Params & params) : RemoteStoreConfig(params) , Store(params) @@ -186,7 +188,7 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, if (m.find("parsing derivation") != std::string::npos && m.find("expected string") != std::string::npos && m.find("Derive([") != std::string::npos) - throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw dervation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); + throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw derivation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); } throw; } @@ -225,7 +227,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute conn->to << WorkerProto::Op::QueryValidPaths; WorkerProto::write(*this, *conn, paths); if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 27) { - conn->to << (settings.buildersUseSubstitutes ? 
1 : 0); + conn->to << maybeSubstitute; } conn.processStderr(); return WorkerProto::Serialise::read(*this, *conn); @@ -417,12 +419,12 @@ std::optional RemoteStore::queryPathFromHashPart(const std::string & ref RemoteStore::addCAToStore( - Source & dump, - std::string_view name, - ContentAddressMethod caMethod, - HashType hashType, - const StorePathSet & references, - RepairFlag repair) + Source & dump, + std::string_view name, + ContentAddressMethod caMethod, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { std::optional conn_(getConnection()); auto & conn = *conn_; @@ -432,7 +434,7 @@ ref RemoteStore::addCAToStore( conn->to << WorkerProto::Op::AddToStore << name - << caMethod.render(hashType); + << caMethod.render(hashAlgo); WorkerProto::write(*this, *conn, references); conn->to << repair; @@ -453,9 +455,9 @@ ref RemoteStore::addCAToStore( std::visit(overloaded { [&](const TextIngestionMethod & thm) -> void { - if (hashType != htSHA256) + if (hashAlgo != HashAlgorithm::SHA256) throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", - name, printHashType(hashType)); + name, printHashAlgo(hashAlgo)); std::string s = dump.drain(); conn->to << WorkerProto::Op::AddTextToStore << name << s; WorkerProto::write(*this, *conn, references); @@ -465,9 +467,9 @@ ref RemoteStore::addCAToStore( conn->to << WorkerProto::Op::AddToStore << name - << ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ + << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ << (fim == FileIngestionMethod::Recursive ? 1 : 0) - << printHashType(hashType); + << printHashAlgo(hashAlgo); try { conn->to.written = 0; @@ -502,10 +504,15 @@ ref RemoteStore::addCAToStore( } -StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references) +StorePath RemoteStore::addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { - return addCAToStore(dump, name, method, hashType, references, repair)->path; + return addCAToStore(dump, name, method, hashAlgo, references, repair)->path; } @@ -603,16 +610,6 @@ void RemoteStore::addMultipleToStore( } -StorePath RemoteStore::addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) -{ - StringSource source(s); - return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path; -} - void RemoteStore::registerDrvOutput(const Realisation & info) { auto conn(getConnection()); @@ -1071,6 +1068,7 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::function addCAToStore( - Source & dump, - std::string_view name, - ContentAddressMethod caMethod, - HashType hashType, - const StorePathSet & references, - RepairFlag repair); + Source & dump, + std::string_view name, + ContentAddressMethod caMethod, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair); /** - * Add a content-addressable store path. Does not support references. `dump` will be drained. + * Add a content-addressable store path. `dump` will be drained. 
*/ - StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override; + StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) override; void addToStore(const ValidPathInfo & info, Source & nar, RepairFlag repair, CheckSigsFlag checkSigs) override; @@ -101,12 +106,6 @@ public: RepairFlag repair, CheckSigsFlag checkSigs) override; - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override; - void registerDrvOutput(const Realisation & info) override; void queryRealisationUncached(const DrvOutput &, diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh index a3ce81026..6f3b177ac 100644 --- a/src/libstore/serve-protocol-impl.hh +++ b/src/libstore/serve-protocol-impl.hh @@ -16,11 +16,11 @@ namespace nix { /* protocol-agnostic templates */ #define SERVE_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ - TEMPLATE T ServeProto::Serialise< T >::read(const Store & store, ServeProto::ReadConn conn) \ + TEMPLATE T ServeProto::Serialise< T >::read(const StoreDirConfig & store, ServeProto::ReadConn conn) \ { \ return LengthPrefixedProtoHelper::read(store, conn); \ } \ - TEMPLATE void ServeProto::Serialise< T >::write(const Store & store, ServeProto::WriteConn conn, const T & t) \ + TEMPLATE void ServeProto::Serialise< T >::write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t) \ { \ LengthPrefixedProtoHelper::write(store, conn, t); \ } @@ -41,12 +41,12 @@ SERVE_USE_LENGTH_PREFIX_SERIALISER( template struct ServeProto::Serialise { - static T read(const Store & store, ServeProto::ReadConn conn) + static T read(const StoreDirConfig & store, ServeProto::ReadConn conn) { return CommonProto::Serialise::read(store, CommonProto::ReadConn { .from = conn.from }); } - static void write(const Store & store, ServeProto::WriteConn conn, const T & t) + static void write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t) { CommonProto::Serialise::write(store, CommonProto::WriteConn { .to = conn.to }, diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc index 9bfcc279c..08bfad9e4 100644 --- a/src/libstore/serve-protocol.cc +++ b/src/libstore/serve-protocol.cc @@ -5,6 +5,7 @@ #include "serve-protocol.hh" #include "serve-protocol-impl.hh" #include "archive.hh" +#include "path-info.hh" #include @@ -12,7 +13,7 @@ namespace nix { /* protocol-specific definitions */ -BuildResult ServeProto::Serialise::read(const Store & store, ServeProto::ReadConn conn) +BuildResult ServeProto::Serialise::read(const StoreDirConfig & store, ServeProto::ReadConn conn) { BuildResult status; status.status = (BuildResult::Status) readInt(conn.from); @@ -34,7 +35,7 @@ BuildResult ServeProto::Serialise::read(const Store & store, ServeP return status; } -void ServeProto::Serialise::write(const Store & store, ServeProto::WriteConn conn, const BuildResult & status) +void ServeProto::Serialise::write(const StoreDirConfig & store, ServeProto::WriteConn conn, const BuildResult & status) { conn.to << status.status @@ -54,4 +55,83 @@ void ServeProto::Serialise::write(const Store & store, 
ServeProto:: } } + +UnkeyedValidPathInfo ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) +{ + /* Hash should be set below unless very old `nix-store --serve`. + Caller should assert that it did set it. */ + UnkeyedValidPathInfo info { Hash::dummy }; + + auto deriver = readString(conn.from); + if (deriver != "") + info.deriver = store.parseStorePath(deriver); + info.references = ServeProto::Serialise::read(store, conn); + + readLongLong(conn.from); // download size, unused + info.narSize = readLongLong(conn.from); + + if (GET_PROTOCOL_MINOR(conn.version) >= 4) { + auto s = readString(conn.from); + if (!s.empty()) + info.narHash = Hash::parseAnyPrefixed(s); + info.ca = ContentAddress::parseOpt(readString(conn.from)); + info.sigs = readStrings(conn.from); + } + + return info; +} + +void ServeProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info) +{ + conn.to + << (info.deriver ? store.printStorePath(*info.deriver) : ""); + + ServeProto::write(store, conn, info.references); + // !!! Maybe we want compression? + conn.to + << info.narSize // downloadSize, lie a little + << info.narSize; + if (GET_PROTOCOL_MINOR(conn.version) >= 4) + conn.to + << info.narHash.to_string(HashFormat::Nix32, true) + << renderContentAddress(info.ca) + << info.sigs; +} + + +ServeProto::BuildOptions ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) +{ + BuildOptions options; + options.maxSilentTime = readInt(conn.from); + options.buildTimeout = readInt(conn.from); + if (GET_PROTOCOL_MINOR(conn.version) >= 2) + options.maxLogSize = readNum(conn.from); + if (GET_PROTOCOL_MINOR(conn.version) >= 3) { + options.nrRepeats = readInt(conn.from); + options.enforceDeterminism = readInt(conn.from); + } + if (GET_PROTOCOL_MINOR(conn.version) >= 7) { + options.keepFailed = (bool) readInt(conn.from); + } + return options; +} + +void ServeProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options) +{ + conn.to + << options.maxSilentTime + << options.buildTimeout; + if (GET_PROTOCOL_MINOR(conn.version) >= 2) + conn.to + << options.maxLogSize; + if (GET_PROTOCOL_MINOR(conn.version) >= 3) + conn.to + << options.nrRepeats + << options.enforceDeterminism; + + if (GET_PROTOCOL_MINOR(conn.version) >= 7) { + conn.to << ((int) options.keepFailed); + } +} + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index ba159f6e9..1665b935f 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -13,11 +13,12 @@ namespace nix { #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) -class Store; +struct StoreDirConfig; struct Source; // items being serialised struct BuildResult; +struct UnkeyedValidPathInfo; /** @@ -72,8 +73,8 @@ struct ServeProto // See `worker-protocol.hh` for a longer explanation. #if 0 { - static T read(const Store & store, ReadConn conn); - static void write(const Store & store, WriteConn conn, const T & t); + static T read(const StoreDirConfig & store, ReadConn conn); + static void write(const StoreDirConfig & store, WriteConn conn, const T & t); }; #endif @@ -82,10 +83,17 @@ struct ServeProto * infer the type instead of having to write it down explicitly. 
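Both directions of the serialisers above branch on GET_PROTOCOL_MINOR because `nix-store --serve` has to interoperate with peers speaking older protocol revisions: a field is only on the wire when the negotiated version is new enough to understand it. A reduced sketch of the pattern (only two of the gated fields, with an in-memory word list standing in for the connection):

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Version-gating pattern used by the BuildOptions serialiser above:
// fields are written only when the negotiated minor version allows.
struct Conn { unsigned version; std::vector<uint64_t> words; };
static unsigned minorOf(unsigned v) { return v & 0x00ff; } // cf. GET_PROTOCOL_MINOR

struct BuildOptions { uint64_t maxSilentTime, buildTimeout, maxLogSize; bool keepFailed; };

void write(Conn & conn, const BuildOptions & o)
{
    conn.words.push_back(o.maxSilentTime);                              // always sent
    conn.words.push_back(o.buildTimeout);                               // always sent
    if (minorOf(conn.version) >= 2) conn.words.push_back(o.maxLogSize); // gated on >= 2
    if (minorOf(conn.version) >= 7) conn.words.push_back(o.keepFailed); // gated on >= 7
}

int main()
{
    Conn oldConn{0x0201, {}}, newConn{0x0207, {}};
    BuildOptions o{3600, 7200, 1 << 20, false};
    write(oldConn, o);
    write(newConn, o);
    std::cout << oldConn.words.size() << " vs " << newConn.words.size() << " fields\n"; // 2 vs 4
    return 0;
}
```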
*/ template - static void write(const Store & store, WriteConn conn, const T & t) + static void write(const StoreDirConfig & store, WriteConn conn, const T & t) { ServeProto::Serialise::write(store, conn, t); } + + /** + * Options for building shared between + * `ServeProto::Command::BuildPaths` and + * `ServeProto::Command::BuildDerivation`. + */ + struct BuildOptions; }; enum struct ServeProto::Command : uint64_t @@ -101,6 +109,22 @@ enum struct ServeProto::Command : uint64_t AddToStoreNar = 9, }; + +struct ServeProto::BuildOptions { + /** + * Default value in this and every other field is so tests pass when + * testing older deserialisers which do not set all the fields. + */ + time_t maxSilentTime = -1; + time_t buildTimeout = -1; + size_t maxLogSize = -1; + size_t nrRepeats = -1; + bool enforceDeterminism = -1; + bool keepFailed = -1; + + bool operator == (const ServeProto::BuildOptions &) const = default; +}; + /** * Convenience for sending operation codes. * @@ -135,12 +159,16 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op) #define DECLARE_SERVE_SERIALISER(T) \ struct ServeProto::Serialise< T > \ { \ - static T read(const Store & store, ServeProto::ReadConn conn); \ - static void write(const Store & store, ServeProto::WriteConn conn, const T & t); \ + static T read(const StoreDirConfig & store, ServeProto::ReadConn conn); \ + static void write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t); \ }; template<> DECLARE_SERVE_SERIALISER(BuildResult); +template<> +DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo); +template<> +DECLARE_SERVE_SERIALISER(ServeProto::BuildOptions); template DECLARE_SERVE_SERIALISER(std::vector); diff --git a/src/libstore/ssh-store-config.hh b/src/libstore/ssh-store-config.hh index bf55d20cf..4ce4ffc4c 100644 --- a/src/libstore/ssh-store-config.hh +++ b/src/libstore/ssh-store-config.hh @@ -20,7 +20,7 @@ struct CommonSSHStoreConfig : virtual StoreConfig const Setting remoteStore{this, "", "remote-store", R"( - [Store URL](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) + [Store URL](@docroot@/store/types/index.md#store-url-format) to be used on the remote machine. The default is `auto` (i.e. use the Nix daemon or `/nix/store` directly). 
)"}; diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 4a6aad449..d4c8ab5b2 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -3,9 +3,10 @@ #include "local-fs-store.hh" #include "remote-store.hh" #include "remote-store-connection.hh" -#include "remote-fs-accessor.hh" +#include "source-accessor.hh" #include "archive.hh" #include "worker-protocol.hh" +#include "worker-protocol-impl.hh" #include "pool.hh" #include "ssh.hh" @@ -78,6 +79,8 @@ protected: std::string host; + std::vector extraRemoteProgramArgs; + SSHMaster master; void setOptions(RemoteStore::Connection & conn) override @@ -91,6 +94,121 @@ protected: }; }; +struct MountedSSHStoreConfig : virtual SSHStoreConfig, virtual LocalFSStoreConfig +{ + using SSHStoreConfig::SSHStoreConfig; + using LocalFSStoreConfig::LocalFSStoreConfig; + + MountedSSHStoreConfig(StringMap params) + : StoreConfig(params) + , RemoteStoreConfig(params) + , CommonSSHStoreConfig(params) + , SSHStoreConfig(params) + , LocalFSStoreConfig(params) + { + } + + const std::string name() override { return "Experimental SSH Store with filesytem mounted"; } + + std::string doc() override + { + return + #include "mounted-ssh-store.md" + ; + } + + std::optional experimentalFeature() const override + { + return ExperimentalFeature::MountedSSHStore; + } +}; + +/** + * The mounted ssh store assumes that filesystems on the remote host are + * shared with the local host. This means that the remote nix store is + * available locally and is therefore treated as a local filesystem + * store. + * + * MountedSSHStore is very similar to UDSRemoteStore --- ignoring the + * superficial differnce of SSH vs Unix domain sockets, they both are + * accessing remote stores, and they both assume the store will be + * mounted in the local filesystem. + * + * The difference lies in how they manage GC roots. See addPermRoot + * below for details. + */ +class MountedSSHStore : public virtual MountedSSHStoreConfig, public virtual SSHStore, public virtual LocalFSStore +{ +public: + + MountedSSHStore(const std::string & scheme, const std::string & host, const Params & params) + : StoreConfig(params) + , RemoteStoreConfig(params) + , CommonSSHStoreConfig(params) + , SSHStoreConfig(params) + , LocalFSStoreConfig(params) + , MountedSSHStoreConfig(params) + , Store(params) + , RemoteStore(params) + , SSHStore(scheme, host, params) + , LocalFSStore(params) + { + extraRemoteProgramArgs = { + "--process-ops", + }; + } + + static std::set uriSchemes() + { + return {"mounted-ssh-ng"}; + } + + std::string getUri() override + { + return *uriSchemes().begin() + "://" + host; + } + + void narFromPath(const StorePath & path, Sink & sink) override + { + return LocalFSStore::narFromPath(path, sink); + } + + ref getFSAccessor(bool requireValidPath) override + { + return LocalFSStore::getFSAccessor(requireValidPath); + } + + std::optional getBuildLogExact(const StorePath & path) override + { + return LocalFSStore::getBuildLogExact(path); + } + + /** + * This is the key difference from UDSRemoteStore: UDSRemote store + * has the client create the direct root, and the remote side create + * the indirect root. + * + * We could also do that, but the race conditions (will the remote + * side see the direct root the client made?) seems bigger. + * + * In addition, the remote-side will have a process associated with + * the authenticating user handling the connection (even if there + * is a system-wide daemon or similar). 
This process can safely make + * the direct and indirect roots without there being such a risk of + * privilege escalation / symlinks in directories owned by the + * originating requester that they cannot delete. + */ + Path addPermRoot(const StorePath & path, const Path & gcRoot) override + { + auto conn(getConnection()); + conn->to << WorkerProto::Op::AddPermRoot; + WorkerProto::write(*this, *conn, path); + WorkerProto::write(*this, *conn, gcRoot); + conn.processStderr(); + return readString(conn->from); + } +}; + ref SSHStore::openConnection() { auto conn = make_ref(); @@ -98,6 +216,8 @@ ref SSHStore::openConnection() std::string command = remoteProgram + " --stdio"; if (remoteStore.get() != "") command += " --store " + shellEscape(remoteStore.get()); + for (auto & arg : extraRemoteProgramArgs) + command += " " + shellEscape(arg); conn->sshConn = master.startCommand(command); conn->to = FdSink(conn->sshConn->in.get()); @@ -106,5 +226,6 @@ ref SSHStore::openConnection() } static RegisterStoreImplementation regSSHStore; +static RegisterStoreImplementation regMountedSSHStore; } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 0f88d9b92..0c37ecd30 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1,6 +1,8 @@ -#include "crypto.hh" +#include "signature/local-keys.hh" #include "source-accessor.hh" #include "globals.hh" +#include "derived-path.hh" +#include "realisation.hh" #include "derivations.hh" #include "store-api.hh" #include "util.hh" @@ -25,13 +27,13 @@ using json = nlohmann::json; namespace nix { -bool Store::isInStore(PathView path) const +bool StoreDirConfig::isInStore(PathView path) const { return isInDir(path, storeDir); } -std::pair Store::toStorePath(PathView path) const +std::pair StoreDirConfig::toStorePath(PathView path) const { if (!isInStore(path)) throw Error("path '%1%' is not in the Nix store", path); @@ -145,25 +147,25 @@ StorePath Store::followLinksToStorePath(std::string_view path) const */ -StorePath Store::makeStorePath(std::string_view type, +StorePath StoreDirConfig::makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const { /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ auto s = std::string(type) + ":" + std::string(hash) + ":" + storeDir + ":" + std::string(name); - auto h = compressHash(hashString(htSHA256, s), 20); + auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20); return StorePath(h, name); } -StorePath Store::makeStorePath(std::string_view type, +StorePath StoreDirConfig::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const { return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name); } -StorePath Store::makeOutputPath(std::string_view id, +StorePath StoreDirConfig::makeOutputPath(std::string_view id, const Hash & hash, std::string_view name) const { return makeStorePath("output:" + std::string { id }, hash, outputPathName(name, id)); @@ -174,7 +176,7 @@ StorePath Store::makeOutputPath(std::string_view id, hacky, but we can't put them in, say, (per the grammar above) since that would be ambiguous. 
*/ static std::string makeType( - const Store & store, + const StoreDirConfig & store, std::string && type, const StoreReferences & references) { @@ -187,14 +189,17 @@ static std::string makeType( } -StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const +StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const { - if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) { + if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) { return makeStorePath(makeType(*this, "source", info.references), info.hash, name); } else { - assert(info.references.size() == 0); + if (!info.references.empty()) { + throw Error("fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", + name); + } return makeStorePath("output:out", - hashString(htSHA256, + hashString(HashAlgorithm::SHA256, "fixed:out:" + makeFileIngestionPrefix(info.method) + info.hash.to_string(HashFormat::Base16, true) + ":"), @@ -203,25 +208,19 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf } -StorePath Store::makeTextPath(std::string_view name, const TextInfo & info) const -{ - assert(info.hash.type == htSHA256); - return makeStorePath( - makeType(*this, "text", StoreReferences { - .others = info.references, - .self = false, - }), - info.hash, - name); -} - - -StorePath Store::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const +StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const { // New template return std::visit(overloaded { [&](const TextInfo & ti) { - return makeTextPath(name, ti); + assert(ti.hash.algo == HashAlgorithm::SHA256); + return makeStorePath( + makeType(*this, "text", StoreReferences { + .others = ti.references, + .self = false, + }), + ti.hash, + name); }, [&](const FixedOutputInfo & foi) { return makeFixedOutputPath(name, foi); @@ -230,54 +229,45 @@ StorePath Store::makeFixedOutputPathFromCA(std::string_view name, const ContentA } -std::pair Store::computeStorePathFromDump( - Source & dump, +std::pair StoreDirConfig::computeStorePath( std::string_view name, - FileIngestionMethod method, - HashType hashAlgo, - const StorePathSet & references) const + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter) const { - HashSink sink(hashAlgo); - dump.drainInto(sink); - auto h = sink.finish().first; - FixedOutputInfo caInfo { - .method = method, - .hash = h, - .references = {}, + auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first; + return { + makeFixedOutputPathFromCA( + name, + ContentAddressWithReferences::fromParts( + method, + h, + { + .others = references, + .self = false, + })), + h, }; - return std::make_pair(makeFixedOutputPath(name, caInfo), h); -} - - -StorePath Store::computeStorePathForText( - std::string_view name, - std::string_view s, - const StorePathSet & references) const -{ - return makeTextPath(name, TextInfo { - .hash = hashString(htSHA256, s), - .references = references, - }); } StorePath Store::addToStore( std::string_view name, - const Path & _srcPath, - FileIngestionMethod method, - HashType hashAlgo, + SourceAccessor & accessor, + 
const CanonPath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) + RepairFlag repair) { - Path srcPath(absPath(_srcPath)); auto source = sinkToSource([&](Sink & sink) { - if (method == FileIngestionMethod::Recursive) - dumpPath(srcPath, sink, filter); - else - readFile(srcPath, sink); + dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter); }); - return addToStoreFromDump(*source, name, method, hashAlgo, repair, references); + return addToStoreFromDump(*source, name, method, hashAlgo, references, repair); } void Store::addMultipleToStore( @@ -402,11 +392,15 @@ digraph graphname { fileSink -> caHashSink } */ -ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, - FileIngestionMethod method, HashType hashAlgo, +ValidPathInfo Store::addToStoreSlow( + std::string_view name, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, HashAlgorithm hashAlgo, + const StorePathSet & references, std::optional expectedCAHash) { - HashSink narHashSink { htSHA256 }; + HashSink narHashSink { HashAlgorithm::SHA256 }; HashSink caHashSink { hashAlgo }; /* Note that fileSink and unusualHashTee must be mutually exclusive, since @@ -415,7 +409,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, RegularFileSink fileSink { caHashSink }; TeeSink unusualHashTee { narHashSink, caHashSink }; - auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 + auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256 ? static_cast(unusualHashTee) : narHashSink; @@ -423,7 +417,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, srcPath. The fact that we use scratchpadSink as a temporary buffer here is an implementation detail. */ auto fileSource = sinkToSource([&](Sink & scratchpadSink) { - dumpPath(srcPath, scratchpadSink); + accessor.dumpPath(srcPath, scratchpadSink); }); /* tapped provides the same data as fileSource, but we also write all the @@ -431,9 +425,11 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, TeeSource tapped { *fileSource, narSink }; NullParseSink blank; - auto & parseSink = method == FileIngestionMethod::Flat + auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat ? (ParseSink &) fileSink - : (ParseSink &) blank; + : method.getFileIngestionMethod() == FileIngestionMethod::Recursive + ? (ParseSink &) blank + : (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases /* The information that flows from tapped (besides being replicated in narSink), is now put in parseSink. */ @@ -443,28 +439,31 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, finish. */ auto [narHash, narSize] = narHashSink.finish(); - auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 + auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256 ? 
narHash : caHashSink.finish().first; if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); + ValidPathInfo info { *this, name, - FixedOutputInfo { - .method = method, - .hash = hash, - .references = {}, - }, + ContentAddressWithReferences::fromParts( + method, + hash, + { + .others = references, + .self = false, + }), narHash, }; info.narSize = narSize; if (!isValidPath(info.path)) { auto source = sinkToSource([&](Sink & scratchpadSink) { - dumpPath(srcPath, scratchpadSink); + accessor.dumpPath(srcPath, scratchpadSink); }); addToStore(info, *source); } @@ -545,8 +544,8 @@ std::map> Store::queryPartialDerivationOut return outputs; } -OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) { - auto resp = queryPartialDerivationOutputMap(path); +OutputPathMap Store::queryDerivationOutputMap(const StorePath & path, Store * evalStore) { + auto resp = queryPartialDerivationOutputMap(path, evalStore); OutputPathMap result; for (auto & [outName, optOutPath] : resp) { if (!optOutPath) @@ -1203,7 +1202,7 @@ std::optional decodeValidPathInfo(const Store & store, std::istre if (!hashGiven) { std::string s; getline(str, s); - auto narHash = Hash::parseAny(s, htSHA256); + auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256); getline(str, s); auto narSize = string2Int(s); if (!narSize) throw Error("number expected"); @@ -1227,7 +1226,7 @@ std::optional decodeValidPathInfo(const Store & store, std::istre } -std::string Store::showPaths(const StorePathSet & paths) +std::string StoreDirConfig::showPaths(const StorePathSet & paths) { std::string s; for (auto & i : paths) { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 32ad2aa44..9667b5e9e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -1,8 +1,6 @@ #pragma once ///@file -#include "nar-info.hh" -#include "realisation.hh" #include "path.hh" #include "derived-path.hh" #include "hash.hh" @@ -14,6 +12,8 @@ #include "config.hh" #include "path-info.hh" #include "repair-flag.hh" +#include "store-dir-config.hh" +#include "source-path.hh" #include #include @@ -30,7 +30,7 @@ namespace nix { /** - * About the class hierarchy of the store implementations: + * About the class hierarchy of the store types: * * Each store type `Foo` consists of two classes: * @@ -64,12 +64,16 @@ MakeError(InvalidPath, Error); MakeError(Unsupported, Error); MakeError(SubstituteGone, Error); MakeError(SubstituterDisabled, Error); -MakeError(BadStorePath, Error); MakeError(InvalidStoreURI, Error); +struct Realisation; +struct RealisedPath; +struct DrvOutput; + struct BasicDerivation; struct Derivation; + struct SourceAccessor; class NarInfoDiskCache; class Store; @@ -96,11 +100,11 @@ struct KeyedBuildResult; typedef std::map> StorePathCAMap; -struct StoreConfig : public Config +struct StoreConfig : public StoreDirConfig { typedef std::map Params; - using Config::Config; + using StoreDirConfig::StoreDirConfig; StoreConfig() = delete; @@ -130,15 +134,6 @@ struct StoreConfig : public Config return std::nullopt; } - const PathSetting storeDir_{this, settings.nixStore, - "store", - R"( - Logical location of the Nix store, usually - `/nix/store`. Note that you can only copy store paths - between stores if they have the same `store` setting. 
- )"}; - const Path storeDir = storeDir_; - const Setting pathInfoCacheSize{this, 65536, "path-info-cache-size", "Size of the in-memory store path metadata cache."}; @@ -167,7 +162,10 @@ struct StoreConfig : public Config Optional [system features](@docroot@/command-ref/conf-file.md#conf-system-features) available on the system this store uses to build derivations. Example: `"kvm"` - )" }; + )", + {}, + // Don't document the machine-specific default value + false}; }; class Store : public std::enable_shared_from_this, public virtual StoreConfig @@ -223,45 +221,6 @@ public: virtual std::string getUri() = 0; - StorePath parseStorePath(std::string_view path) const; - - std::optional maybeParseStorePath(std::string_view path) const; - - std::string printStorePath(const StorePath & path) const; - - /** - * Deprecated - * - * \todo remove - */ - StorePathSet parseStorePathSet(const PathSet & paths) const; - - PathSet printStorePathSet(const StorePathSet & path) const; - - /** - * Display a set of paths in human-readable form (i.e., between quotes - * and separated by commas). - */ - std::string showPaths(const StorePathSet & paths); - - /** - * @return true if ‘path’ is in the Nix store (but not the Nix - * store itself). - */ - bool isInStore(PathView path) const; - - /** - * @return true if ‘path’ is a store path, i.e. a direct child of the - * Nix store. - */ - bool isStorePath(std::string_view path) const; - - /** - * Split a path like /nix/store/-/ into - * /nix/store/- and /. - */ - std::pair toStorePath(PathView path) const; - /** * Follow symlinks until we end up with a path in the Nix store. */ @@ -273,55 +232,6 @@ public: */ StorePath followLinksToStorePath(std::string_view path) const; - /** - * Constructs a unique store path name. - */ - StorePath makeStorePath(std::string_view type, - std::string_view hash, std::string_view name) const; - StorePath makeStorePath(std::string_view type, - const Hash & hash, std::string_view name) const; - - StorePath makeOutputPath(std::string_view id, - const Hash & hash, std::string_view name) const; - - StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const; - - StorePath makeTextPath(std::string_view name, const TextInfo & info) const; - - StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const; - - /** - * Read-only variant of addToStoreFromDump(). It returns the store - * path to which a NAR or flat file would be written. - */ - std::pair computeStorePathFromDump( - Source & dump, - std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, - HashType hashAlgo = htSHA256, - const StorePathSet & references = {}) const; - - /** - * Preparatory part of addTextToStore(). - * - * !!! Computation of the path should take the references given to - * addTextToStore() into account, otherwise we have a (relatively - * minor) security hole: a caller can register a source file with - * bogus references. If there are too many references, the path may - * not be garbage collected when it has to be (not really a problem, - * the caller could create a root anyway), or it may be garbage - * collected when it shouldn't be (more serious). - * - * Hashing the references would solve this (bogus references would - * simply yield a different store path, so other users wouldn't be - * affected), but it has some backwards compatibility issues (the - * hashing scheme changes), so I'm not doing that for now. 
- */ - StorePath computeStorePathForText( - std::string_view name, - std::string_view s, - const StorePathSet & references) const; - /** * Check whether a path is valid. */ @@ -460,7 +370,7 @@ public: * Query the mapping outputName=>outputPath for the given derivation. * Assume every output has a mapping and throw an exception otherwise. */ - OutputPathMap queryDerivationOutputMap(const StorePath & path); + OutputPathMap queryDerivationOutputMap(const StorePath & path, Store * evalStore = nullptr); /** * Query the full store path given the hash part of a valid store @@ -519,20 +429,26 @@ public: */ virtual StorePath addToStore( std::string_view name, - const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, - HashType hashAlgo = htSHA256, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), PathFilter & filter = defaultPathFilter, - RepairFlag repair = NoRepair, - const StorePathSet & references = StorePathSet()); + RepairFlag repair = NoRepair); /** * Copy the contents of a path to the store and register the * validity the resulting path, using a constant amount of * memory. */ - ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, + ValidPathInfo addToStoreSlow( + std::string_view name, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), std::optional expectedCAHash = {}); /** @@ -544,20 +460,14 @@ public: * * \todo remove? */ - virtual StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, - const StorePathSet & references = StorePathSet()) - { unsupported("addToStoreFromDump"); } - - /** - * Like addToStore, but the contents written to the output path is a - * regular file containing the given string. - */ - virtual StorePath addTextToStore( + virtual StorePath addToStoreFromDump( + Source & dump, std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair = NoRepair) = 0; + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) + { unsupported("addToStoreFromDump"); } /** * Add a mapping indicating that `deriver!outputName` maps to the output path @@ -885,7 +795,7 @@ void copyStorePath( */ std::map copyPaths( Store & srcStore, Store & dstStore, - const RealisedPath::Set &, + const std::set &, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); @@ -902,7 +812,7 @@ std::map copyPaths( */ void copyClosure( Store & srcStore, Store & dstStore, - const RealisedPath::Set & paths, + const std::set & paths, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); @@ -959,7 +869,7 @@ OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * ev * - ‘ssh://[user@]’: A remote Nix store accessed by running * ‘nix-store --serve’ via SSH. 
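Given the `mounted-ssh-ng` scheme registered earlier in this patch, a client would reach the new store type through this same `openStore` entry point. A hypothetical sketch (the host name is a placeholder; it assumes the build is inside the Nix source tree, the corresponding experimental feature is enabled, and the remote store is mounted locally, e.g. via SSHFS):

```cpp
#include <iostream>
#include "store-api.hh" // within the Nix source tree

// Hypothetical use of the experimental mounted-ssh-ng scheme.
int main()
{
    auto store = nix::openStore("mounted-ssh-ng://builder.example.org"); // placeholder host
    std::cout << store->getUri() << "\n";
    return 0;
}
```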
* - * You can pass parameters to the store implementation by appending + * You can pass parameters to the store type by appending * ‘?key=value&key=value&...’ to the URI. */ ref openStore(const std::string & uri = settings.storeUri.get(), @@ -1034,6 +944,7 @@ const ContentAddress * getDerivationCA(const BasicDerivation & drv); std::map drvOutputReferences( Store & store, const Derivation & drv, - const StorePath & outputPath); + const StorePath & outputPath, + Store * evalStore = nullptr); } diff --git a/src/libstore/store-dir-config.hh b/src/libstore/store-dir-config.hh new file mode 100644 index 000000000..7ca8c2665 --- /dev/null +++ b/src/libstore/store-dir-config.hh @@ -0,0 +1,105 @@ +#pragma once + +#include "path.hh" +#include "hash.hh" +#include "content-address.hh" +#include "globals.hh" +#include "config.hh" + +#include +#include +#include + + +namespace nix { + +MakeError(BadStorePath, Error); + +struct StoreDirConfig : public Config +{ + using Config::Config; + + StoreDirConfig() = delete; + + virtual ~StoreDirConfig() = default; + + const PathSetting storeDir_{this, settings.nixStore, + "store", + R"( + Logical location of the Nix store, usually + `/nix/store`. Note that you can only copy store paths + between stores if they have the same `store` setting. + )"}; + const Path storeDir = storeDir_; + + // pure methods + + StorePath parseStorePath(std::string_view path) const; + + std::optional maybeParseStorePath(std::string_view path) const; + + std::string printStorePath(const StorePath & path) const; + + /** + * Deprecated + * + * \todo remove + */ + StorePathSet parseStorePathSet(const PathSet & paths) const; + + PathSet printStorePathSet(const StorePathSet & path) const; + + /** + * Display a set of paths in human-readable form (i.e., between quotes + * and separated by commas). + */ + std::string showPaths(const StorePathSet & paths); + + /** + * @return true if ‘path’ is in the Nix store (but not the Nix + * store itself). + */ + bool isInStore(PathView path) const; + + /** + * @return true if ‘path’ is a store path, i.e. a direct child of the + * Nix store. + */ + bool isStorePath(std::string_view path) const; + + /** + * Split a path like /nix/store/-/ into + * /nix/store/- and /. + */ + std::pair toStorePath(PathView path) const; + + /** + * Constructs a unique store path name. + */ + StorePath makeStorePath(std::string_view type, + std::string_view hash, std::string_view name) const; + StorePath makeStorePath(std::string_view type, + const Hash & hash, std::string_view name) const; + + StorePath makeOutputPath(std::string_view id, + const Hash & hash, std::string_view name) const; + + StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const; + + StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const; + + /** + * Read-only variant of addToStore(). It returns the store + * path for the given file system object. 
+     */
+    std::pair<StorePath, Hash> computeStorePath(
+        std::string_view name,
+        SourceAccessor & accessor,
+        const CanonPath & path,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        const StorePathSet & references = {},
+        PathFilter & filter = defaultPathFilter) const;
+};
+
+}
diff --git a/src/libstore/tests/local.mk b/src/libstore/tests/local.mk
deleted file mode 100644
index 03becc7d1..000000000
--- a/src/libstore/tests/local.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-check: libstore-tests-exe_RUN
-
-programs += libstore-tests-exe
-
-libstore-tests-exe_NAME = libnixstore-tests
-
-libstore-tests-exe_DIR := $(d)
-
-libstore-tests-exe_INSTALL_DIR :=
-
-libstore-tests-exe_LIBS = libstore-tests
-
-libstore-tests-exe_LDFLAGS := $(GTEST_LIBS)
-
-libraries += libstore-tests
-
-libstore-tests_NAME = libnixstore-tests
-
-libstore-tests_DIR := $(d)
-
-libstore-tests_INSTALL_DIR :=
-
-libstore-tests_SOURCES := $(wildcard $(d)/*.cc)
-
-libstore-tests_CXXFLAGS += -I src/libstore -I src/libutil
-
-libstore-tests_LIBS = libutil-tests libstore libutil
-
-libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
diff --git a/src/libstore/worker-protocol-impl.hh b/src/libstore/worker-protocol-impl.hh
index c043588d6..026cc37bc 100644
--- a/src/libstore/worker-protocol-impl.hh
+++ b/src/libstore/worker-protocol-impl.hh
@@ -16,11 +16,11 @@ namespace nix {
/* protocol-agnostic templates */

#define WORKER_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \
-    TEMPLATE T WorkerProto::Serialise< T >::read(const Store & store, WorkerProto::ReadConn conn) \
+    TEMPLATE T WorkerProto::Serialise< T >::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) \
    { \
        return LengthPrefixedProtoHelper<WorkerProto, T >::read(store, conn); \
    } \
-    TEMPLATE void WorkerProto::Serialise< T >::write(const Store & store, WorkerProto::WriteConn conn, const T & t) \
+    TEMPLATE void WorkerProto::Serialise< T >::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t) \
    { \
        LengthPrefixedProtoHelper<WorkerProto, T >::write(store, conn, t); \
    }
@@ -41,12 +41,12 @@ WORKER_USE_LENGTH_PREFIX_SERIALISER(
template<typename T>
struct WorkerProto::Serialise
{
-    static T read(const Store & store, WorkerProto::ReadConn conn)
+    static T read(const StoreDirConfig & store, WorkerProto::ReadConn conn)
    {
        return CommonProto::Serialise<T>::read(store,
            CommonProto::ReadConn { .from = conn.from });
    }
-    static void write(const Store & store, WorkerProto::WriteConn conn, const T & t)
+    static void write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t)
    {
        CommonProto::Serialise<T>::write(store,
            CommonProto::WriteConn { .to = conn.to },
diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc
index 7118558b1..a50259d24 100644
--- a/src/libstore/worker-protocol.cc
+++ b/src/libstore/worker-protocol.cc
@@ -7,13 +7,14 @@
#include "archive.hh"
#include "path-info.hh"

+#include <chrono>
#include <nlohmann/json.hpp>

namespace nix {

/* protocol-specific definitions */

-std::optional<TrustedFlag> WorkerProto::Serialise<std::optional<TrustedFlag>>::read(const Store & store, WorkerProto::ReadConn conn)
+std::optional<TrustedFlag> WorkerProto::Serialise<std::optional<TrustedFlag>>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn)
{
    auto temp = readNum<uint8_t>(conn.from);
    switch (temp) {
@@ -28,7 +29,7 @@ std::optional<TrustedFlag> WorkerProto::Serialise<std::optional<TrustedFlag>>::r
    }
}

-void WorkerProto::Serialise<std::optional<TrustedFlag>>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional<TrustedFlag> & optTrusted)
+void WorkerProto::Serialise<std::optional<TrustedFlag>>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const std::optional<TrustedFlag> & optTrusted)
{
    if (!optTrusted)
        conn.to
            << uint8_t{0};
@@ -47,7 +48,32 @@ void WorkerProto::Serialise<std::optional<TrustedFlag>>::write(const Store & sto
}


-DerivedPath WorkerProto::Serialise<DerivedPath>::read(const Store & store, WorkerProto::ReadConn conn)
+std::optional<std::chrono::microseconds> WorkerProto::Serialise<std::optional<std::chrono::microseconds>>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn)
+{
+    auto tag = readNum<uint8_t>(conn.from);
+    switch (tag) {
+    case 0:
+        return std::nullopt;
+    case 1:
+        return std::optional{std::chrono::microseconds(readNum<int64_t>(conn.from))};
+    default:
+        throw Error("Invalid optional tag from remote");
+    }
+}
+
+void WorkerProto::Serialise<std::optional<std::chrono::microseconds>>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const std::optional<std::chrono::microseconds> & optDuration)
+{
+    if (!optDuration.has_value()) {
+        conn.to << uint8_t{0};
+    } else {
+        conn.to
+            << uint8_t{1}
+            << optDuration.value().count();
+    }
+}
+
+
+DerivedPath WorkerProto::Serialise<DerivedPath>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn)
{
    auto s = readString(conn.from);
    if (GET_PROTOCOL_MINOR(conn.version) >= 30) {
@@ -57,7 +83,7 @@ DerivedPath WorkerProto::Serialise<DerivedPath>::read(const Store & store, Worke
    }
}

-void WorkerProto::Serialise<DerivedPath>::write(const Store & store, WorkerProto::WriteConn conn, const DerivedPath & req)
+void WorkerProto::Serialise<DerivedPath>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const DerivedPath & req)
{
    if (GET_PROTOCOL_MINOR(conn.version) >= 30) {
        conn.to << req.to_string_legacy(store);
@@ -81,7 +107,7 @@ void WorkerProto::Serialise<DerivedPath>::write(const Store & store, WorkerProto
}


-KeyedBuildResult WorkerProto::Serialise<KeyedBuildResult>::read(const Store & store, WorkerProto::ReadConn conn)
+KeyedBuildResult WorkerProto::Serialise<KeyedBuildResult>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn)
{
    auto path = WorkerProto::Serialise<DerivedPath>::read(store, conn);
    auto br = WorkerProto::Serialise<BuildResult>::read(store, conn);
@@ -91,14 +117,14 @@ KeyedBuildResult WorkerProto::Serialise<KeyedBuildResult>::read(const Store & st
    };
}

-void WorkerProto::Serialise<KeyedBuildResult>::write(const Store & store, WorkerProto::WriteConn conn, const KeyedBuildResult & res)
+void WorkerProto::Serialise<KeyedBuildResult>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const KeyedBuildResult & res)
{
    WorkerProto::write(store, conn, res.path);
    WorkerProto::write(store, conn, static_cast<const BuildResult &>(res));
}


-BuildResult WorkerProto::Serialise<BuildResult>::read(const Store & store, WorkerProto::ReadConn conn)
+BuildResult WorkerProto::Serialise<BuildResult>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn)
{
    BuildResult res;
    res.status = static_cast<BuildResult::Status>(readInt(conn.from));
@@ -110,6 +136,10 @@ BuildResult WorkerProto::Serialise<BuildResult>::read(const Store & store, Worke
            >> res.startTime
            >> res.stopTime;
    }
+    if (GET_PROTOCOL_MINOR(conn.version) >= 37) {
+        res.cpuUser = WorkerProto::Serialise<std::optional<std::chrono::microseconds>>::read(store, conn);
+        res.cpuSystem = WorkerProto::Serialise<std::optional<std::chrono::microseconds>>::read(store, conn);
+    }
    if (GET_PROTOCOL_MINOR(conn.version) >= 28) {
        auto builtOutputs = WorkerProto::Serialise<DrvOutputs>::read(store, conn);
        for (auto && [output, realisation] : builtOutputs)
@@ -120,7 +150,7 @@ BuildResult WorkerProto::Serialise<BuildResult>::read(const Store & store, Worke
    return res;
}

-void WorkerProto::Serialise<BuildResult>::write(const Store & store, WorkerProto::WriteConn conn, const BuildResult & res)
+void WorkerProto::Serialise<BuildResult>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const BuildResult & res)
{
    conn.to
        << res.status
@@ -132,6 +162,10 @@ void WorkerProto::Serialise<BuildResult>::write(const Store & store, WorkerProto
            << res.startTime
            << res.stopTime;
    }
+    if (GET_PROTOCOL_MINOR(conn.version) >= 37) {
+        WorkerProto::write(store, conn, res.cpuUser);
+        WorkerProto::write(store, conn,
+            res.cpuSystem);
+    }
    if (GET_PROTOCOL_MINOR(conn.version) >= 28) {
        DrvOutputs builtOutputs;
        for (auto & [output, realisation] : res.builtOutputs)
@@ -141,7 +175,7 @@ void WorkerProto::Serialise<BuildResult>::write(const Store & store, WorkerProto
}


-ValidPathInfo WorkerProto::Serialise<ValidPathInfo>::read(const Store & store, ReadConn conn)
+ValidPathInfo WorkerProto::Serialise<ValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
{
    auto path = WorkerProto::Serialise<StorePath>::read(store, conn);
    return ValidPathInfo {
@@ -150,17 +184,17 @@ ValidPathInfo WorkerProto::Serialise<ValidPathInfo>::read(const Store & store, R
    };
}

-void WorkerProto::Serialise<ValidPathInfo>::write(const Store & store, WriteConn conn, const ValidPathInfo & pathInfo)
+void WorkerProto::Serialise<ValidPathInfo>::write(const StoreDirConfig & store, WriteConn conn, const ValidPathInfo & pathInfo)
{
    WorkerProto::write(store, conn, pathInfo.path);
    WorkerProto::write(store, conn, static_cast<const UnkeyedValidPathInfo &>(pathInfo));
}


-UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const Store & store, ReadConn conn)
+UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
{
    auto deriver = readString(conn.from);
-    auto narHash = Hash::parseAny(readString(conn.from), htSHA256);
+    auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256);
    UnkeyedValidPathInfo info(narHash);
    if (deriver != "") info.deriver = store.parseStorePath(deriver);
    info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn);
@@ -173,7 +207,7 @@ UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const St
    return info;
}

-void WorkerProto::Serialise<UnkeyedValidPathInfo>::write(const Store & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo)
+void WorkerProto::Serialise<UnkeyedValidPathInfo>::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo)
{
    conn.to
        << (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : "")
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index 25d544ba7..91d277b77 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -1,6 +1,8 @@
#pragma once
///@file

+#include <chrono>
+
#include "common-protocol.hh"

namespace nix {
@@ -9,7 +11,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f

-#define PROTOCOL_VERSION (1 << 8 | 35)
+#define PROTOCOL_VERSION (1 << 8 | 37)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

@@ -24,7 +26,7 @@ namespace nix {
#define STDERR_RESULT 0x52534c54


-class Store;
+struct StoreDirConfig;
struct Source;

// items being serialised
@@ -100,8 +102,8 @@ struct WorkerProto
    // This makes for a quicker debug cycle, as desired.
#if 0
    {
-        static T read(const Store & store, ReadConn conn);
-        static void write(const Store & store, WriteConn conn, const T & t);
+        static T read(const StoreDirConfig & store, ReadConn conn);
+        static void write(const StoreDirConfig & store, WriteConn conn, const T & t);
    };
#endif

@@ -110,7 +112,7 @@ struct WorkerProto
     * infer the type instead of having to write it down explicitly.
     */
    template<typename T>
-    static void write(const Store & store, WriteConn conn, const T & t)
+    static void write(const StoreDirConfig & store, WriteConn conn, const T & t)
    {
        WorkerProto::Serialise<T>::write(store, conn, t);
    }
@@ -161,6 +163,7 @@ enum struct WorkerProto::Op : uint64_t
    AddMultipleToStore = 44,
    AddBuildLog = 45,
    BuildPathsWithResults = 46,
+    AddPermRoot = 47,
};

/**
@@ -197,8 +200,8 @@ inline std::ostream & operator << (std::ostream & s, WorkerProto::Op op)
#define DECLARE_WORKER_SERIALISER(T) \
    struct WorkerProto::Serialise< T > \
    { \
-        static T read(const Store & store, WorkerProto::ReadConn conn); \
-        static void write(const Store & store, WorkerProto::WriteConn conn, const T & t); \
+        static T read(const StoreDirConfig & store, WorkerProto::ReadConn conn); \
+        static void write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const T & t); \
    };

template<>
@@ -213,6 +216,8 @@ template<>
DECLARE_WORKER_SERIALISER(UnkeyedValidPathInfo);
template<>
DECLARE_WORKER_SERIALISER(std::optional<TrustedFlag>);
+template<>
+DECLARE_WORKER_SERIALISER(std::optional<std::chrono::microseconds>);

template<typename T>
DECLARE_WORKER_SERIALISER(std::vector<T>);
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index 465df2073..712ea51c7 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -140,7 +140,7 @@ static void parseContents(ParseSink & sink, Source & source, const Path & path)
    sink.preallocateContents(size);

    uint64_t left = size;
-    std::vector<char> buf(65536);
+    std::array<char, 65536> buf;

    while (left) {
        checkInterrupt();
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 4359c5e8e..e2668c673 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -97,6 +97,8 @@ struct Parser {
    virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) = 0;

    Parser(std::string_view s) : remaining(s) {};
+
+    virtual ~Parser() { };
};

struct ParseQuoted : public Parser {
@@ -481,7 +483,7 @@ bool Args::processArgs(const Strings & args, bool finish)
        if (!anyCompleted)
            exp.handler.fun(ss);

-        /* Move the list element to the processedArgs. This is almost the same as 
+        /* Move the list element to the processedArgs. This is almost the same as
           `processedArgs.push_back(expectedArgs.front()); expectedArgs.pop_front()`,
           except that it will only adjust the next and prev pointers of the list
           elements, meaning the actual contents don't move in memory. This is
@@ -542,36 +544,70 @@ nlohmann::json Args::toJSON()
    return res;
}

-static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
{
-    for (auto & type : hashTypes)
-        if (hasPrefix(type, prefix))
-            completions.add(type);
+    for (auto & format : hashFormats) {
+        if (hasPrefix(format, prefix)) {
+            completions.add(format);
+        }
+    }
}

-Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
-{
-    return Flag {
-        .longName = std::move(longName),
-        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
-        .labels = {"hash-algo"},
-        .handler = {[ht](std::string s) {
-            *ht = parseHashType(s);
-        }},
-        .completer = hashTypeCompleter,
+Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) {
+    assert(*hf == nix::HashFormat::SRI);
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'",
+        .labels = {"hash-format"},
+        .handler = {[hf](std::string s) {
+            *hf = parseHashFormat(s);
+        }},
+        .completer = hashFormatCompleter,
    };
}

-Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
+Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf)
{
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash format ('base16', 'nix32', 'base64', 'sri').",
+        .labels = {"hash-format"},
+        .handler = {[ohf](std::string s) {
+            *ohf = std::optional<HashFormat>{parseHashFormat(s)};
+        }},
+        .completer = hashFormatCompleter,
+    };
+}
+
+static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
{
-    return Flag {
-        .longName = std::move(longName),
-        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
-        .labels = {"hash-algo"},
-        .handler = {[oht](std::string s) {
-            *oht = std::optional { parseHashType(s) };
-        }},
-        .completer = hashTypeCompleter,
+    for (auto & algo : hashAlgorithms)
+        if (hasPrefix(algo, prefix))
+            completions.add(algo);
+}
+
+Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha)
+{
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
+        .labels = {"hash-algo"},
+        .handler = {[ha](std::string s) {
+            *ha = parseHashAlgo(s);
+        }},
+        .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha)
+{
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
+        .labels = {"hash-algo"},
+        .handler = {[oha](std::string s) {
+            *oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
+        }},
+        .completer = hashAlgoCompleter,
    };
}

@@ -620,8 +656,9 @@ std::optional<ExperimentalFeature> Command::experimentalFeature ()
    return { Xp::NixCommand };
}

-MultiCommand::MultiCommand(const Commands & commands_)
+MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_)
    : commands(commands_)
+    , commandName(commandName)
{
    expectArgs({
        .label = "subcommand",
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 7af82b178..18b0ae583 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -14,7 +14,8 @@

namespace nix {

-enum HashType : char;
+enum struct HashAlgorithm : char;
+enum struct HashFormat : int;

class MultiCommand;

@@ -175,8 +176,10 @@ protected:

    std::optional<ExperimentalFeature> experimentalFeature;

-    static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
-    static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
+    static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
+    static Flag mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha);
+    static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
+    static Flag mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf);
};

/**
@@ -223,11 +226,11 @@ protected:
    std::list<ExpectedArg> expectedArgs;
    /**
     * List of processed positional argument forms.
-     * 
+     *
     * All items removed from `expectedArgs` are added here. After all
     * arguments were processed, this list should be exactly the same as
     * `expectedArgs` was before.
-     * 
+     *
     * This list is used to extend the lifetime of the argument forms.
     * If this is not done, some closures that reference the command
     * itself will segfault.
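As a point of reference between the two hunks above, here is a minimal sketch of how a command might wire up the renamed flag factories. `CmdHashDemo` is a hypothetical type invented for illustration, not part of this change; it assumes `args.hh` and `hash.hh` are included and uses only `Args::addFlag` and the factories declared in this diff.

```cpp
#include "args.hh"
#include "hash.hh"

using namespace nix;

// Hypothetical command, for illustration only. Note that
// mkHashFormatFlagWithDefault() asserts that the target already holds
// HashFormat::SRI, so the member must be initialized to SRI.
struct CmdHashDemo : Args
{
    HashAlgorithm algo = HashAlgorithm::SHA256;
    HashFormat format = HashFormat::SRI;

    CmdHashDemo()
    {
        // Registers --algo md5|sha1|sha256|sha512, completed by hashAlgoCompleter.
        addFlag(Flag::mkHashAlgoFlag("algo", &algo));
        // Registers --to base16|nix32|base64|sri, defaulting to SRI.
        addFlag(Flag::mkHashFormatFlagWithDefault("to", &format));
    }
};
```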
@@ -356,13 +359,16 @@ public:
     */
    std::optional<std::pair<std::string, ref<Command>>> command;

-    MultiCommand(const Commands & commands);
+    MultiCommand(std::string_view commandName, const Commands & commands);

    bool processFlag(Strings::iterator & pos, Strings::iterator end) override;

    bool processArgs(const Strings & args, bool finish) override;

    nlohmann::json toJSON() override;
+
+protected:
+    std::string commandName = "";
};

Strings argvToStrings(int argc, char * * argv);
diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
index f678fae94..1e465f1f6 100644
--- a/src/libutil/canon-path.cc
+++ b/src/libutil/canon-path.cc
@@ -13,6 +13,13 @@ CanonPath::CanonPath(std::string_view raw, const CanonPath & root)
    : path(absPath((Path) raw, root.abs()))
{ }

+CanonPath::CanonPath(const std::vector<std::string> & elems)
+    : path("/")
+{
+    for (auto & s : elems)
+        push(s);
+}
+
CanonPath CanonPath::fromCwd(std::string_view path)
{
    return CanonPath(unchecked_t(), absPath((Path) path));
diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh
index eefe05ed5..6aff4ec0d 100644
--- a/src/libutil/canon-path.hh
+++ b/src/libutil/canon-path.hh
@@ -6,6 +6,7 @@
#include <string>
#include <string_view>
#include <optional>
+#include <vector>

namespace nix {

@@ -46,6 +47,11 @@ public:
        : path(std::move(path))
    { }

+    /**
+     * Construct a canon path from a vector of elements.
+     */
+    CanonPath(const std::vector<std::string> & elems);
+
    static CanonPath fromCwd(std::string_view path = ".");

    static CanonPath root;
@@ -199,8 +205,19 @@ public:
     * `CanonPath(this.makeRelative(x), this) == path`.
     */
    std::string makeRelative(const CanonPath & path) const;
+
+    friend class std::hash<CanonPath>;
};

std::ostream & operator << (std::ostream & stream, const CanonPath & path);

}
+
+template<>
+struct std::hash<nix::CanonPath>
+{
+    std::size_t operator ()(const nix::CanonPath & s) const noexcept
+    {
+        return std::hash<std::string>{}(s.path);
+    }
+};
diff --git a/src/libutil/comparator.hh b/src/libutil/comparator.hh
index a4d20a675..cbc2bb4fd 100644
--- a/src/libutil/comparator.hh
+++ b/src/libutil/comparator.hh
@@ -13,9 +13,9 @@
#define GENERATE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE, ...) \
    PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const { \
        __VA_OPT__(const MY_TYPE * me = this;) \
-        auto fields1 = std::make_tuple( __VA_ARGS__ ); \
+        auto fields1 = std::tie( __VA_ARGS__ ); \
        __VA_OPT__(me = &other;) \
-        auto fields2 = std::make_tuple( __VA_ARGS__ ); \
+        auto fields2 = std::tie( __VA_ARGS__ ); \
        return fields1 COMPARATOR fields2; \
    }
#define GENERATE_EQUAL(prefix, qualification, my_type, args...) \
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 96a0a4df8..a3310f4ec 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -88,10 +88,9 @@ void Config::getSettings(std::map<std::string, SettingInfo> & res, bool overridd
        res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
}

-void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
-    unsigned int pos = 0;
-
-    std::vector<std::pair<std::string, std::string>> parsedContents;
+static void applyConfigInner(const std::string & contents, const std::string & path, std::vector<std::pair<std::string, std::string>> & parsedContents) {
+    unsigned int pos = 0;

    while (pos < contents.size()) {
        std::string line;
@@ -122,7 +121,12 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
                throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
            auto p = absPath(tokens[1], dirOf(path));
            if (pathExists(p)) {
-                applyConfigFile(p);
+                try {
+                    std::string includedContents = readFile(p);
+                    applyConfigInner(includedContents, p, parsedContents);
+                } catch (SysError &) {
+                    // TODO: Do we actually want to ignore this? Or is it better to fail?
+                }
            } else if (!ignoreMissing) {
                throw Error("file '%1%' included from '%2%' not found", p, path);
            }
@@ -142,6 +146,12 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
            concatStringsSep(" ", Strings(i, tokens.end())),
        });
    };
+}
+
+void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
+    std::vector<std::pair<std::string, std::string>> parsedContents;
+
+    applyConfigInner(contents, path, parsedContents);

    // First apply experimental-feature related settings
    for (const auto & [name, value] : parsedContents)
@@ -154,14 +164,6 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
        set(name, value);
}

-void AbstractConfig::applyConfigFile(const Path & path)
-{
-    try {
-        std::string contents = readFile(path);
-        applyConfig(contents, path);
-    } catch (SysError &) { }
-}
-
void Config::resetOverridden()
{
    for (auto & s : _settings)
diff --git a/src/libutil/config.hh b/src/libutil/config.hh
index 5d7bd8e0c..07322b60d 100644
--- a/src/libutil/config.hh
+++ b/src/libutil/config.hh
@@ -82,12 +82,6 @@ public:
     */
    void applyConfig(const std::string & contents, const std::string & path = "");

-    /**
-     * Applies a nix configuration file
-     * - path: the location of the config file to apply
-     */
-    void applyConfigFile(const Path & path);
-
    /**
     * Resets the `overridden` flag of all Settings
     */
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 8488e7e21..bd2f6b840 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -2,6 +2,7 @@
#include "environment-variables.hh"
#include "signals.hh"
#include "terminal.hh"
+#include "position.hh"

#include <iostream>
#include <sstream>

@@ -10,7 +11,7 @@ namespace nix {

-void BaseError::addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint, bool frame)
+void BaseError::addTrace(std::shared_ptr<Pos> && e, hintformat hint, bool frame)
{
    err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame });
}
@@ -41,58 +42,36 @@ std::ostream & operator <<(std::ostream & os, const hintformat & hf)
    return os << hf.str();
}

-std::ostream & operator <<(std::ostream & str, const AbstractPos & pos)
+/**
+ * An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container.
+ */
+inline bool operator<(const Trace& lhs, const Trace& rhs)
{
-    pos.print(str);
-    str << ":" << pos.line;
-    if (pos.column > 0)
-        str << ":" << pos.column;
-    return str;
-}
-
-std::optional<LinesOfCode> AbstractPos::getCodeLines() const
-{
-    if (line == 0)
-        return std::nullopt;
-
-    if (auto source = getSource()) {
-
-        std::istringstream iss(*source);
-        // count the newlines.
-        int count = 0;
-        std::string curLine;
-        int pl = line - 1;
-
-        LinesOfCode loc;
-
-        do {
-            std::getline(iss, curLine);
-            ++count;
-            if (count < pl)
-                ;
-            else if (count == pl) {
-                loc.prevLineOfCode = curLine;
-            } else if (count == pl + 1) {
-                loc.errLineOfCode = curLine;
-            } else if (count == pl + 2) {
-                loc.nextLineOfCode = curLine;
-                break;
-            }
-
-            if (!iss.good())
-                break;
-        } while (true);
-
-        return loc;
+    // `std::shared_ptr` does not have value semantics for its comparison
+    // functions, so we need to check for nulls and compare the dereferenced
+    // values here.
+    if (lhs.pos != rhs.pos) {
+        if (!lhs.pos)
+            return true;
+        if (!rhs.pos)
+            return false;
+        if (*lhs.pos != *rhs.pos)
+            return *lhs.pos < *rhs.pos;
    }
-
-    return std::nullopt;
+    // This formats a freshly formatted hint string and then throws it away, which
+    // shouldn't be much of a problem because it only runs when pos is equal, and this function is
+    // used for trace printing, which is infrequent.
+    return std::forward_as_tuple(lhs.hint.str(), lhs.frame)
+        < std::forward_as_tuple(rhs.hint.str(), rhs.frame);
}
+inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; }
+inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); }
+inline bool operator>=(const Trace& lhs, const Trace& rhs) { return !(lhs < rhs); }

// print lines of code to the ostream, indicating the error column.
void printCodeLines(std::ostream & out,
    const std::string & prefix,
-    const AbstractPos & errPos,
+    const Pos & errPos,
    const LinesOfCode & loc)
{
    // previous line of code.
@@ -159,24 +138,23 @@ static std::string indent(std::string_view indentFirst, std::string_view indentR

/**
 * A development aid for finding missing positions, to improve error messages. Example use:
 *
- *     NIX_DEVELOPER_SHOW_UNKNOWN_LOCATIONS=1 _NIX_TEST_ACCEPT=1 make tests/lang.sh.test
+ *     _NIX_EVAL_SHOW_UNKNOWN_LOCATIONS=1 _NIX_TEST_ACCEPT=1 make tests/lang.sh.test
 *     git diff -U20 tests
 *
 */
-static bool printUnknownLocations = getEnv("_NIX_DEVELOPER_SHOW_UNKNOWN_LOCATIONS").has_value();
+static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").has_value();

/**
 * Print a position, if it is known.
 *
 * @return true if a position was printed.
 */
-static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr<AbstractPos> & pos) {
+static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr<Pos> & pos) {
    bool hasPos = pos && *pos;
    if (hasPos) {
-        oss << "\n" << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";
+        oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";

        if (auto loc = pos->getCodeLines()) {
-            oss << "\n";
            printCodeLines(oss, "", *pos, *loc);
            oss << "\n";
        }
@@ -186,6 +164,69 @@ static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std
    return hasPos;
}

+void printTrace(
+    std::ostream & output,
+    const std::string_view & indent,
+    size_t & count,
+    const Trace & trace)
+{
+    output << "\n" << "… " << trace.hint.str() << "\n";
+
+    if (printPosMaybe(output, indent, trace.pos))
+        count++;
+}
+
+void printSkippedTracesMaybe(
+    std::ostream & output,
+    const std::string_view & indent,
+    size_t & count,
+    std::vector<Trace> & skippedTraces,
+    std::set<Trace> tracesSeen)
+{
+    if (skippedTraces.size() > 0) {
+        // If we only skipped a few frames, print them out normally;
+        // messages like "1 duplicate frames omitted" aren't helpful.
+        if (skippedTraces.size() <= 5) {
+            for (auto & trace : skippedTraces) {
+                printTrace(output, indent, count, trace);
+            }
+        } else {
+            output << "\n" << ANSI_WARNING "(" << skippedTraces.size() << " duplicate frames omitted)" ANSI_NORMAL << "\n";
+            // Clear the set of "seen" traces after printing a chunk of
+            // `duplicate frames omitted`.
+            //
+            // Consider a mutually recursive stack trace with:
+            // - 10 entries of A
+            // - 10 entries of B
+            // - 10 entries of A
+            //
+            // If we don't clear `tracesSeen` here, we would print output like this:
+            // - 1 entry of A
+            // - (9 duplicate frames omitted)
+            // - 1 entry of B
+            // - (19 duplicate frames omitted)
+            //
+            // This would obscure the control flow, which went from A,
+            // to B, and back to A again.
+            //
+            // In contrast, if we do clear `tracesSeen`, the output looks like this:
+            // - 1 entry of A
+            // - (9 duplicate frames omitted)
+            // - 1 entry of B
+            // - (9 duplicate frames omitted)
+            // - 1 entry of A
+            // - (9 duplicate frames omitted)
+            //
+            // See: `tests/functional/lang/eval-fail-mutual-recursion.nix`
+            tracesSeen.clear();
+        }
+    }
+    // We've either printed each trace in `skippedTraces` normally, or
+    // printed a chunk of `duplicate frames omitted`. Either way, we've
+    // processed these traces and can clear them.
+    skippedTraces.clear();
+}
+
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace)
{
    std::string prefix;
@@ -334,7 +375,13 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
    bool frameOnly = false;
    if (!einfo.traces.empty()) {
+        // Stack traces seen since we last printed a chunk of `duplicate frames
+        // omitted`.
+        std::set<Trace> tracesSeen;
+        // A consecutive sequence of stack traces that are all in `tracesSeen`.
+        std::vector<Trace> skippedTraces;
        size_t count = 0;
+
        for (const auto & trace : einfo.traces) {
            if (trace.hint.str().empty()) continue;
            if (frameOnly && !trace.frame) continue;
@@ -344,14 +391,21 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
                break;
            }

+            if (tracesSeen.count(trace)) {
+                skippedTraces.push_back(trace);
+                continue;
+            }
+            tracesSeen.insert(trace);
+
+            printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen);
+
            count++;
            frameOnly = trace.frame;

-            oss << "\n" << "… " << trace.hint.str() << "\n";
-
-            if (printPosMaybe(oss, ellipsisIndent, trace.pos))
-                count++;
+            printTrace(oss, ellipsisIndent, count, trace);
        }
+
+        printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen);
        oss << "\n" << prefix;
    }
@@ -370,4 +424,5 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
    return out;
}
+
}
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index c04dcbd77..234cbe1f6 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -25,6 +25,7 @@
#include <cstring>
#include <list>
#include <memory>
+#include <set>
#include <map>
#include <optional>

@@ -62,51 +63,28 @@ struct LinesOfCode {
    std::optional<std::string> nextLineOfCode;
};

-/**
- * An abstract type that represents a location in a source file.
- */
-struct AbstractPos
-{
-    uint32_t line = 0;
-    uint32_t column = 0;
-
-    /**
-     * An AbstractPos may be a "null object", representing an unknown position.
-     *
-     * Return true if this position is known.
-     */
-    inline operator bool() const { return line != 0; };
-
-    /**
-     * Return the contents of the source file.
-     */
-    virtual std::optional<std::string> getSource() const
-    { return std::nullopt; };
-
-    virtual void print(std::ostream & out) const = 0;
-
-    std::optional<LinesOfCode> getCodeLines() const;
-
-    virtual ~AbstractPos() = default;
-};
-
-std::ostream & operator << (std::ostream & str, const AbstractPos & pos);
+struct Pos;

void printCodeLines(std::ostream & out,
    const std::string & prefix,
-    const AbstractPos & errPos,
+    const Pos & errPos,
    const LinesOfCode & loc);

struct Trace {
-    std::shared_ptr<AbstractPos> pos;
+    std::shared_ptr<Pos> pos;
    hintformat hint;
    bool frame;
};

+inline bool operator<(const Trace& lhs, const Trace& rhs);
+inline bool operator> (const Trace& lhs, const Trace& rhs);
+inline bool operator<=(const Trace& lhs, const Trace& rhs);
+inline bool operator>=(const Trace& lhs, const Trace& rhs);
+
struct ErrorInfo {
    Verbosity level;
    hintformat msg;
-    std::shared_ptr<AbstractPos> errPos;
+    std::shared_ptr<Pos> errPos;
    std::list<Trace> traces;

    Suggestions suggestions;
@@ -177,12 +155,12 @@ public:
    }

    template<typename... Args>
-    void addTrace(std::shared_ptr<AbstractPos> && e, std::string_view fs, const Args & ... args)
+    void addTrace(std::shared_ptr<Pos> && e, std::string_view fs, const Args & ... args)
    {
        addTrace(std::move(e), hintfmt(std::string(fs), args...));
    }

-    void addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint, bool frame = false);
+    void addTrace(std::shared_ptr<Pos> && e, hintformat hint, bool frame = false);

    bool hasTrace() const { return !err.traces.empty(); }
diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
index ac4d189e1..9b46fc5b0 100644
--- a/src/libutil/experimental-features.cc
+++ b/src/libutil/experimental-features.cc
@@ -80,12 +80,11 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
        .description = R"(
            Enable the use of the [`fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) built-in function in the Nix language.

-            `fetchTree` exposes a large suite of fetching functionality in a more systematic way.
+            `fetchTree` exposes a generic interface for fetching remote file system trees from different types of remote sources.
+            The [`flakes`](#xp-feature-flakes) feature flag always enables `fetch-tree`.
+            This built-in was previously guarded by the `flakes` experimental feature because of that overlap.

-            This built-in was previously guarded by the `flakes` experimental feature because of that overlap,
-            but since the plan is to work on stabilizing this first (due 2024 Q1), we are putting it underneath a separate feature.
-            Once we've made the changes we want to make, enabling just this feature will serve as a "release candidate" --- allowing users to try out the functionality we want to stabilize and not any other functionality we don't yet want to, in isolation.
+            Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
        )",
    },
    {
@@ -252,7 +251,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
        .tag = Xp::ReadOnlyLocalStore,
        .name = "read-only-local-store",
        .description = R"(
-            Allow the use of the `read-only` parameter in [local store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) URIs.
+            Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs.
        )",
    },
    {
@@ -262,6 +261,13 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
            Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
        )",
    },
+    {
+        .tag = Xp::MountedSSHStore,
+        .name = "mounted-ssh-store",
+        .description = R"(
+            Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted).
+        )",
+    },
    {
        .tag = Xp::VerifiedFetches,
        .name = "verified-fetches",
diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh
index c355b8081..eae4fa9b8 100644
--- a/src/libutil/experimental-features.hh
+++ b/src/libutil/experimental-features.hh
@@ -34,6 +34,7 @@ enum struct ExperimentalFeature
    ParseTomlTimestamps,
    ReadOnlyLocalStore,
    ConfigurableImpureEnv,
+    MountedSSHStore,
    VerifiedFetches,
};
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
new file mode 100644
index 000000000..9917986f6
--- /dev/null
+++ b/src/libutil/file-content-address.cc
@@ -0,0 +1,49 @@
+#include "file-content-address.hh"
+#include "archive.hh"
+
+namespace nix {
+
+void dumpPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    FileIngestionMethod method,
+    PathFilter & filter)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        accessor.readFile(path, sink);
+        break;
+    case FileIngestionMethod::Recursive:
+        accessor.dumpPath(path, sink, filter);
+        break;
+    }
+}
+
+
+void restorePath(
+    const Path & path,
+    Source & source,
+    FileIngestionMethod method)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        writeFile(path, source);
+        break;
+    case FileIngestionMethod::Recursive:
+        restorePath(path, source);
+        break;
+    }
+}
+
+
+HashResult hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileIngestionMethod method, HashAlgorithm ht,
+    PathFilter & filter)
+{
+    HashSink sink { ht };
+    dumpPath(accessor, path, sink, method, filter);
+    return sink.finish();
+}
+
+}
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
new file mode 100644
index 000000000..8e93f5847
--- /dev/null
+++ b/src/libutil/file-content-address.hh
@@ -0,0 +1,56 @@
+#pragma once
+///@file
+
+#include "source-accessor.hh"
+#include "fs-sink.hh"
+#include "util.hh"
+
+namespace nix {
+
+/**
+ * An enumeration of the main ways we can serialize file system
+ * objects.
+ */
+enum struct FileIngestionMethod : uint8_t {
+    /**
+     * Flat-file hashing. Directly ingest the contents of a single file
+     */
+    Flat = 0,
+    /**
+     * Recursive (or NAR) hashing. Serializes the file-system object in
+     * Nix Archive format and ingest that.
+     */
+    Recursive = 1,
+};
+
+/**
+ * Dump a serialization of the given file system object.
+ */
+void dumpPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    FileIngestionMethod method,
+    PathFilter & filter = defaultPathFilter);
+
+/**
+ * Restore a serialization of the given file system object.
+ *
+ * @TODO use an arbitrary `ParseSink`.
+ */
+void restorePath(
+    const Path & path,
+    Source & source,
+    FileIngestionMethod method);
+
+/**
+ * Compute the hash of the given file system object according to the
+ * given method.
+ *
+ * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ */
+HashResult hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileIngestionMethod method, HashAlgorithm ht,
+    PathFilter & filter = defaultPathFilter);
+
+}
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index c96effff9..4cac35ace 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -307,7 +307,7 @@ void writeFile(const Path & path, Source & source, mode_t mode, bool sync)
    if (!fd)
        throw SysError("opening file '%1%'", path);

-    std::vector<char> buf(64 * 1024);
+    std::array<char, 64 * 1024> buf;

    try {
        while (true) {
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index a4bd60096..296b75628 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -106,7 +106,7 @@ void parse(
    std::string hashs = getString(source, 20);
    left -= 20;

-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
    std::copy(hashs.begin(), hashs.end(), hash.hash);

    hook(name, TreeEntry {
@@ -241,12 +241,12 @@ Mode dump(


TreeEntry dumpHash(
-    HashType ht,
-    SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
+    HashAlgorithm ha,
+    SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
{
    std::function<DumpHook> hook;
    hook = [&](const CanonPath & path) -> TreeEntry {
-        auto hashSink = HashSink(ht);
+        auto hashSink = HashSink(ha);
        auto mode = dump(accessor, path, hashSink, hook, filter);
        auto hash = hashSink.finish().first;
        return {
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index 303460072..b24b25dd3 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -123,9 +123,9 @@ Mode dump(
 * A smaller wrapper around `dump`.
 */
TreeEntry dumpHash(
-    HashType ht,
-    SourceAccessor & accessor, const CanonPath & path,
-    PathFilter & filter = defaultPathFilter);
+    HashAlgorithm ha,
+    SourceAccessor & accessor, const CanonPath & path,
+    PathFilter & filter = defaultPathFilter);

/**
 * A line from the output of `git ls-remote --symref`.
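As a rough sketch of how the new `file-content-address.hh` helpers compose (illustrative only, assuming some concrete `SourceAccessor` implementation is at hand): flat ingestion hashes only a regular file's bytes, while recursive ingestion hashes the NAR serialization, so the two digests generally disagree for the same path.

```cpp
#include "file-content-address.hh"

using namespace nix;

// Sketch: hash one path with both ingestion methods. By construction,
// hashPath(...) is hashString(algo, dumpPath(...)) over the chosen
// serialization.
void compareIngestionMethods(SourceAccessor & accessor, const CanonPath & path)
{
    // Flat: digest of the raw file contents (regular files only).
    auto [flatHash, flatSize] =
        hashPath(accessor, path, FileIngestionMethod::Flat, HashAlgorithm::SHA256);

    // Recursive: digest of the NAR serialization, which also covers
    // directory structure, symlinks, and the executable bit.
    auto [narHash, narSize] =
        hashPath(accessor, path, FileIngestionMethod::Recursive, HashAlgorithm::SHA256);

    bool same = flatHash == narHash; // false in general
    (void) same; (void) flatSize; (void) narSize;
}
```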
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index 144f7ae7e..d067da969 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -14,25 +14,28 @@
#include <sys/types.h>
#include <sys/stat.h>

+#include <sodium.h>
+
namespace nix {

-static size_t regularHashSize(HashType type) {
+static size_t regularHashSize(HashAlgorithm type) {
    switch (type) {
-    case htMD5: return md5HashSize;
-    case htSHA1: return sha1HashSize;
-    case htSHA256: return sha256HashSize;
-    case htSHA512: return sha512HashSize;
+    case HashAlgorithm::MD5: return md5HashSize;
+    case HashAlgorithm::SHA1: return sha1HashSize;
+    case HashAlgorithm::SHA256: return sha256HashSize;
+    case HashAlgorithm::SHA512: return sha512HashSize;
    }
    abort();
}

-std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
+const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512" };

+const std::set<std::string> hashFormats = {"base64", "nix32", "base16", "sri" };

-Hash::Hash(HashType type) : type(type)
+Hash::Hash(HashAlgorithm algo) : algo(algo)
{
-    hashSize = regularHashSize(type);
+    hashSize = regularHashSize(algo);
    assert(hashSize <= maxHashSize);
    memset(hash, 0, maxHashSize);
}
@@ -81,7 +84,7 @@ static std::string printHash16(const Hash & hash)

// omitted: E O U T
-const std::string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
+const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";


static std::string printHash32(const Hash & hash)
@@ -100,7 +103,7 @@ static std::string printHash32(const Hash & hash)
        unsigned char c =
            (hash.hash[i] >> j)
            | (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j));
-        s.push_back(base32Chars[c & 0x1f]);
+        s.push_back(nix32Chars[c & 0x1f]);
    }

    return s;
@@ -109,23 +112,23 @@ static std::string printHash32(const Hash & hash)

std::string printHash16or32(const Hash & hash)
{
-    assert(hash.type);
-    return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false);
+    assert(static_cast<char>(hash.algo));
+    return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false);
}


-std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
+std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const
{
    std::string s;
-    if (hashFormat == HashFormat::SRI || includeType) {
-        s += printHashType(type);
+    if (hashFormat == HashFormat::SRI || includeAlgo) {
+        s += printHashAlgo(algo);
        s += hashFormat == HashFormat::SRI ? '-' : ':';
    }
    switch (hashFormat) {
    case HashFormat::Base16:
        s += printHash16(*this); break;
-    case HashFormat::Base32:
+    case HashFormat::Nix32:
        s += printHash32(*this); break;
    case HashFormat::Base64:
@@ -136,7 +139,7 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
    return s;
}

-Hash Hash::dummy(htSHA256);
+Hash Hash::dummy(HashAlgorithm::SHA256);

Hash Hash::parseSRI(std::string_view original) {
    auto rest = original;
@@ -145,18 +148,18 @@ Hash Hash::parseSRI(std::string_view original) {
    auto hashRaw = splitPrefixTo(rest, '-');

    if (!hashRaw)
        throw BadHash("hash '%s' is not SRI", original);
-    HashType parsedType = parseHashType(*hashRaw);
+    HashAlgorithm parsedType = parseHashAlgo(*hashRaw);
    return Hash(rest, parsedType, true);
}

// Mutates the string to eliminate the prefixes when found
-static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_view & rest)
+static std::pair<std::optional<HashAlgorithm>, bool> getParsedTypeAndSRI(std::string_view & rest)
{
    bool isSRI = false;

    // Parse the hash type before the separator, if there was one.
-    std::optional<HashType> optParsedType;
+    std::optional<HashAlgorithm> optParsedType;
    {
        auto hashRaw = splitPrefixTo(rest, ':');

@@ -166,7 +169,7 @@ static std::pair<std::optional<HashAlgorithm>, bool> getParsedTypeAndSRI(std::st
            isSRI = true;
        }
        if (hashRaw)
-            optParsedType = parseHashType(*hashRaw);
+            optParsedType = parseHashAlgo(*hashRaw);
    }

    return {optParsedType, isSRI};
@@ -185,29 +188,29 @@ Hash Hash::parseAnyPrefixed(std::string_view original)
    return Hash(rest, *optParsedType, isSRI);
}

-Hash Hash::parseAny(std::string_view original, std::optional<HashType> optType)
+Hash Hash::parseAny(std::string_view original, std::optional<HashAlgorithm> optAlgo)
{
    auto rest = original;
    auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);

    // Either the string or user must provide the type, if they both do they
    // must agree.
-    if (!optParsedType && !optType)
+    if (!optParsedType && !optAlgo)
        throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest);
-    else if (optParsedType && optType && *optParsedType != *optType)
-        throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
+    else if (optParsedType && optAlgo && *optParsedType != *optAlgo)
+        throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo));

-    HashType hashType = optParsedType ? *optParsedType : *optType;
-    return Hash(rest, hashType, isSRI);
+    HashAlgorithm hashAlgo = optParsedType ? *optParsedType : *optAlgo;
+    return Hash(rest, hashAlgo, isSRI);
}

-Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type)
+Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo)
{
-    return Hash(s, type, false);
+    return Hash(s, algo, false);
}


-Hash::Hash(std::string_view rest, HashType type, bool isSRI)
-    : Hash(type)
+Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI)
+    : Hash(algo)
{
    if (!isSRI && rest.size() == base16Len()) {

@@ -230,8 +233,8 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
        for (unsigned int n = 0; n < rest.size(); ++n) {
            char c = rest[rest.size() - n - 1];
            unsigned char digit;
-            for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
-                if (base32Chars[digit] == c) break;
+            for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! slow */
+                if (nix32Chars[digit] == c) break;
            if (digit >= 32)
                throw BadHash("invalid base-32 hash '%s'", rest);
            unsigned int b = n * 5;
@@ -257,19 +260,26 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI)
    }

    else
-        throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
+        throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo));
}

-Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
+Hash Hash::random(HashAlgorithm algo)
+{
+    Hash hash(algo);
+    randombytes_buf(hash.hash, hash.hashSize);
+    return hash;
+}
+
+Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha)
{
    if (hashStr.empty()) {
-        if (!ht)
+        if (!ha)
            throw BadHash("empty hash requires explicit hash type");
-        Hash h(*ht);
+        Hash h(*ha);
        warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
        return h;
    } else
-        return Hash::parseAny(hashStr, ht);
+        return Hash::parseAny(hashStr, ha);
}


union Ctx
{
@@ -282,58 +292,58 @@ union Ctx
};


-static void start(HashType ht, Ctx & ctx)
+static void start(HashAlgorithm ha, Ctx & ctx)
{
-    if (ht == htMD5) MD5_Init(&ctx.md5);
-    else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
-    else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
-    else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
+    if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
+    else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1);
+    else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256);
+    else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512);
}


-static void update(HashType ht, Ctx & ctx,
-    std::string_view data)
+static void update(HashAlgorithm ha, Ctx & ctx,
+    std::string_view data)
{
-    if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size());
-    else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
-    else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
-    else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
+    if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
}


-static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
+static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash)
{
-    if (ht == htMD5) MD5_Final(hash, &ctx.md5);
-    else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
-    else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
-    else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
+    if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
+    else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1);
+    else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256);
+    else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512);
}


-Hash hashString(HashType ht, std::string_view s)
+Hash hashString(HashAlgorithm ha, std::string_view s)
{
    Ctx ctx;
-    Hash hash(ht);
-    start(ht, ctx);
-    update(ht, ctx, s);
-    finish(ht, ctx, hash.hash);
+    Hash hash(ha);
+    start(ha, ctx);
+    update(ha, ctx, s);
+    finish(ha, ctx, hash.hash);
    return hash;
}


-Hash hashFile(HashType ht, const Path & path)
+Hash hashFile(HashAlgorithm ha, const Path & path)
{
-    HashSink sink(ht);
+    HashSink sink(ha);
    readFile(path, sink);
    return sink.finish().first;
}


-HashSink::HashSink(HashType ht) : ht(ht)
+HashSink::HashSink(HashAlgorithm ha) : ha(ha)
{
    ctx = new Ctx;
    bytes = 0;
-    start(ht, *ctx);
+    start(ha, *ctx);
}

HashSink::~HashSink()
@@ -345,14 +355,14 @@ HashSink::~HashSink()
void HashSink::writeUnbuffered(std::string_view data)
{
    bytes += data.size();
-    update(ht, *ctx, data);
+    update(ha, *ctx, data);
}

HashResult HashSink::finish()
{
    flush();
-    Hash hash(ht);
-    nix::finish(ht, *ctx, hash.hash);
+    Hash hash(ha);
+    nix::finish(ha, *ctx, hash.hash);
    return HashResult(hash, bytes);
}

@@ -360,24 +370,15 @@ HashResult HashSink::currentHash()
{
    flush();
    Ctx ctx2 = *ctx;
-    Hash hash(ht);
-    nix::finish(ht, ctx2, hash.hash);
+    Hash hash(ha);
+    nix::finish(ha, ctx2, hash.hash);
    return HashResult(hash, bytes);
}


-HashResult hashPath(
-    HashType ht, const Path & path, PathFilter & filter)
-{
-    HashSink sink(ht);
-    dumpPath(path, sink, filter);
-    return sink.finish();
-}
-
-
Hash compressHash(const Hash & hash, unsigned int newSize)
{
-    Hash h(hash.type);
+    Hash h(hash.algo);
    h.hashSize = newSize;
    for (unsigned int i = 0; i < hash.hashSize; ++i)
        h.hash[i % newSize] ^= hash.hash[i];
@@ -388,7 +389,11 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName)
{
    if (hashFormatName == "base16") return HashFormat::Base16;
-    if (hashFormatName == "base32") return HashFormat::Base32;
+    if (hashFormatName == "nix32") return HashFormat::Nix32;
+    if (hashFormatName == "base32") {
+        warn(R"("base32" is a deprecated alias for hash format "nix32".)");
+        return HashFormat::Nix32;
+    }
    if (hashFormatName == "base64") return HashFormat::Base64;
    if (hashFormatName == "sri") return HashFormat::SRI;
    return std::nullopt;
@@ -407,8 +412,8 @@ std::string_view printHashFormat(HashFormat HashFormat)
    switch (HashFormat) {
    case HashFormat::Base64:
        return "base64";
-    case HashFormat::Base32:
-        return "base32";
+    case HashFormat::Nix32:
+        return "nix32";
    case HashFormat::Base16:
        return "base16";
    case HashFormat::SRI:
@@ -420,31 +425,31 @@ std::string_view printHashFormat(HashFormat HashFormat)
    }
}

-std::optional<HashType> parseHashTypeOpt(std::string_view s)
+std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s)
{
-    if (s == "md5") return htMD5;
-    if (s == "sha1") return htSHA1;
-    if (s == "sha256") return htSHA256;
-    if (s == "sha512") return htSHA512;
+    if (s == "md5") return HashAlgorithm::MD5;
+    if (s == "sha1") return HashAlgorithm::SHA1;
+    if (s == "sha256") return HashAlgorithm::SHA256;
+    if (s == "sha512") return HashAlgorithm::SHA512;
    return std::nullopt;
}

-HashType parseHashType(std::string_view s)
+HashAlgorithm parseHashAlgo(std::string_view s)
{
-    auto opt_h = parseHashTypeOpt(s);
+    auto opt_h = parseHashAlgoOpt(s);
    if (opt_h)
        return *opt_h;
    else
        throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
}

-std::string_view printHashType(HashType ht)
+std::string_view printHashAlgo(HashAlgorithm ha)
{
-    switch (ht) {
-    case htMD5: return "md5";
-    case htSHA1: return "sha1";
-    case htSHA256: return "sha256";
-    case htSHA512: return "sha512";
+    switch (ha) {
+    case HashAlgorithm::MD5: return "md5";
+    case HashAlgorithm::SHA1: return "sha1";
+    case HashAlgorithm::SHA256: return "sha256";
+    case HashAlgorithm::SHA512: return "sha512";
    default:
        // illegal hash type enum value internally, as opposed to external input
        // which should be validated with nice error message.
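To summarize the rename in one place before the header diff below, here is a small usage sketch. It is illustrative only and uses only functions shown in this diff; `hashRenameDemo` is a hypothetical name.

```cpp
#include "hash.hh"

using namespace nix;

void hashRenameDemo()
{
    // HashType/parseHashType/printHashType become
    // HashAlgorithm/parseHashAlgo/printHashAlgo.
    HashAlgorithm algo = parseHashAlgo("sha256");

    Hash h = hashString(algo, "hello");

    // HashFormat::Base32 becomes HashFormat::Nix32; parseHashFormatOpt
    // still accepts "base32" but warns that it is a deprecated alias.
    std::string nix32 = h.to_string(HashFormat::Nix32, /* includeAlgo */ true);
    std::string sri = h.to_string(HashFormat::SRI, true);
}
```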
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 6ade6555c..f7e8eb265 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -5,14 +5,13 @@
#include "serialise.hh"
#include "file-system.hh"

-
namespace nix {


MakeError(BadHash, Error);


-enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
+enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 };


const int md5HashSize = 16;
@@ -20,9 +19,9 @@ const int sha1HashSize = 20;
const int sha256HashSize = 32;
const int sha512HashSize = 64;

-extern std::set<std::string> hashTypes;
+extern const std::set<std::string> hashAlgorithms;

-extern const std::string base32Chars;
+extern const std::string nix32Chars;

/**
 * @brief Enumeration representing the hash formats.
@@ -31,8 +30,8 @@ enum struct HashFormat : int {
    /// @brief Base 64 encoding.
    /// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4).
    Base64,
-    /// @brief Nix-specific base-32 encoding. @see base32Chars
-    Base32,
+    /// @brief Nix-specific base-32 encoding. @see nix32Chars
+    Nix32,
    /// @brief Lowercase hexadecimal encoding. @see base16Chars
    Base16,
    /// @brief ":", format of the SRI integrity attribute.
@@ -40,6 +39,7 @@ enum struct HashFormat : int {
    SRI
};

+extern const std::set<std::string> hashFormats;

struct Hash
{
@@ -47,12 +47,12 @@ struct Hash
    size_t hashSize = 0;
    uint8_t hash[maxHashSize] = {};

-    HashType type;
+    HashAlgorithm algo;

    /**
     * Create a zero-filled hash object.
     */
-    Hash(HashType type);
+    explicit Hash(HashAlgorithm algo);

    /**
     * Parse the hash from a string representation in the format
@@ -61,7 +61,7 @@ struct Hash
     * is not present, then the hash type must be specified in the
     * string.
     */
-    static Hash parseAny(std::string_view s, std::optional<HashType> type);
+    static Hash parseAny(std::string_view s, std::optional<HashAlgorithm> optAlgo);

    /**
     * Parse a hash from a string representation like the above, except the
@@ -73,7 +73,7 @@ struct Hash
     * Parse a plain hash that must not have any prefix indicating the type.
     * The type is passed in to disambiguate.
     */
-    static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
+    static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo);

    static Hash parseSRI(std::string_view original);

@@ -82,7 +82,7 @@ private:
     * The type must be provided, the string view must not include
     * prefix. `isSRI` helps disambiguate the various base-* encodings.
     */
-    Hash(std::string_view s, HashType type, bool isSRI);
+    Hash(std::string_view s, HashAlgorithm algo, bool isSRI);

public:
    /**
@@ -103,7 +103,7 @@ public:
    /**
     * Returns the length of a base-16 representation of this hash.
     */
-    size_t base16Len() const
+    [[nodiscard]] size_t base16Len() const
    {
        return hashSize * 2;
    }
@@ -111,7 +111,7 @@ public:
    /**
     * Returns the length of a base-32 representation of this hash.
     */
-    size_t base32Len() const
+    [[nodiscard]] size_t base32Len() const
    {
        return (hashSize * 8 - 1) / 5 + 1;
    }
@@ -119,35 +119,40 @@ public:
    /**
     * Returns the length of a base-64 representation of this hash.
     */
-    size_t base64Len() const
+    [[nodiscard]] size_t base64Len() const
    {
        return ((4 * hashSize / 3) + 3) & ~3;
    }

    /**
     * Return a string representation of the hash, in base-16, base-32
-     * or base-64. By default, this is prefixed by the hash type
+     * or base-64. By default, this is prefixed by the hash algo
     * (e.g. "sha256:").
     */
-    std::string to_string(HashFormat hashFormat, bool includeType) const;
+    [[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeAlgo) const;

-    std::string gitRev() const
+    [[nodiscard]] std::string gitRev() const
    {
        return to_string(HashFormat::Base16, false);
    }

-    std::string gitShortRev() const
+    [[nodiscard]] std::string gitShortRev() const
    {
        return std::string(to_string(HashFormat::Base16, false), 0, 7);
    }

    static Hash dummy;
+
+    /**
+     * @return a random hash with hash algorithm `algo`
+     */
+    static Hash random(HashAlgorithm algo);
};

/**
 * Helper that defaults empty hashes to the 0 hash.
 */
-Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht);
+Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha);

/**
 * Print a hash in base-16 if it's MD5, or base-32 otherwise.
@@ -157,24 +162,21 @@ std::string printHash16or32(const Hash & hash);
/**
 * Compute the hash of the given string.
 */
-Hash hashString(HashType ht, std::string_view s);
+Hash hashString(HashAlgorithm ha, std::string_view s);

/**
 * Compute the hash of the given file, hashing its contents directly.
 *
 * (Metadata, such as the executable permission bit, is ignored.)
 */
-Hash hashFile(HashType ht, const Path & path);
+Hash hashFile(HashAlgorithm ha, const Path & path);

/**
- * Compute the hash of the given path, serializing as a Nix Archive and
- * then hashing that.
+ * The final hash and the number of bytes digested.
 *
- * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ * @todo Convert to proper struct
 */
typedef std::pair<Hash, uint64_t> HashResult;
-HashResult hashPath(HashType ht, const Path & path,
-    PathFilter & filter = defaultPathFilter);

/**
 * Compress a hash to the specified number of bytes by cyclically
@@ -200,17 +202,17 @@ std::string_view printHashFormat(HashFormat hashFormat);
/**
 * Parse a string representing a hash type.
 */
-HashType parseHashType(std::string_view s);
+HashAlgorithm parseHashAlgo(std::string_view s);

/**
 * Will return nothing on parse error
 */
-std::optional<HashType> parseHashTypeOpt(std::string_view s);
+std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s);

/**
 * And the reverse.
 */
-std::string_view printHashType(HashType ht);
+std::string_view printHashAlgo(HashAlgorithm ha);


union Ctx;

@@ -223,12 +225,12 @@ struct AbstractHashSink : virtual Sink
class HashSink : public BufferedSink, public AbstractHashSink
{
private:
-    HashType ht;
+    HashAlgorithm ha;
    Ctx * ctx;
    uint64_t bytes;

public:
-    HashSink(HashType ht);
+    HashSink(HashAlgorithm ha);
    HashSink(const HashSink & h);
    ~HashSink();
    void writeUnbuffered(std::string_view data) override;
diff --git a/src/libutil/input-accessor.hh b/src/libutil/input-accessor.hh
new file mode 100644
index 000000000..55b7c2f2f
--- /dev/null
+++ b/src/libutil/input-accessor.hh
@@ -0,0 +1,27 @@
+#pragma once
+///@file
+
+#include "source-accessor.hh"
+#include "ref.hh"
+#include "repair-flag.hh"
+
+namespace nix {
+
+MakeError(RestrictedPathError, Error);
+
+struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
+{
+    std::optional<std::string> fingerprint;
+
+    /**
+     * Return the maximum last-modified time of the files in this
+     * tree, if available.
+ */ + virtual std::optional<time_t> getLastModified() + { + return std::nullopt; + } + +}; + +} diff --git a/src/libutil/json-utils.hh b/src/libutil/json-utils.hh index 77c63595c..06dd80cf7 100644 --- a/src/libutil/json-utils.hh +++ b/src/libutil/json-utils.hh @@ -78,20 +78,29 @@ namespace nlohmann { */ template<typename T> struct adl_serializer<std::optional<T>> { - static std::optional<T> from_json(const json & json) { + /** + * @brief Convert a JSON type to an `optional<T>` treating + * `null` as `std::nullopt`. + */ + static void from_json(const json & json, std::optional<T> & t) { static_assert( nix::json_avoids_null<T>::value, "null is already in use for underlying type's JSON"); - return json.is_null() + t = json.is_null() ? std::nullopt - : std::optional { adl_serializer<T>::from_json(json) }; + : std::make_optional(json.template get<T>()); } - static void to_json(json & json, std::optional<T> t) { + + /** + * @brief Convert an optional type to a JSON type treating `std::nullopt` + * as `null`. + */ + static void to_json(json & json, const std::optional<T> & t) { static_assert( nix::json_avoids_null<T>::value, "null is already in use for underlying type's JSON"); if (t) - adl_serializer<T>::to_json(json, *t); + json = *t; else json = nullptr; } diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 81efaafec..200026c1e 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -4,15 +4,18 @@ libutil_NAME = libnixutil libutil_DIR := $(d) -libutil_SOURCES := $(wildcard $(d)/*.cc) +libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc) libutil_CXXFLAGS += -I src/libutil -libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +libutil_LDFLAGS += $(THREAD_LDFLAGS) $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context $(foreach i, $(wildcard $(d)/args/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644))) +$(foreach i, $(wildcard $(d)/signature/*.hh), \ + $(eval $(call install-file-in, $(i), $(includedir)/nix/signature, 0644))) + ifeq ($(HAVE_LIBCPUID), 1) - libutil_LDFLAGS += -lcpuid + libutil_LDFLAGS += -lcpuid endif diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 60b0865bf..183aee2dc 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -4,6 +4,8 @@ #include "terminal.hh" #include "util.hh" #include "config.hh" +#include "source-path.hh" +#include "position.hh" #include #include @@ -136,13 +138,13 @@ Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type, logger.startActivity(id, lvl, type, s, fields, parent); } -void to_json(nlohmann::json & json, std::shared_ptr<AbstractPos> pos) +void to_json(nlohmann::json & json, std::shared_ptr<Pos> pos) { if (pos) { json["line"] = pos->line; json["column"] = pos->column; std::ostringstream str; - pos->print(str); + pos->print(str, true); json["file"] = str.str(); } else { json["line"] = nullptr; diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 5aa6bee95..183f2d8e1 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -23,6 +23,7 @@ typedef enum { actQueryPathInfo = 109, actPostBuildHook = 110, actBuildWaiting = 111, + actFetchTree = 112, } ActivityType; typedef enum { @@ -34,6 +35,7 @@ typedef enum { resProgress = 105, resSetExpected = 106, resPostBuildLogLine = 107, + resFetchStatus = 108, } ResultType; typedef uint64_t ActivityId; diff --git a/src/libutil/position.cc b/src/libutil/position.cc new file mode 100644 index 000000000..b39a5a1d4 --- /dev/null +++ b/src/libutil/position.cc @@ 
-0,0 +1,112 @@ +#include "position.hh" + +namespace nix { + +Pos::Pos(const Pos * other) +{ + if (!other) { + return; + } + line = other->line; + column = other->column; + origin = std::move(other->origin); +} + +Pos::operator std::shared_ptr() const +{ + return std::make_shared(&*this); +} + +bool Pos::operator<(const Pos &rhs) const +{ + return std::forward_as_tuple(line, column, origin) + < std::forward_as_tuple(rhs.line, rhs.column, rhs.origin); +} + +std::optional Pos::getCodeLines() const +{ + if (line == 0) + return std::nullopt; + + if (auto source = getSource()) { + + std::istringstream iss(*source); + // count the newlines. + int count = 0; + std::string curLine; + int pl = line - 1; + + LinesOfCode loc; + + do { + std::getline(iss, curLine); + ++count; + if (count < pl) + ; + else if (count == pl) { + loc.prevLineOfCode = curLine; + } else if (count == pl + 1) { + loc.errLineOfCode = curLine; + } else if (count == pl + 2) { + loc.nextLineOfCode = curLine; + break; + } + + if (!iss.good()) + break; + } while (true); + + return loc; + } + + return std::nullopt; +} + + +std::optional Pos::getSource() const +{ + return std::visit(overloaded { + [](const std::monostate &) -> std::optional { + return std::nullopt; + }, + [](const Pos::Stdin & s) -> std::optional { + // Get rid of the null terminators added by the parser. + return std::string(s.source->c_str()); + }, + [](const Pos::String & s) -> std::optional { + // Get rid of the null terminators added by the parser. + return std::string(s.source->c_str()); + }, + [](const SourcePath & path) -> std::optional { + try { + return path.readFile(); + } catch (Error &) { + return std::nullopt; + } + } + }, origin); +} + +void Pos::print(std::ostream & out, bool showOrigin) const +{ + if (showOrigin) { + std::visit(overloaded { + [&](const std::monostate &) { out << "«none»"; }, + [&](const Pos::Stdin &) { out << "«stdin»"; }, + [&](const Pos::String & s) { out << "«string»"; }, + [&](const SourcePath & path) { out << path; } + }, origin); + out << ":"; + } + out << line; + if (column > 0) + out << ":" << column; +} + +std::ostream & operator<<(std::ostream & str, const Pos & pos) +{ + pos.print(str, true); + return str; +} + +} diff --git a/src/libutil/position.hh b/src/libutil/position.hh new file mode 100644 index 000000000..a184997ed --- /dev/null +++ b/src/libutil/position.hh @@ -0,0 +1,74 @@ +#pragma once +/** + * @file + * + * @brief Pos and AbstractPos + */ + +#include +#include + +#include "source-path.hh" + +namespace nix { + +/** + * A position and an origin for that position (like a source file). 
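Before the definition itself, here is an illustrative fragment (not part of the patch) showing how the `Pos` API below is meant to be used with a string origin:

```cpp
// Illustrative use of Pos; make_ref comes from ref.hh.
auto source = make_ref<std::string>("x: x + 1\n");
Pos pos{1, 4, Pos::String{source}};
std::ostringstream out;
pos.print(out, true);               // showOrigin = true prints the origin
assert(out.str() == "«string»:1:4");
```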
+ */ +struct Pos +{ + uint32_t line = 0; + uint32_t column = 0; + + struct Stdin { + ref<std::string> source; + bool operator==(const Stdin & rhs) const + { return *source == *rhs.source; } + bool operator!=(const Stdin & rhs) const + { return *source != *rhs.source; } + bool operator<(const Stdin & rhs) const + { return *source < *rhs.source; } + }; + struct String { + ref<std::string> source; + bool operator==(const String & rhs) const + { return *source == *rhs.source; } + bool operator!=(const String & rhs) const + { return *source != *rhs.source; } + bool operator<(const String & rhs) const + { return *source < *rhs.source; } + }; + + typedef std::variant<std::monostate, Stdin, String, SourcePath> Origin; + + Origin origin = std::monostate(); + + Pos() { } + Pos(uint32_t line, uint32_t column, Origin origin) + : line(line), column(column), origin(origin) { } + Pos(Pos & other) = default; + Pos(const Pos & other) = default; + Pos(Pos && other) = default; + Pos(const Pos * other); + + explicit operator bool() const { return line > 0; } + + operator std::shared_ptr<const Pos>() const; + + /** + * Return the contents of the source file. + */ + std::optional<std::string> getSource() const; + + void print(std::ostream & out, bool showOrigin) const; + + std::optional<LinesOfCode> getCodeLines() const; + + bool operator==(const Pos & rhs) const = default; + bool operator!=(const Pos & rhs) const = default; + bool operator<(const Pos & rhs) const; +}; + +std::ostream & operator<<(std::ostream & str, const Pos & pos); + +} diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index dc96f84e5..5f26fa67b 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -1,5 +1,8 @@ #include "posix-source-accessor.hh" #include "signals.hh" +#include "sync.hh" + +#include <unordered_map> namespace nix { @@ -8,9 +11,9 @@ void PosixSourceAccessor::readFile( Sink & sink, std::function<void(uint64_t)> sizeCallback) { - // FIXME: add O_NOFOLLOW since symlinks should be resolved by the - // caller?
- AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC); + assertNoSymlinks(path); + + AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); if (!fd) throw SysError("opening file '%1%'", path); @@ -22,7 +25,7 @@ void PosixSourceAccessor::readFile( off_t left = st.st_size; - std::vector buf(64 * 1024); + std::array buf; while (left) { checkInterrupt(); ssize_t rd = read(fd.get(), buf.data(), (size_t) std::min(left, (off_t) buf.size())); @@ -42,30 +45,55 @@ void PosixSourceAccessor::readFile( bool PosixSourceAccessor::pathExists(const CanonPath & path) { + if (auto parent = path.parent()) assertNoSymlinks(*parent); return nix::pathExists(path.abs()); } +std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path) +{ + static Sync>> _cache; + + { + auto cache(_cache.lock()); + auto i = cache->find(path); + if (i != cache->end()) return i->second; + } + + std::optional st{std::in_place}; + if (::lstat(path.c_str(), &*st)) { + if (errno == ENOENT || errno == ENOTDIR) + st.reset(); + else + throw SysError("getting status of '%s'", showPath(path)); + } + + auto cache(_cache.lock()); + if (cache->size() >= 16384) cache->clear(); + cache->emplace(path, st); + + return st; +} + std::optional PosixSourceAccessor::maybeLstat(const CanonPath & path) { - struct stat st; - if (::lstat(path.c_str(), &st)) { - if (errno == ENOENT) return std::nullopt; - throw SysError("getting status of '%s'", showPath(path)); - } - mtime = std::max(mtime, st.st_mtime); + if (auto parent = path.parent()) assertNoSymlinks(*parent); + auto st = cachedLstat(path); + if (!st) return std::nullopt; + mtime = std::max(mtime, st->st_mtime); return Stat { .type = - S_ISREG(st.st_mode) ? tRegular : - S_ISDIR(st.st_mode) ? tDirectory : - S_ISLNK(st.st_mode) ? tSymlink : + S_ISREG(st->st_mode) ? tRegular : + S_ISDIR(st->st_mode) ? tDirectory : + S_ISLNK(st->st_mode) ? tSymlink : tMisc, - .fileSize = S_ISREG(st.st_mode) ? std::optional(st.st_size) : std::nullopt, - .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR, + .fileSize = S_ISREG(st->st_mode) ? std::optional(st->st_size) : std::nullopt, + .isExecutable = S_ISREG(st->st_mode) && st->st_mode & S_IXUSR, }; } SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path) { + assertNoSymlinks(path); DirEntries res; for (auto & entry : nix::readDirectory(path.abs())) { std::optional type; @@ -81,6 +109,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & std::string PosixSourceAccessor::readLink(const CanonPath & path) { + if (auto parent = path.parent()) assertNoSymlinks(*parent); return nix::readLink(path.abs()); } @@ -89,4 +118,14 @@ std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & return path; } +void PosixSourceAccessor::assertNoSymlinks(CanonPath path) +{ + while (!path.isRoot()) { + auto st = cachedLstat(path); + if (st && S_ISLNK(st->st_mode)) + throw Error("path '%s' is a symlink", showPath(path)); + path.pop(); + } +} + } diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index cf087d26e..b2bd39805 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -7,7 +7,7 @@ namespace nix { /** * A source accessor that uses the Unix filesystem. */ -struct PosixSourceAccessor : SourceAccessor +struct PosixSourceAccessor : virtual SourceAccessor { /** * The most recent mtime seen by lstat(). 
This is a hack to @@ -29,6 +29,15 @@ struct PosixSourceAccessor : SourceAccessor std::string readLink(const CanonPath & path) override; std::optional getPhysicalPath(const CanonPath & path) override; + +private: + + /** + * Throw an error if `path` or any of its ancestors are symlinks. + */ + void assertNoSymlinks(CanonPath path); + + std::optional cachedLstat(const CanonPath & path); }; } diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh index af5f8304c..5d0c3696d 100644 --- a/src/libutil/ref.hh +++ b/src/libutil/ref.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include #include #include #include diff --git a/src/libutil/references.cc b/src/libutil/references.cc index 9d75606ef..b30e62c7b 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -23,8 +23,8 @@ static void search( static bool isBase32[256]; std::call_once(initialised, [](){ for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false; - for (unsigned int i = 0; i < base32Chars.size(); ++i) - isBase32[(unsigned char) base32Chars[i]] = true; + for (unsigned int i = 0; i < nix32Chars.size(); ++i) + isBase32[(unsigned char) nix32Chars[i]] = true; }); for (size_t i = 0; i + refLength <= s.size(); ) { @@ -110,8 +110,8 @@ void RewritingSink::flush() prev.clear(); } -HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus) - : hashSink(ht) +HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus) + : hashSink(ha) , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink) { } diff --git a/src/libutil/references.hh b/src/libutil/references.hh index f0baeffe1..8bc9f7ec9 100644 --- a/src/libutil/references.hh +++ b/src/libutil/references.hh @@ -46,7 +46,7 @@ struct HashModuloSink : AbstractHashSink HashSink hashSink; RewritingSink rewritingSink; - HashModuloSink(HashType ht, const std::string & modulus); + HashModuloSink(HashAlgorithm ha, const std::string & modulus); void operator () (std::string_view data) override; diff --git a/src/libstore/repair-flag.hh b/src/libutil/repair-flag.hh similarity index 100% rename from src/libstore/repair-flag.hh rename to src/libutil/repair-flag.hh diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index d7950b11b..76b378e18 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -82,7 +82,7 @@ void Source::operator () (std::string_view data) void Source::drainInto(Sink & sink) { std::string s; - std::vector buf(8192); + std::array buf; while (true) { size_t n; try { @@ -448,7 +448,7 @@ Error readError(Source & source) auto msg = readString(source); ErrorInfo info { .level = level, - .msg = hintformat(fmt("%s", msg)), + .msg = hintfmt(msg), }; auto havePos = readNum(source); assert(havePos == 0); @@ -457,7 +457,7 @@ Error readError(Source & source) havePos = readNum(source); assert(havePos == 0); info.traces.push_back(Trace { - .hint = hintformat(fmt("%s", readString(source))) + .hint = hintfmt(readString(source)) }); } return Error(std::move(info)); diff --git a/src/libstore/crypto.cc b/src/libutil/signature/local-keys.cc similarity index 64% rename from src/libstore/crypto.cc rename to src/libutil/signature/local-keys.cc index 1b705733c..858b036f5 100644 --- a/src/libstore/crypto.cc +++ b/src/libutil/signature/local-keys.cc @@ -1,13 +1,12 @@ -#include "crypto.hh" +#include "signature/local-keys.hh" + #include "file-system.hh" #include "util.hh" -#include "globals.hh" - #include namespace nix { -static std::pair split(std::string_view s) +BorrowedCryptoValue BorrowedCryptoValue::parse(std::string_view s) { 
size_t colon = s.find(':'); if (colon == std::string::npos || colon == 0) @@ -17,10 +16,10 @@ static std::pair<std::string_view, std::string_view> split(std::string_view s) Key::Key(std::string_view s) { - auto ss = split(s); + auto ss = BorrowedCryptoValue::parse(s); - name = ss.first; - key = ss.second; + name = ss.name; + key = ss.payload; if (name == "" || key == "") throw Error("secret key is corrupt"); @@ -73,45 +72,34 @@ PublicKey::PublicKey(std::string_view s) throw Error("public key is not valid"); } -bool verifyDetached(const std::string & data, const std::string & sig, - const PublicKeys & publicKeys) +bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) const { - auto ss = split(sig); + auto ss = BorrowedCryptoValue::parse(sig); - auto key = publicKeys.find(std::string(ss.first)); - if (key == publicKeys.end()) return false; + if (ss.name != std::string_view { name }) return false; - auto sig2 = base64Decode(ss.second); + return verifyDetachedAnon(data, ss.payload); +} + +bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) const +{ + auto sig2 = base64Decode(sig); if (sig2.size() != crypto_sign_BYTES) throw Error("signature is not valid"); return crypto_sign_verify_detached((unsigned char *) sig2.data(), (unsigned char *) data.data(), data.size(), - (unsigned char *) key->second.key.data()) == 0; + (unsigned char *) key.data()) == 0; } -PublicKeys getDefaultPublicKeys() +bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys) { - PublicKeys publicKeys; + auto ss = BorrowedCryptoValue::parse(sig); - // FIXME: filter duplicates + auto key = publicKeys.find(std::string(ss.name)); + if (key == publicKeys.end()) return false; - for (auto s : settings.trustedPublicKeys.get()) { - PublicKey key(s); - publicKeys.emplace(key.name, key); - } - - for (auto secretKeyFile : settings.secretKeyFiles.get()) { - try { - SecretKey secretKey(readFile(secretKeyFile)); - publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); - } catch (SysError & e) { - /* Ignore unreadable key files. That's normal in a - multi-user installation. */ - } - } - - return publicKeys; + return key->second.verifyDetachedAnon(data, ss.payload); } } diff --git a/src/libstore/crypto.hh b/src/libutil/signature/local-keys.hh similarity index 51% rename from src/libstore/crypto.hh rename to src/libutil/signature/local-keys.hh index 35216d470..4aafc1239 100644 --- a/src/libstore/crypto.hh +++ b/src/libutil/signature/local-keys.hh @@ -7,6 +7,25 @@ namespace nix { +/** + * Except where otherwise noted, Nix serializes keys and signatures in + * the form: + * + * ``` + * <name>:<key/signature in Base64> + * ``` + */ +struct BorrowedCryptoValue { + std::string_view name; + std::string_view payload; + + /** + * This splits on the colon; the user can then decode the + * Base64 payload separately. + */ + static BorrowedCryptoValue parse(std::string_view); +}; + struct Key { std::string name; @@ -49,21 +68,36 @@ struct PublicKey : Key { PublicKey(std::string_view data); + /** + * @return true iff `sig` and this key's names match, and `sig` is a + * correct signature over `data` using the given public key. + */ + bool verifyDetached(std::string_view data, std::string_view sigs) const; + + /** + * @return true iff `sig` is a correct signature over `data` using the + * given public key. + * + * @param sigs just the Base64 signature itself, not a colon-separated pair of a + * public key name and signature.
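For instance, on the serialized form described above, `parse` yields two borrowed views; the key name and payload here are made up for illustration:

```cpp
// Illustration of BorrowedCryptoValue::parse on "<name>:<payload>".
auto v = BorrowedCryptoValue::parse("cache.example.org-1:SGVsbG8=");
assert(v.name == "cache.example.org-1");
assert(v.payload == "SGVsbG8=");   // still Base64; decode it separately
```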
+ */ + bool verifyDetachedAnon(std::string_view data, std::string_view sigs) const; + private: PublicKey(std::string_view name, std::string && key) : Key(name, std::move(key)) { } friend struct SecretKey; }; +/** + * Map from key names to public keys + */ typedef std::map PublicKeys; /** * @return true iff ‘sig’ is a correct signature over ‘data’ using one * of the given public keys. */ -bool verifyDetached(const std::string & data, const std::string & sig, - const PublicKeys & publicKeys); - -PublicKeys getDefaultPublicKeys(); +bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys); } diff --git a/src/libutil/signature/signer.cc b/src/libutil/signature/signer.cc new file mode 100644 index 000000000..0d26867b5 --- /dev/null +++ b/src/libutil/signature/signer.cc @@ -0,0 +1,23 @@ +#include "signature/signer.hh" +#include "error.hh" + +#include + +namespace nix { + +LocalSigner::LocalSigner(SecretKey && privateKey) + : privateKey(privateKey) + , publicKey(privateKey.toPublicKey()) +{ } + +std::string LocalSigner::signDetached(std::string_view s) const +{ + return privateKey.signDetached(s); +} + +const PublicKey & LocalSigner::getPublicKey() +{ + return publicKey; +} + +} diff --git a/src/libutil/signature/signer.hh b/src/libutil/signature/signer.hh new file mode 100644 index 000000000..e50170fe2 --- /dev/null +++ b/src/libutil/signature/signer.hh @@ -0,0 +1,61 @@ +#pragma once + +#include "types.hh" +#include "signature/local-keys.hh" + +#include +#include + +namespace nix { + +/** + * An abstract signer + * + * Derive from this class to implement a custom signature scheme. + * + * It is only necessary to implement signature of bytes and provide a + * public key. + */ +struct Signer +{ + virtual ~Signer() = default; + + /** + * Sign the given data, creating a (detached) signature. + * + * @param data data to be signed. + * + * @return the [detached + * signature](https://en.wikipedia.org/wiki/Detached_signature), + * i.e. just the signature itself without a copy of the signed data. + */ + virtual std::string signDetached(std::string_view data) const = 0; + + /** + * View the public key associated with this `Signer`. 
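To illustrate the `Signer` interface declared below, a custom scheme would only need to implement these two members. This is a hypothetical sketch; `requestSignature` is an invented helper, not a real API:

```cpp
// Hypothetical example: a signer backed by some external service.
struct RemoteSigner : Signer
{
    PublicKey publicKey;

    explicit RemoteSigner(PublicKey pk) : publicKey(std::move(pk)) { }

    std::string signDetached(std::string_view data) const override
    {
        // Would return "<name>:<Base64 signature>", like LocalSigner does.
        return requestSignature(publicKey.name, data);
    }

    const PublicKey & getPublicKey() override
    {
        return publicKey;
    }
};
```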
+ */ + virtual const PublicKey & getPublicKey() = 0; +}; + +using Signers = std::map; + +/** + * Local signer + * + * The private key is held in this machine's RAM + */ +struct LocalSigner : Signer +{ + LocalSigner(SecretKey && privateKey); + + std::string signDetached(std::string_view s) const override; + + const PublicKey & getPublicKey() override; + +private: + + SecretKey privateKey; + PublicKey publicKey; +}; + +} diff --git a/src/libutil/source-accessor.cc b/src/libutil/source-accessor.cc index e2114e18f..afbbbe1a9 100644 --- a/src/libutil/source-accessor.cc +++ b/src/libutil/source-accessor.cc @@ -7,6 +7,7 @@ static std::atomic<size_t> nextNumber{0}; SourceAccessor::SourceAccessor() : number(++nextNumber) + , displayPrefix{"«unknown»"} { } @@ -38,11 +39,11 @@ void SourceAccessor::readFile( } Hash SourceAccessor::hashPath( - const CanonPath & path, - PathFilter & filter, - HashType ht) + const CanonPath & path, + PathFilter & filter, + HashAlgorithm ha) { - HashSink sink(ht); + HashSink sink(ha); dumpPath(path, sink, filter); return sink.finish().first; } @@ -55,9 +56,15 @@ SourceAccessor::Stat SourceAccessor::lstat(const CanonPath & path) throw Error("path '%s' does not exist", showPath(path)); } +void SourceAccessor::setPathDisplay(std::string displayPrefix, std::string displaySuffix) +{ + this->displayPrefix = std::move(displayPrefix); + this->displaySuffix = std::move(displaySuffix); +} + std::string SourceAccessor::showPath(const CanonPath & path) { - return path.abs(); + return displayPrefix + path.abs() + displaySuffix; } } diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 1a4e80361..4f4ff09c1 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -17,6 +17,8 @@ struct SourceAccessor { const size_t number; + std::string displayPrefix, displaySuffix; + SourceAccessor(); virtual ~SourceAccessor() @@ -24,6 +26,13 @@ struct SourceAccessor /** * Return the contents of a file as a string. + * + * @note Unlike Unix, this method should *not* follow symlinks. Nix + * by default wants to manipulate symlinks explicitly, and not + * implicitly follow them, as they are frequently untrusted user data + * and thus may point to arbitrary locations. Acting on the + * targets of symlinks should only occasionally be done, and only + * with care. */ virtual std::string readFile(const CanonPath & path); @@ -32,7 +41,10 @@ struct SourceAccessor * called with the size of the file before any data is written to * the sink. * - * Note: subclasses of `SourceAccessor` need to implement at least + * @note Like the other `readFile`, this method should *not* follow + * symlinks. + * + * @note subclasses of `SourceAccessor` need to implement at least * one of the `readFile()` variants. */ virtual void readFile( @@ -85,6 +97,9 @@ struct SourceAccessor typedef std::map<std::string, DirEntry> DirEntries; + /** + * @note Like `readFile`, this method should *not* follow symlinks.
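In caller terms, the no-follow contract stated in these notes looks roughly like the following sketch (`accessor` and `path` are placeholders):

```cpp
// Sketch of the intended caller-side pattern.
auto st = accessor->lstat(path);
if (st.type == SourceAccessor::Type::tSymlink) {
    auto target = accessor->readLink(path);  // inspect the link itself
    // ... the caller decides whether resolving `target` is safe ...
} else if (st.type == SourceAccessor::Type::tRegular) {
    auto contents = accessor->readFile(path);
}
```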
+ */ virtual DirEntries readDirectory(const CanonPath & path) = 0; virtual std::string readLink(const CanonPath & path) = 0; @@ -95,9 +110,9 @@ struct SourceAccessor PathFilter & filter = defaultPathFilter); Hash hashPath( - const CanonPath & path, - PathFilter & filter = defaultPathFilter, - HashType ht = htSHA256); + const CanonPath & path, + PathFilter & filter = defaultPathFilter, + HashAlgorithm ha = HashAlgorithm::SHA256); /** * Return a corresponding path in the root filesystem, if @@ -117,6 +132,8 @@ struct SourceAccessor return number < x.number; } + void setPathDisplay(std::string displayPrefix, std::string displaySuffix = ""); + virtual std::string showPath(const CanonPath & path); }; diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc new file mode 100644 index 000000000..d85b0b7fe --- /dev/null +++ b/src/libutil/source-path.cc @@ -0,0 +1,105 @@ +#include "source-path.hh" + +namespace nix { + +std::string_view SourcePath::baseName() const +{ return path.baseName().value_or("source"); } + +SourcePath SourcePath::parent() const +{ + auto p = path.parent(); + assert(p); + return {accessor, std::move(*p)}; +} + +std::string SourcePath::readFile() const +{ return accessor->readFile(path); } + +bool SourcePath::pathExists() const +{ return accessor->pathExists(path); } + +InputAccessor::Stat SourcePath::lstat() const +{ return accessor->lstat(path); } + +std::optional SourcePath::maybeLstat() const +{ return accessor->maybeLstat(path); } + +InputAccessor::DirEntries SourcePath::readDirectory() const +{ return accessor->readDirectory(path); } + +std::string SourcePath::readLink() const +{ return accessor->readLink(path); } + +void SourcePath::dumpPath( + Sink & sink, + PathFilter & filter) const +{ return accessor->dumpPath(path, sink, filter); } + +std::optional SourcePath::getPhysicalPath() const +{ return accessor->getPhysicalPath(path); } + +std::string SourcePath::to_string() const +{ return accessor->showPath(path); } + +SourcePath SourcePath::operator+(const CanonPath & x) const +{ return {accessor, path + x}; } + +SourcePath SourcePath::operator+(std::string_view c) const +{ return {accessor, path + c}; } + +bool SourcePath::operator==(const SourcePath & x) const +{ + return std::tie(*accessor, path) == std::tie(*x.accessor, x.path); +} + +bool SourcePath::operator!=(const SourcePath & x) const +{ + return std::tie(*accessor, path) != std::tie(*x.accessor, x.path); +} + +bool SourcePath::operator<(const SourcePath & x) const +{ + return std::tie(*accessor, path) < std::tie(*x.accessor, x.path); +} + +SourcePath SourcePath::resolveSymlinks() const +{ + auto res = SourcePath(accessor); + + int linksAllowed = 1024; + + std::list todo; + for (auto & c : path) + todo.push_back(std::string(c)); + + while (!todo.empty()) { + auto c = *todo.begin(); + todo.pop_front(); + if (c == "" || c == ".") + ; + else if (c == "..") + res.path.pop(); + else { + res.path.push(c); + if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) { + if (!linksAllowed--) + throw Error("infinite symlink recursion in path '%s'", path); + auto target = res.readLink(); + res.path.pop(); + if (hasPrefix(target, "/")) + res.path = CanonPath::root; + todo.splice(todo.begin(), tokenizeString>(target, "/")); + } + } + } + + return res; +} + +std::ostream & operator<<(std::ostream & str, const SourcePath & path) +{ + str << path.to_string(); + return str; +} + +} diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh new file mode 100644 index 000000000..bf5625ca5 --- 
/dev/null +++ b/src/libutil/source-path.hh @@ -0,0 +1,114 @@ +#pragma once +/** + * @file + * + * @brief SourcePath + */ + +#include "ref.hh" +#include "canon-path.hh" +#include "input-accessor.hh" + +namespace nix { + +/** + * An abstraction for accessing source files during + * evaluation. Currently, it's just a wrapper around `CanonPath` that + * accesses files in the regular filesystem, but in the future it will + * support fetching files in other ways. + */ +struct SourcePath +{ + ref accessor; + CanonPath path; + + SourcePath(ref accessor, CanonPath path = CanonPath::root) + : accessor(std::move(accessor)) + , path(std::move(path)) + { } + + std::string_view baseName() const; + + /** + * Construct the parent of this `SourcePath`. Aborts if `this` + * denotes the root. + */ + SourcePath parent() const; + + /** + * If this `SourcePath` denotes a regular file (not a symlink), + * return its contents; otherwise throw an error. + */ + std::string readFile() const; + + /** + * Return whether this `SourcePath` denotes a file (of any type) + * that exists + */ + bool pathExists() const; + + /** + * Return stats about this `SourcePath`, or throw an exception if + * it doesn't exist. + */ + InputAccessor::Stat lstat() const; + + /** + * Return stats about this `SourcePath`, or std::nullopt if it + * doesn't exist. + */ + std::optional maybeLstat() const; + + /** + * If this `SourcePath` denotes a directory (not a symlink), + * return its directory entries; otherwise throw an error. + */ + InputAccessor::DirEntries readDirectory() const; + + /** + * If this `SourcePath` denotes a symlink, return its target; + * otherwise throw an error. + */ + std::string readLink() const; + + /** + * Dump this `SourcePath` to `sink` as a NAR archive. + */ + void dumpPath( + Sink & sink, + PathFilter & filter = defaultPathFilter) const; + + /** + * Return the location of this path in the "real" filesystem, if + * it has a physical location. + */ + std::optional getPhysicalPath() const; + + std::string to_string() const; + + /** + * Append a `CanonPath` to this path. + */ + SourcePath operator + (const CanonPath & x) const; + + /** + * Append a single component `c` to this path. `c` must not + * contain a slash. A slash is implicitly added between this path + * and `c`. + */ + SourcePath operator+(std::string_view c) const; + bool operator==(const SourcePath & x) const; + bool operator!=(const SourcePath & x) const; + bool operator<(const SourcePath & x) const; + + /** + * Resolve any symlinks in this `SourcePath` (including its + * parents). The result is a `SourcePath` in which no element is a + * symlink. 
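A sketch of the intended call pattern for the symlink-resolving method declared below (the accessor and path are placeholders, not from the patch):

```cpp
// Illustration only: resolve symlinks once, then use the no-follow API.
SourcePath p{accessor, CanonPath("/profiles/default/flake.nix")};
SourcePath resolved = p.resolveSymlinks();
// `resolved` contains no symlink components, so a plain readFile()
// honours the accessor's no-follow contract.
std::string contents = resolved.readFile();
```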
+ */ + SourcePath resolveSymlinks() const; +}; + +std::ostream & operator << (std::ostream & str, const SourcePath & path); + +} diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 1733c791c..187b3e948 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -53,6 +53,7 @@ TarArchive::TarArchive(Source & source, bool raw) : buffer(65536) archive_read_support_format_raw(archive); archive_read_support_format_empty(archive); } + archive_read_set_option(archive, NULL, "mac-ext", NULL); check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)"); } @@ -63,6 +64,7 @@ TarArchive::TarArchive(const Path & path) archive_read_support_filter_all(archive); archive_read_support_format_all(archive); + archive_read_set_option(archive, NULL, "mac-ext", NULL); check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); } diff --git a/src/libutil/tests/local.mk b/src/libutil/tests/local.mk deleted file mode 100644 index c8b8557cb..000000000 --- a/src/libutil/tests/local.mk +++ /dev/null @@ -1,33 +0,0 @@ -check: libutil-tests_RUN - -programs += libutil-tests-exe - -libutil-tests-exe_NAME = libnixutil-tests - -libutil-tests-exe_DIR := $(d) - -libutil-tests-exe_INSTALL_DIR := - -libutil-tests-exe_LIBS = libutil-tests - -libutil-tests-exe_LDFLAGS := $(GTEST_LIBS) - -libraries += libutil-tests - -libutil-tests_NAME = libnixutil-tests - -libutil-tests_DIR := $(d) - -libutil-tests_INSTALL_DIR := - -libutil-tests_SOURCES := $(wildcard $(d)/*.cc) - -libutil-tests_CXXFLAGS += -I src/libutil - -libutil-tests_LIBS = libutil - -libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) - -check: unit-test-data/libutil/git/check-data.sh.test - -$(eval $(call run-test,unit-test-data/libutil/git/check-data.sh)) diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index c5e735617..9a7dfee56 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -79,6 +79,8 @@ void ThreadPool::process() void ThreadPool::doWork(bool mainThread) { + ReceiveInterrupts receiveInterrupts; + if (!mainThread) interruptCheck = [&]() { return (bool) quit; }; diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 5c5a30dc2..1ddc6a536 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -8,7 +8,7 @@ namespace nix { // URI stuff. const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])"; -const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)"; +const static std::string schemeNameRegex = "(?:[a-z][a-z0-9+.-]*)"; const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+(?:%\\w+)?"; const static std::string ipv6AddressRegex = "(?:\\[" + ipv6AddressSegmentRegex + "\\]|" + ipv6AddressSegmentRegex + ")"; const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])"; @@ -19,13 +19,15 @@ const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncod const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?"; const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])"; const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*"; +const static std::string fragmentRegex = "(?:" + pcharRegex + "|[/? 
\"^])*"; const static std::string segmentRegex = "(?:" + pcharRegex + "*)"; const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)"; const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)"; /// A Git ref (i.e. branch or tag name). /// \todo check that this is correct. -const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-]*"; +/// This regex is incomplete. See https://git-scm.com/docs/git-check-ref-format +const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@+-]*"; extern std::regex refRegex; /// Instead of defining what a good Git Ref is, we define what a bad Git Ref is diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 9b438e6cd..c6561441d 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -2,6 +2,7 @@ #include "url-parts.hh" #include "util.hh" #include "split.hh" +#include "canon-path.hh" namespace nix { @@ -12,10 +13,10 @@ std::regex revRegex(revRegexS, std::regex::ECMAScript); ParsedURL parseURL(const std::string & url) { static std::regex uriRegex( - "((" + schemeRegex + "):" + "((" + schemeNameRegex + "):" + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))" + "(?:\\?(" + queryRegex + "))?" - + "(?:#(" + queryRegex + "))?", + + "(?:#(" + fragmentRegex + "))?", std::regex::ECMAScript); std::smatch match; @@ -141,6 +142,13 @@ bool ParsedURL::operator ==(const ParsedURL & other) const && fragment == other.fragment; } +ParsedURL ParsedURL::canonicalise() +{ + ParsedURL res(*this); + res.path = CanonPath(res.path).abs(); + return res; +} + /** * Parse a URL scheme of the form '(applicationScheme\+)?transportScheme' * into a tuple '(applicationScheme, transportScheme)' @@ -175,4 +183,12 @@ std::string fixGitURL(const std::string & url) } } +// https://www.rfc-editor.org/rfc/rfc3986#section-3.1 +bool isValidSchemeName(std::string_view s) +{ + static std::regex regex(schemeNameRegex, std::regex::ECMAScript); + + return std::regex_match(s.begin(), s.end(), regex, std::regex_constants::match_default); +} + } diff --git a/src/libutil/url.hh b/src/libutil/url.hh index 26c2dcc28..24806bbff 100644 --- a/src/libutil/url.hh +++ b/src/libutil/url.hh @@ -19,6 +19,11 @@ struct ParsedURL std::string to_string() const; bool operator ==(const ParsedURL & other) const; + + /** + * Remove `.` and `..` path elements. + */ + ParsedURL canonicalise(); }; MakeError(BadURL, Error); @@ -50,4 +55,13 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme); changes absolute paths into file:// URLs. */ std::string fixGitURL(const std::string & url); +/** + * Whether a string is a valid RFC 3986 scheme name. + * Colon `:` is part of the URI, not the scheme name, and therefore rejected. + * See https://www.rfc-editor.org/rfc/rfc3986#section-3.1 + * + * Does not check whether the scheme is understood, as that's context-dependent.
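The expected behaviour of this predicate follows directly from `schemeNameRegex = "(?:[a-z][a-z0-9+.-]*)"` above; a few illustrative cases:

```cpp
assert(isValidSchemeName("https"));
assert(isValidSchemeName("git+ssh"));   // '+', '-' and '.' are allowed
assert(!isValidSchemeName("2http"));    // must start with a letter
assert(!isValidSchemeName("http:"));    // the colon is a URI separator
```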
+ */ +bool isValidSchemeName(std::string_view scheme); + } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 5bb3f374b..7b4b1d031 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -7,6 +7,7 @@ #include #include +#include namespace nix { @@ -28,6 +29,9 @@ void initLibUtil() { } // This is not actually the main point of this check, but let's make sure anyway: assert(caught); + + if (sodium_init() == -1) + throw Error("could not initialise libsodium"); } ////////////////////////////////////////////////////////////////////// diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 75ce12a8c..ee2addb72 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -14,6 +14,7 @@ #include "store-api.hh" #include "local-fs-store.hh" #include "globals.hh" +#include "realisation.hh" #include "derivations.hh" #include "shared.hh" #include "path-with-outputs.hh" @@ -310,8 +311,11 @@ static void main_nix_build(int argc, char * * argv) else /* If we're in a #! script, interpret filenames relative to the script. */ - exprs.push_back(state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, - inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))))); + exprs.push_back( + state->parseExprFromFile( + resolveExprPath( + lookupFileArg(*state, + inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i)))); } } @@ -346,7 +350,7 @@ static void main_nix_build(int argc, char * * argv) takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs, vRoot ).first); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); getDerivations( *state, v, @@ -458,7 +462,7 @@ static void main_nix_build(int argc, char * * argv) if (dryRun) return; if (shellDrv) { - auto shellDrvOutputs = store->queryPartialDerivationOutputMap(shellDrv.value()); + auto shellDrvOutputs = store->queryPartialDerivationOutputMap(shellDrv.value(), &*evalStore); shell = store->printStorePath(shellDrvOutputs.at("out").value()) + "/bin/bash"; } @@ -511,7 +515,7 @@ static void main_nix_build(int argc, char * * argv) std::function::ChildNode &)> accumInputClosure; accumInputClosure = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { - auto outputs = evalStore->queryPartialDerivationOutputMap(inputDrv); + auto outputs = store->queryPartialDerivationOutputMap(inputDrv, &*evalStore); for (auto & i : inputNode.value) { auto o = outputs.at(i); store->computeFSClosure(*o, inputs); @@ -649,7 +653,7 @@ static void main_nix_build(int argc, char * * argv) if (counter) drvPrefix += fmt("-%d", counter + 1); - auto builtOutputs = evalStore->queryPartialDerivationOutputMap(drvPath); + auto builtOutputs = store->queryPartialDerivationOutputMap(drvPath, &*evalStore); auto maybeOutputPath = builtOutputs.at(outputName); assert(maybeOutputPath); diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index 7f2bb93b6..b64af758f 100644 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -1,4 +1,5 @@ #include "shared.hh" +#include "realisation.hh" #include "store-api.hh" #include "legacy.hh" diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 213a20d93..e2bbd9775 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -97,7 +97,7 @@ static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) { return st.type == InputAccessor::tRegular - || (st.type == 
InputAccessor::tDirectory && (path + "default.nix").pathExists()); + || (st.type == InputAccessor::tDirectory && (path + "default.nix").resolveSymlinks().pathExists()); } @@ -116,11 +116,11 @@ static void getAllExprs(EvalState & state, are implemented using profiles). */ if (i == "manifest.nix") continue; - SourcePath path2 = path + i; + auto path2 = (path + i).resolveSymlinks(); InputAccessor::Stat st; try { - st = path2.resolveSymlinks().lstat(); + st = path2.lstat(); } catch (Error &) { continue; // ignore dangling symlinks in ~/.nix-defexpr } @@ -172,7 +172,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v directory). */ else if (st.type == InputAccessor::tDirectory) { auto attrs = state.buildBindings(maxAttrs); - attrs.alloc("_combineChannels").mkList(0); + state.mkList(attrs.alloc("_combineChannels"), 0); StringSet seen; getAllExprs(state, path, seen, attrs); v.mkAttrs(attrs); @@ -922,7 +922,7 @@ static VersionDiff compareVersionAgainstSet( } -static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printMeta) +static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) { using nlohmann::json; json topObj = json::object(); @@ -953,6 +953,11 @@ static void queryJSON(Globals & globals, std::vector & elems, bool prin } } + if (printDrvPath) { + auto drvPath = i.queryDrvPath(); + if (drvPath) pkgObj["drvPath"] = globals.state->store->printStorePath(*drvPath); + } + if (printMeta) { json &metaObj = pkgObj["meta"]; metaObj = json::object(); @@ -1079,7 +1084,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) /* Print the desired columns, or XML output. */ if (jsonOutput) { - queryJSON(globals, elems, printOutPath, printMeta); + queryJSON(globals, elems, printOutPath, printDrvPath, printMeta); cout << '\n'; return; } diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 250224e7d..9f4d063d2 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -21,7 +21,7 @@ DrvInfos queryInstalled(EvalState & state, const Path & userEnv) auto manifestFile = userEnv + "/manifest.nix"; if (pathExists(manifestFile)) { Value v; - state.evalFile(state.rootPath(CanonPath(manifestFile)), v); + state.evalFile(state.rootPath(CanonPath(manifestFile)).resolveSymlinks(), v); Bindings & bindings(*state.allocBindings(0)); getDerivations(state, v, "", bindings, elems, false); } @@ -104,10 +104,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Also write a copy of the list of user environment elements to the store; we need it for future modifications of the environment. */ - std::ostringstream str; - manifest.print(state.symbols, str, true); - auto manifestFile = state.store->addTextToStore("env-manifest.nix", - str.str(), references); + auto manifestFile = ({ + std::ostringstream str; + manifest.print(state.symbols, str, true); + // TODO with C++20 we can use str.view() instead and avoid copy. + std::string str2 = str.str(); + StringSource source { str2 }; + state.store->addToStoreFromDump( + source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references); + }); /* Get the environment builder expression. */ Value envBuilder; @@ -128,7 +133,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Evaluate it. 
*/ debug("evaluating user environment builder"); - state.forceValue(topLevel, [&]() { return topLevel.determinePos(noPos); }); + state.forceValue(topLevel, topLevel.determinePos(noPos)); NixStringContext context; Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath)); auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context, ""); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index c67409e89..ab590b3a6 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -40,7 +40,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, for (auto & i : attrPaths) { Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot).first); - state.forceValue(v, [&]() { return v.determinePos(noPos); }); + state.forceValue(v, v.determinePos(noPos)); NixStringContext context; if (evalOnly) { @@ -183,7 +183,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { Expr * e = fromArgs ? state->parseExprFromString(i, state->rootPath(CanonPath::fromCwd())) - : state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, i)))); + : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); processExpr(*state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 123283dfe..0a0a3ab1a 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -13,7 +13,9 @@ #include "shared.hh" #include "graphml.hh" #include "legacy.hh" +#include "posix-source-accessor.hh" #include "path-with-outputs.hh" +#include "posix-fs-canonicalise.hh" #include #include @@ -174,8 +176,12 @@ static void opAdd(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); + PosixSourceAccessor accessor; for (auto & i : opArgs) - cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i))); + cout << fmt("%s\n", store->printStorePath(store->addToStore( + std::string(baseNameOf(i)), + accessor, + CanonPath::fromCwd(i)))); } @@ -192,11 +198,17 @@ static void opAddFixed(Strings opFlags, Strings opArgs) if (opArgs.empty()) throw UsageError("first argument must be hash algorithm"); - HashType hashAlgo = parseHashType(opArgs.front()); + HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front()); opArgs.pop_front(); + PosixSourceAccessor accessor; for (auto & i : opArgs) - std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), i, method, hashAlgo).path)); + std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( + baseNameOf(i), + accessor, + CanonPath::fromCwd(i), + method, + hashAlgo).path)); } @@ -213,7 +225,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs) throw UsageError("'--print-fixed-path' requires three arguments"); Strings::iterator i = opArgs.begin(); - HashType hashAlgo = parseHashType(*i++); + HashAlgorithm hashAlgo = parseHashAlgo(*i++); std::string hash = *i++; std::string name = *i++; @@ -404,8 +416,8 @@ static void opQuery(Strings opFlags, Strings opArgs) for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { auto info = store->queryPathInfo(j); if (query == qHash) { - assert(info->narHash.type == htSHA256); - cout << fmt("%s\n", info->narHash.to_string(HashFormat::Base32, true)); + assert(info->narHash.algo == HashAlgorithm::SHA256); + cout << fmt("%s\n", 
info->narHash.to_string(HashFormat::Nix32, true)); } else if (query == qSize) cout << fmt("%d\n", info->narSize); } @@ -540,7 +552,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) if (canonicalise) canonicalisePathMetaData(store->printStorePath(info->path), {}); if (!hashGiven) { - HashResult hash = hashPath(htSHA256, store->printStorePath(info->path)); + HashResult hash = hashPath( + *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }, + + FileIngestionMethod::Recursive, HashAlgorithm::SHA256); info->narHash = hash.first; info->narSize = hash.second; } @@ -762,14 +777,14 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) auto path = store->followLinksToStorePath(i); printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); auto info = store->queryPathInfo(path); - HashSink sink(info->narHash.type); + HashSink sink(info->narHash.algo); store->narFromPath(path, sink); auto current = sink.finish(); if (current.first != info->narHash) { printError("path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(path), - info->narHash.to_string(HashFormat::Base32, true), - current.first.to_string(HashFormat::Base32, true)); + info->narHash.to_string(HashFormat::Nix32, true), + current.first.to_string(HashFormat::Nix32, true)); status = 1; } } @@ -834,27 +849,33 @@ static void opServe(Strings opFlags, Strings opArgs) verbosity = lvlError; settings.keepLog = false; settings.useSubstitutes = false; - settings.maxSilentTime = readInt(in); - settings.buildTimeout = readInt(in); + + auto options = ServeProto::Serialise<ServeProto::BuildOptions>::read(*store, rconn); + + // Only certain fields get initialized based on the protocol + // version. This is why not all the code below is unconditional. + // See how the serialization logic in + // `ServeProto::Serialise<ServeProto::BuildOptions>` matches + // these conditions. + settings.maxSilentTime = options.maxSilentTime; + settings.buildTimeout = options.buildTimeout; if (GET_PROTOCOL_MINOR(clientVersion) >= 2) - settings.maxLogSize = readNum<unsigned long>(in); + settings.maxLogSize = options.maxLogSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 3) { - auto nrRepeats = readInt(in); - if (nrRepeats != 0) { + if (options.nrRepeats != 0) { throw Error("client requested repeating builds, but this is not currently implemented"); } - // Ignore 'enforceDeterminism'. It used to be true by - // default, but also only never had any effect when - // `nrRepeats == 0`. We have already asserted that - // `nrRepeats` in fact is 0, so we can safely ignore this - // without doing something other than what the client - // asked for. - readInt(in); - + // Ignore 'options.enforceDeterminism'. + // + // It used to be true by default, but never had + // any effect when `nrRepeats == 0`. We have already + // checked that `nrRepeats` in fact is 0, so we can safely + // ignore this without doing something other than what the + // client asked for. settings.runDiffHook = true; } if (GET_PROTOCOL_MINOR(clientVersion) >= 7) { - settings.keepFailed = (bool) readInt(in); + settings.keepFailed = options.keepFailed; } }; @@ -890,16 +911,8 @@ static void opServe(Strings opFlags, Strings opArgs) for (auto & i : paths) { try { auto info = store->queryPathInfo(i); - out << store->printStorePath(info->path) - << (info->deriver ? store->printStorePath(*info->deriver) : ""); - ServeProto::write(*store, wconn, info->references); - // !!! Maybe we want compression? 
- out << info->narSize // downloadSize - << info->narSize; - if (GET_PROTOCOL_MINOR(clientVersion) >= 4) - out << info->narHash.to_string(HashFormat::Base32, true) - << renderContentAddress(info->ca) - << info->sigs; + out << store->printStorePath(info->path); + ServeProto::write(*store, wconn, static_cast(*info)); } catch (InvalidPath &) { } } @@ -978,7 +991,7 @@ static void opServe(Strings opFlags, Strings opArgs) auto deriver = readString(in); ValidPathInfo info { store->parseStorePath(path), - Hash::parseAny(readString(in), htSHA256), + Hash::parseAny(readString(in), HashAlgorithm::SHA256), }; if (deriver != "") info.deriver = store->parseStorePath(deriver); diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index f9d487ada..64a43ecfa 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -2,6 +2,7 @@ #include "common-args.hh" #include "store-api.hh" #include "archive.hh" +#include "posix-source-accessor.hh" using namespace nix; @@ -20,7 +21,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand { Path path; std::optional namePart; - FileIngestionMethod ingestionMethod = FileIngestionMethod::Recursive; + ContentAddressMethod caMethod = FileIngestionMethod::Recursive; CmdAddToStore() { @@ -48,7 +49,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand )", .labels = {"hash-mode"}, .handler = {[this](std::string s) { - this->ingestionMethod = parseIngestionMethod(s); + this->caMethod = parseIngestionMethod(s); }}, }); } @@ -57,36 +58,17 @@ struct CmdAddToStore : MixDryRun, StoreCommand { if (!namePart) namePart = baseNameOf(path); - StringSink sink; - dumpPath(path, sink); + PosixSourceAccessor accessor; - auto narHash = hashString(htSHA256, sink.s); + auto path2 = CanonPath::fromCwd(path); - Hash hash = narHash; - if (ingestionMethod == FileIngestionMethod::Flat) { - HashSink hsink(htSHA256); - readFile(path, hsink); - hash = hsink.finish().first; - } + auto storePath = dryRun + ? 
store->computeStorePath( + *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first + : store->addToStoreSlow( + *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path; - ValidPathInfo info { - *store, - std::move(*namePart), - FixedOutputInfo { - .method = std::move(ingestionMethod), - .hash = std::move(hash), - .references = {}, - }, - narHash, - }; - info.narSize = sink.s.size(); - - if (!dryRun) { - auto source = StringSource(sink.s); - store->addToStore(info, source); - } - - logger->cout("%s", store->printStorePath(info.path)); + logger->cout("%s", store->printStorePath(storePath)); } }; @@ -110,7 +92,7 @@ struct CmdAddFile : CmdAddToStore { CmdAddFile() { - ingestionMethod = FileIngestionMethod::Flat; + caMethod = FileIngestionMethod::Flat; } std::string description() override diff --git a/src/nix/cat.cc b/src/nix/cat.cc index 6e5a736f2..4df086d4f 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -1,6 +1,7 @@ #include "command.hh" #include "store-api.hh" #include "nar-accessor.hh" +#include "progress-bar.hh" using namespace nix; @@ -13,6 +14,7 @@ struct MixCat : virtual Args auto st = accessor->lstat(CanonPath(path)); if (st.type != SourceAccessor::Type::tRegular) throw Error("path '%1%' is not a regular file", path); + stopProgressBar(); writeFull(STDOUT_FILENO, accessor->readFile(CanonPath(path))); } }; diff --git a/src/nix/doctor.cc b/src/nix/config-check.cc similarity index 97% rename from src/nix/doctor.cc rename to src/nix/config-check.cc index 59f9e3e5d..410feca2f 100644 --- a/src/nix/doctor.cc +++ b/src/nix/config-check.cc @@ -38,7 +38,7 @@ void checkInfo(const std::string & msg) { } -struct CmdDoctor : StoreCommand +struct CmdConfigCheck : StoreCommand { bool success = true; @@ -152,4 +152,4 @@ struct CmdDoctor : StoreCommand } }; -static auto rCmdDoctor = registerCommand<CmdDoctor>("doctor"); +static auto rCmdConfigCheck = registerCommand2<CmdConfigCheck>({ "config", "check" }); diff --git a/src/nix/show-config.cc b/src/nix/config.cc similarity index 73% rename from src/nix/show-config.cc rename to src/nix/config.cc index 3530584f9..52706afcf 100644 --- a/src/nix/show-config.cc +++ b/src/nix/config.cc @@ -7,11 +7,24 @@ using namespace nix; -struct CmdShowConfig : Command, MixJSON +struct CmdConfig : NixMultiCommand +{ + CmdConfig() : NixMultiCommand("config", RegisterCommand::getCommandsFor({"config"})) + { } + + std::string description() override + { + return "manipulate the Nix configuration"; + } + + Category category() override { return catUtility; } +}; + +struct CmdConfigShow : Command, MixJSON { std::optional<std::string> name; - CmdShowConfig() { + CmdConfigShow() { expectArgs({ .label = {"name"}, .optional = true, @@ -56,4 +69,5 @@ struct CmdShowConfig : Command, MixJSON } }; -static auto rShowConfig = registerCommand<CmdShowConfig>("show-config"); +static auto rCmdConfig = registerCommand<CmdConfig>("config"); +static auto rShowConfig = registerCommand2<CmdConfigShow>({"config", "show"}); diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index 373dedf7c..4dada8e0e 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -443,16 +443,23 @@ static void processStdioConnection(ref<Store> store, TrustedFlag trustClient) * * @param forceTrustClientOpt See `daemonLoop()` and the parameter with * the same name over there for details. + * + * @param processOps Whether to force processing ops even if the next + * store also is a remote store and could process it directly.
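In other words, the daemon entry point below chooses between two modes; a rough summary in comment form (not code from the patch):

```cpp
// - processOps == true: decode and handle each worker op locally,
//   delegating the underlying store operations to the next store;
// - otherwise, when the next store is itself a RemoteStore, the client
//   connection can be spliced straight through to it
//   (forwardStdioConnection).
```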
*/ -static void runDaemon(bool stdio, std::optional<TrustedFlag> forceTrustClientOpt) +static void runDaemon(bool stdio, std::optional<TrustedFlag> forceTrustClientOpt, bool processOps) { if (stdio) { auto store = openUncachedStore(); + std::shared_ptr<RemoteStore> remoteStore; + // If --force-untrusted is passed, we cannot forward the connection and // must process it ourselves (before delegating to the next store) to // force untrusting the client. - if (auto remoteStore = store.dynamic_pointer_cast<RemoteStore>(); remoteStore && (!forceTrustClientOpt || *forceTrustClientOpt != NotTrusted)) + processOps |= !forceTrustClientOpt || *forceTrustClientOpt != NotTrusted; + + if (!processOps && (remoteStore = store.dynamic_pointer_cast<RemoteStore>())) forwardStdioConnection(*remoteStore); else // `Trusted` is passed in the auto (no override case) because we @@ -468,6 +475,7 @@ static int main_nix_daemon(int argc, char * * argv) { auto stdio = false; std::optional<TrustedFlag> isTrustedOpt = std::nullopt; + auto processOps = false; parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--daemon") @@ -487,11 +495,14 @@ static int main_nix_daemon(int argc, char * * argv) } else if (*arg == "--default-trust") { experimentalFeatureSettings.require(Xp::DaemonTrustOverride); isTrustedOpt = std::nullopt; + } else if (*arg == "--process-ops") { + experimentalFeatureSettings.require(Xp::MountedSSHStore); + processOps = true; } else return false; return true; }); - runDaemon(stdio, isTrustedOpt); + runDaemon(stdio, isTrustedOpt, processOps); return 0; } @@ -503,6 +514,7 @@ struct CmdDaemon : StoreCommand { bool stdio = false; std::optional<TrustedFlag> isTrustedOpt = std::nullopt; + bool processOps = false; CmdDaemon() { @@ -538,6 +550,19 @@ struct CmdDaemon : StoreCommand }}, .experimentalFeature = Xp::DaemonTrustOverride, }); + + addFlag({ + .longName = "process-ops", + .description = R"( + Forces the daemon to process received commands itself rather than forwarding the commands straight to the remote store. + + This is useful for the `mounted-ssh://` store where some actions need to be performed on the remote end but as the connected user, and not as the user of the underlying daemon on the remote end. 
+ )", + .handler = {[&]() { + processOps = true; + }}, + .experimentalFeature = Xp::MountedSSHStore, + }); } std::string description() override @@ -556,7 +581,7 @@ struct CmdDaemon : StoreCommand void run(ref store) override { - runDaemon(stdio, isTrustedOpt); + runDaemon(stdio, isTrustedOpt, processOps); } }; diff --git a/src/nix/derivation.cc b/src/nix/derivation.cc index cd3975a4f..59a78d378 100644 --- a/src/nix/derivation.cc +++ b/src/nix/derivation.cc @@ -2,9 +2,9 @@ using namespace nix; -struct CmdDerivation : virtual NixMultiCommand +struct CmdDerivation : NixMultiCommand { - CmdDerivation() : MultiCommand(RegisterCommand::getCommandsFor({"derivation"})) + CmdDerivation() : NixMultiCommand("derivation", RegisterCommand::getCommandsFor({"derivation"})) { } std::string description() override @@ -13,13 +13,6 @@ struct CmdDerivation : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix derivation' requires a sub-command."); - command->second->run(); - } }; static auto rCmdDerivation = registerCommand("derivation"); diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 4a561e52b..5e25833eb 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -223,7 +223,11 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore if (builder != "bash") throw Error("'nix develop' only works on derivations that use 'bash' as their builder"); - auto getEnvShPath = evalStore->addTextToStore("get-env.sh", getEnvSh, {}); + auto getEnvShPath = ({ + StringSource source { getEnvSh }; + evalStore->addToStoreFromDump( + source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {}); + }); drv.args = {store->printStorePath(getEnvShPath)}; @@ -371,7 +375,7 @@ struct Common : InstallableCommand, MixProfile for (auto & [installable_, dir_] : redirects) { auto dir = absPath(dir_); auto installable = parseInstallable(store, installable_); - auto builtPaths = Installable::toStorePaths( + auto builtPaths = Installable::toStorePathSet( getEvalStore(), store, Realise::Nothing, OperateOn::Output, {installable}); for (auto & path: builtPaths) { auto from = store->printStorePath(path); @@ -626,7 +630,7 @@ struct CmdDevelop : Common, MixEnvironment bool found = false; - for (auto & path : Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { + for (auto & path : Installable::toStorePathSet(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { auto s = store->printStorePath(path) + "/bin/bash"; if (pathExists(s)) { shell = s; @@ -665,7 +669,7 @@ struct CmdDevelop : Common, MixEnvironment } } - runProgramInStore(store, shell, args, buildEnvironment.getSystem()); + runProgramInStore(store, UseSearchPath::Use, shell, args, buildEnvironment.getSystem()); } }; diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc index c4edc894b..0850d4c1c 100644 --- a/src/nix/dump-path.cc +++ b/src/nix/dump-path.cc @@ -61,4 +61,12 @@ struct CmdDumpPath2 : Command } }; -static auto rDumpPath2 = registerCommand2({"nar", "dump-path"}); +struct CmdNarDumpPath : CmdDumpPath2 { + void run() override { + warn("'nix nar dump-path' is a deprecated alias for 'nix nar pack'"); + CmdDumpPath2::run(); + } +}; + +static auto rCmdNarPack = registerCommand2({"nar", "pack"}); +static auto rCmdNarDumpPath = registerCommand2({"nar", "dump-path"}); diff --git a/src/nix/flake-init.md b/src/nix/flake-init.md index fc1f4f805..ea274bf29 100644 --- 
a/src/nix/flake-init.md +++ b/src/nix/flake-init.md @@ -30,7 +30,7 @@ using `-t`. # Template definitions A flake can declare templates through its `templates` output -attribute. A template has two attributes: +attribute. A template has the following attributes: * `description`: A one-line description of the template, in CommonMark syntax. diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 38938f09e..2b6e56283 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -89,7 +89,13 @@ public: .label="inputs", .optional=true, .handler={[&](std::string inputToUpdate){ - auto inputPath = flake::parseInputPath(inputToUpdate); + InputPath inputPath; + try { + inputPath = flake::parseInputPath(inputToUpdate); + } catch (Error & e) { + warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate); + throw e; + } if (lockFlags.inputUpdates.contains(inputPath)) warn("Input '%s' was specified multiple times. You may have done this by accident.", inputToUpdate); lockFlags.inputUpdates.insert(inputPath); @@ -1393,7 +1399,9 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON struct CmdFlake : NixMultiCommand { CmdFlake() - : MultiCommand({ + : NixMultiCommand( + "flake", + { {"update", []() { return make_ref(); }}, {"lock", []() { return make_ref(); }}, {"metadata", []() { return make_ref(); }}, @@ -1423,10 +1431,8 @@ struct CmdFlake : NixMultiCommand void run() override { - if (!command) - throw UsageError("'nix flake' requires a sub-command."); experimentalFeatureSettings.require(Xp::Flakes); - command->second->run(); + NixMultiCommand::run(); } }; diff --git a/src/nix/fmt.cc b/src/nix/fmt.cc index c85eacded..059904150 100644 --- a/src/nix/fmt.cc +++ b/src/nix/fmt.cc @@ -49,7 +49,7 @@ struct CmdFmt : SourceExprCommand { } } - runProgramInStore(store, app.program, programArgs); + runProgramInStore(store, UseSearchPath::DontUse, app.program, programArgs); }; }; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index d6595dcca..83694306e 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -5,19 +5,25 @@ #include "shared.hh" #include "references.hh" #include "archive.hh" +#include "posix-source-accessor.hh" using namespace nix; +/** + * Base for `nix hash file` (deprecated), `nix hash path` and `nix-hash` (legacy).
+ * + * Deprecation Issue: https://github.com/NixOS/nix/issues/8876 + */ struct CmdHashBase : Command { FileIngestionMethod mode; HashFormat hashFormat = HashFormat::SRI; bool truncate = false; - HashType ht = htSHA256; + HashAlgorithm ha = HashAlgorithm::SHA256; std::vector paths; std::optional modulus; - CmdHashBase(FileIngestionMethod mode) : mode(mode) + explicit CmdHashBase(FileIngestionMethod mode) : mode(mode) { addFlag({ .longName = "sri", @@ -34,7 +40,7 @@ struct CmdHashBase : Command addFlag({ .longName = "base32", .description = "Print the hash in base-32 (Nix-specific) format.", - .handler = {&hashFormat, HashFormat::Base32}, + .handler = {&hashFormat, HashFormat::Nix32}, }); addFlag({ @@ -43,7 +49,7 @@ struct CmdHashBase : Command .handler = {&hashFormat, HashFormat::Base16}, }); - addFlag(Flag::mkHashTypeFlag("type", &ht)); + addFlag(Flag::mkHashAlgoFlag("type", &ha)); #if 0 addFlag({ @@ -79,18 +85,12 @@ struct CmdHashBase : Command std::unique_ptr hashSink; if (modulus) - hashSink = std::make_unique(ht, *modulus); + hashSink = std::make_unique(ha, *modulus); else - hashSink = std::make_unique(ht); + hashSink = std::make_unique(ha); - switch (mode) { - case FileIngestionMethod::Flat: - readFile(path, *hashSink); - break; - case FileIngestionMethod::Recursive: - dumpPath(path, *hashSink); - break; - } + PosixSourceAccessor accessor; + dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode); Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); @@ -102,39 +102,90 @@ struct CmdHashBase : Command struct CmdToBase : Command { HashFormat hashFormat; - std::optional ht; + std::optional ht; std::vector args; CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat) { - addFlag(Flag::mkHashTypeOptFlag("type", &ht)); + addFlag(Flag::mkHashAlgoOptFlag("type", &ht)); expectArgs("strings", &args); } std::string description() override { - return fmt("convert a hash to %s representation", + return fmt("convert a hash to %s representation (deprecated, use `nix hash convert` instead)", hashFormat == HashFormat::Base16 ? "base-16" : - hashFormat == HashFormat::Base32 ? "base-32" : + hashFormat == HashFormat::Nix32 ? "base-32" : hashFormat == HashFormat::Base64 ? "base-64" : "SRI"); } void run() override { + warn("The old format conversion sub-commands of `nix hash` were deprecated in favor of `nix hash convert`."); for (auto s : args) logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI)); } }; +/** + * `nix hash convert` + */ +struct CmdHashConvert : Command +{ + std::optional from; + HashFormat to; + std::optional algo; + std::vector hashStrings; + + CmdHashConvert(): to(HashFormat::SRI) { + addFlag(Args::Flag::mkHashFormatOptFlag("from", &from)); + addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to)); + addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo)); + expectArgs({ + .label = "hashes", + .handler = {&hashStrings}, + }); + } + + std::string description() override + { + std::string descr("convert between different hash formats.
Choose from: "); + auto iter = hashFormats.begin(); + assert(iter != hashFormats.end()); + descr += *iter++; + while (iter != hashFormats.end()) { + descr += ", " + *iter++; + } + + return descr; + } + + Category category() override { return catUtility; } + + void run() override { + for (const auto& s: hashStrings) { + Hash h = Hash::parseAny(s, algo); + if (from && h.to_string(*from, from == HashFormat::SRI) != s) { + auto from_as_string = printHashFormat(*from); + throw BadHash("input hash '%s' does not have the expected format '--from %s'", s, from_as_string); + } + logger->cout(h.to_string(to, to == HashFormat::SRI)); + } + } +}; + struct CmdHash : NixMultiCommand { CmdHash() - : MultiCommand({ + : NixMultiCommand( + "hash", + { + {"convert", []() { return make_ref();}}, {"file", []() { return make_ref(FileIngestionMethod::Flat);; }}, {"path", []() { return make_ref(FileIngestionMethod::Recursive); }}, {"to-base16", []() { return make_ref(HashFormat::Base16); }}, - {"to-base32", []() { return make_ref(HashFormat::Base32); }}, + {"to-base32", []() { return make_ref(HashFormat::Nix32); }}, {"to-base64", []() { return make_ref(HashFormat::Base64); }}, {"to-sri", []() { return make_ref(HashFormat::SRI); }}, }) @@ -146,13 +197,6 @@ struct CmdHash : NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix hash' requires a sub-command."); - command->second->run(); - } }; static auto rCmdHash = registerCommand("hash"); @@ -160,7 +204,10 @@ static auto rCmdHash = registerCommand("hash"); /* Legacy nix-hash command. */ static int compatNixHash(int argc, char * * argv) { - std::optional ht; + // Wait until `nix hash convert` is not hidden behind experimental flags anymore. + // warn("`nix-hash` has been deprecated in favor of `nix hash convert`."); + + std::optional ha; bool flat = false; HashFormat hashFormat = HashFormat::Base16; bool truncate = false; @@ -174,13 +221,13 @@ static int compatNixHash(int argc, char * * argv) printVersion("nix-hash"); else if (*arg == "--flat") flat = true; else if (*arg == "--base16") hashFormat = HashFormat::Base16; - else if (*arg == "--base32") hashFormat = HashFormat::Base32; + else if (*arg == "--base32") hashFormat = HashFormat::Nix32; else if (*arg == "--base64") hashFormat = HashFormat::Base64; else if (*arg == "--sri") hashFormat = HashFormat::SRI; else if (*arg == "--truncate") truncate = true; else if (*arg == "--type") { std::string s = getArg(*arg, arg, end); - ht = parseHashType(s); + ha = parseHashAlgo(s); } else if (*arg == "--to-base16") { op = opTo; @@ -188,7 +235,7 @@ static int compatNixHash(int argc, char * * argv) } else if (*arg == "--to-base32") { op = opTo; - hashFormat = HashFormat::Base32; + hashFormat = HashFormat::Nix32; } else if (*arg == "--to-base64") { op = opTo; @@ -207,8 +254,8 @@ static int compatNixHash(int argc, char * * argv) if (op == opHash) { CmdHashBase cmd(flat ? 
FileIngestionMethod::Flat : FileIngestionMethod::Recursive); - if (!ht.has_value()) ht = htMD5; - cmd.ht = ht.value(); + if (!ha.has_value()) ha = HashAlgorithm::MD5; + cmd.ha = ha.value(); cmd.hashFormat = hashFormat; cmd.truncate = truncate; cmd.paths = ss; @@ -218,7 +265,7 @@ static int compatNixHash(int argc, char * * argv) else { CmdToBase cmd(hashFormat); cmd.args = ss; - if (ht.has_value()) cmd.ht = ht; + if (ha.has_value()) cmd.ht = ha; cmd.run(); } diff --git a/src/nix/local.mk b/src/nix/local.mk index 57f8259c4..1d6f560d6 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -18,7 +18,7 @@ nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd -nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) +nix_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) $(foreach name, \ nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \ @@ -31,10 +31,23 @@ src/nix/develop.cc: src/nix/get-env.sh.gen.hh src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh -src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh doc/manual/generate-settings.nix.gen.hh doc/manual/generate-store-info.nix.gen.hh +src/nix/main.cc: \ + doc/manual/generate-manpage.nix.gen.hh \ + doc/manual/utils.nix.gen.hh doc/manual/generate-settings.nix.gen.hh \ + doc/manual/generate-store-info.nix.gen.hh \ + src/nix/generated-doc/help-stores.md -src/nix/doc/files/%.md: doc/manual/src/command-ref/files/%.md +src/nix/generated-doc/files/%.md: doc/manual/src/command-ref/files/%.md @mkdir -p $$(dirname $@) @cp $< $@ -src/nix/profile.cc: src/nix/profile.md src/nix/doc/files/profiles.md.gen.hh +src/nix/profile.cc: src/nix/profile.md src/nix/generated-doc/files/profiles.md.gen.hh + +src/nix/generated-doc/help-stores.md: doc/manual/src/store/types/index.md.in + @mkdir -p $$(dirname $@) + @echo 'R"(' >> $@.tmp + @echo >> $@.tmp + @cat $^ >> $@.tmp + @echo >> $@.tmp + @echo ')"' >> $@.tmp + @mv $@.tmp $@ diff --git a/src/nix/main.cc b/src/nix/main.cc index 73641f6d2..39c04069b 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -67,7 +67,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs bool helpRequested = false; bool showVersion = false; - NixArgs() : MultiCommand(RegisterCommand::getCommandsFor({})), MixCommonArgs("nix") + NixArgs() : MultiCommand("", RegisterCommand::getCommandsFor({})), MixCommonArgs("nix") { categories.clear(); categories[catHelp] = "Help commands"; @@ -134,10 +134,12 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs {"ping-store", {"store", "ping"}}, {"sign-paths", {"store", "sign"}}, {"show-derivation", {"derivation", "show"}}, + {"show-config", {"config", "show"}}, {"to-base16", {"hash", "to-base16"}}, {"to-base32", {"hash", "to-base32"}}, {"to-base64", {"hash", "to-base64"}}, {"verify", {"store", "verify"}}, + {"doctor", {"config", "check"}}, }; bool aliasUsed = false; @@ -296,7 +298,7 @@ struct CmdHelpStores : Command std::string doc() override { return - #include "help-stores.md" + #include "generated-doc/help-stores.md" ; } @@ -371,6 +373,7 @@ void mainWrapped(int argc, char * * argv) Xp::Flakes, Xp::FetchClosure, Xp::DynamicDerivations, + Xp::FetchTree, }; evalSettings.pureEval = false; EvalState state({}, openStore("dummy://")); diff --git 
a/src/nix/nar-dump-path.md b/src/nix/nar-dump-path.md index 26191ad25..de82202de 100644 --- a/src/nix/nar-dump-path.md +++ b/src/nix/nar-dump-path.md @@ -5,7 +5,7 @@ R""( * To serialise directory `foo` as a NAR: ```console - # nix nar dump-path ./foo > foo.nar + # nix nar pack ./foo > foo.nar ``` # Description diff --git a/src/nix/nar.cc b/src/nix/nar.cc index 9815410cf..8ad4f92a7 100644 --- a/src/nix/nar.cc +++ b/src/nix/nar.cc @@ -4,7 +4,7 @@ using namespace nix; struct CmdNar : NixMultiCommand { - CmdNar() : MultiCommand(RegisterCommand::getCommandsFor({"nar"})) + CmdNar() : NixMultiCommand("nar", RegisterCommand::getCommandsFor({"nar"})) { } std::string description() override @@ -20,13 +20,6 @@ struct CmdNar : NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix nar' requires a sub-command."); - command->second->run(); - } }; static auto rCmdNar = registerCommand("nar"); diff --git a/src/nix/nix.md b/src/nix/nix.md index eb150f03b..749456014 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -235,10 +235,14 @@ operate are determined as follows: # Nix stores -Most `nix` subcommands operate on a *Nix store*. These are documented -in [`nix help-stores`](./nix3-help-stores.md). +Most `nix` subcommands operate on a *Nix store*. +The various store types are documented in the +[Store Types](@docroot@/store/types/index.md) +section of the manual. -# Shebang interpreter +The same information is also available from the [`nix help-stores`](./nix3-help-stores.md) command. + +# Shebang interpreter The `nix` command can be used as a `#!` interpreter. Arguments to Nix can be passed on subsequent lines in the script. diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 23198a120..5f10cfb61 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -2,6 +2,7 @@ #include "shared.hh" #include "store-api.hh" #include "common-args.hh" +#include "nar-info.hh" #include #include @@ -54,7 +55,7 @@ static json pathInfoToJSON( jsonObject["closureSize"] = getStoreObjectsTotalSize(store, closure); - if (auto * narInfo = dynamic_cast(&*info)) { + if (dynamic_cast(&*info)) { uint64_t totalDownloadSize = 0; for (auto & p : closure) { auto depInfo = store.queryPathInfo(p); diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 3ed7946a8..b5d619006 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -9,6 +9,7 @@ #include "attr-path.hh" #include "eval-inline.hh" #include "legacy.hh" +#include "posix-source-accessor.hh" #include @@ -46,13 +47,13 @@ std::string resolveMirrorUrl(EvalState & state, const std::string & url) } std::tuple prefetchFile( - ref store, - std::string_view url, - std::optional name, - HashType hashType, - std::optional expectedHash, - bool unpack, - bool executable) + ref store, + std::string_view url, + std::optional name, + HashAlgorithm hashAlgo, + std::optional expectedHash, + bool unpack, + bool executable) { auto ingestionMethod = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; @@ -69,7 +70,7 @@ std::tuple prefetchFile( /* If an expected hash is given, the file may already exist in the store. 
*/ if (expectedHash) { - hashType = expectedHash->type; + hashAlgo = expectedHash->algo; storePath = store->makeFixedOutputPath(*name, FixedOutputInfo { .method = ingestionMethod, .hash = *expectedHash, @@ -122,7 +123,11 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashType, expectedHash); + PosixSourceAccessor accessor; + auto info = store->addToStoreSlow( + *name, + accessor, CanonPath::fromCwd(tmpFile), + ingestionMethod, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); hash = info.ca->hash; @@ -134,7 +139,7 @@ std::tuple prefetchFile( static int main_nix_prefetch_url(int argc, char * * argv) { { - HashType ht = htSHA256; + HashAlgorithm ha = HashAlgorithm::SHA256; std::vector args; bool printPath = getEnv("PRINT_PATH") == "1"; bool fromExpr = false; @@ -155,7 +160,7 @@ static int main_nix_prefetch_url(int argc, char * * argv) printVersion("nix-prefetch-url"); else if (*arg == "--type") { auto s = getArg(*arg, arg, end); - ht = parseHashType(s); + ha = parseHashAlgo(s); } else if (*arg == "--print-path") printPath = true; @@ -233,10 +238,10 @@ static int main_nix_prefetch_url(int argc, char * * argv) std::optional expectedHash; if (args.size() == 2) - expectedHash = Hash::parseAny(args[1], ht); + expectedHash = Hash::parseAny(args[1], ha); auto [storePath, hash] = prefetchFile( - store, resolveMirrorUrl(*state, url), name, ht, expectedHash, unpack, executable); + store, resolveMirrorUrl(*state, url), name, ha, expectedHash, unpack, executable); stopProgressBar(); @@ -258,7 +263,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON std::string url; bool executable = false; std::optional name; - HashType hashType = htSHA256; + HashAlgorithm hashAlgo = HashAlgorithm::SHA256; std::optional expectedHash; CmdStorePrefetchFile() @@ -275,11 +280,11 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .description = "The expected hash of the file.", .labels = {"hash"}, .handler = {[&](std::string s) { - expectedHash = Hash::parseAny(s, hashType); + expectedHash = Hash::parseAny(s, hashAlgo); }} }); - addFlag(Flag::mkHashTypeFlag("hash-type", &hashType)); + addFlag(Flag::mkHashAlgoFlag("hash-type", &hashAlgo)); addFlag({ .longName = "executable", @@ -305,7 +310,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON } void run(ref store) override { - auto [storePath, hash] = prefetchFile(store, url, name, hashType, expectedHash, false, executable); + auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, false, executable); if (json) { auto res = nlohmann::json::object(); diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md index 5d7fcc0ec..facfdf0d6 100644 --- a/src/nix/profile-list.md +++ b/src/nix/profile-list.md @@ -6,12 +6,14 @@ R""( ```console # nix profile list + Name: gdb Index: 0 Flake attribute: legacyPackages.x86_64-linux.gdb Original flake URL: flake:nixpkgs Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1 + Name: blender-bin Index: 1 Flake attribute: packages.x86_64-linux.default Original flake URL: flake:blender-bin @@ -26,7 +28,7 @@ R""( # nix build github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender#packages.x86_64-linux.default ``` - will build the package with index 1 shown above. + will build the package `blender-bin` shown above. 
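For reference, the `Name` field shown here is computed by the manifest-loading code added to `src/nix/profile.cc` later in this diff: a candidate name is derived from the element's flake URL and disambiguated with a numeric suffix. A standalone sketch of that disambiguation scheme, using only the standard library (not the actual Nix code):

```cpp
#include <cassert>
#include <set>
#include <string>

// Mirror of the collision handling in the profile manifest loader:
// keep the candidate if unused, otherwise append the first integer
// suffix that makes it unique, then record the result as taken.
std::string disambiguate(std::set<std::string> & taken, const std::string & candidate)
{
    std::string name = candidate;
    for (int i = 1; taken.count(name); ++i)
        name = candidate + std::to_string(i);
    taken.insert(name);
    return name;
}

int main()
{
    std::set<std::string> taken;
    assert(disambiguate(taken, "gdb") == "gdb");
    assert(disambiguate(taken, "gdb") == "gdb1");
    assert(disambiguate(taken, "gdb") == "gdb2");
}
```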
# Description @@ -34,10 +36,14 @@ This command shows what packages are currently installed in a profile. For each installed package, it shows the following information: -* `Index`: An integer that can be used to unambiguously identify the +* `Name`: A unique name used to unambiguously identify the package in invocations of `nix profile remove` and `nix profile upgrade`. +* `Index`: An integer that can be used to unambiguously identify the + package in invocations of `nix profile remove` and `nix profile upgrade`. + (*Deprecated, will be removed in a future version in favor of `Name`.*) + * `Flake attribute`: The flake output attribute path that provides the package (e.g. `packages.x86_64-linux.hello`). diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md index ba85441d8..c994b79bd 100644 --- a/src/nix/profile-remove.md +++ b/src/nix/profile-remove.md @@ -2,18 +2,19 @@ R""( # Examples -* Remove a package by position: +* Remove a package by name: + + ```console + # nix profile remove hello + ``` + +* Remove a package by index + *(deprecated, will be removed in a future version)*: ```console # nix profile remove 3 ``` -* Remove a package by attribute path: - - ```console - # nix profile remove packages.x86_64-linux.hello - ``` - * Remove all packages: ```console diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md index 39cca428b..47103edfc 100644 --- a/src/nix/profile-upgrade.md +++ b/src/nix/profile-upgrade.md @@ -9,18 +9,16 @@ R""( # nix profile upgrade '.*' ``` -* Upgrade a specific package: +* Upgrade a specific package by name: ```console - # nix profile upgrade packages.x86_64-linux.hello + # nix profile upgrade hello ``` -* Upgrade a specific profile element by number: +* Upgrade a specific package by index + *(deprecated, will be removed in a future version)*: ```console - # nix profile list - 0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify … - # nix profile upgrade 0 ``` diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 476ddcd60..abd56e4f4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -10,6 +10,8 @@ #include "../nix-env/user-env.hh" #include "profiles.hh" #include "names.hh" +#include "url.hh" +#include "flake/url-name.hh" #include #include @@ -43,6 +45,7 @@ const int defaultPriority = 5; struct ProfileElement { StorePathSet storePaths; + std::string name; std::optional source; bool active = true; int priority = defaultPriority; @@ -116,6 +119,8 @@ struct ProfileManifest if (pathExists(manifestPath)) { auto json = nlohmann::json::parse(readFile(manifestPath)); + /* Keep track of already found names to allow preventing duplicates. 
*/ + std::set foundNames; auto version = json.value("version", 0); std::string sUrl; @@ -149,6 +154,25 @@ struct ProfileManifest e["outputs"].get() }; } + + std::string nameCandidate = element.identifier(); + if (e.contains("name")) { + nameCandidate = e["name"]; + } + else if (element.source) { + auto url = parseURL(element.source->to_string()); + auto name = getNameFromURL(url); + if (name) + nameCandidate = *name; + } + + auto finalName = nameCandidate; + for (int i = 1; foundNames.contains(finalName); ++i) { + finalName = nameCandidate + std::to_string(i); + } + element.name = finalName; + foundNames.insert(element.name); + elements.emplace_back(std::move(element)); } } @@ -163,6 +187,7 @@ struct ProfileManifest for (auto & drvInfo : drvInfos) { ProfileElement element; element.storePaths = {drvInfo.queryOutPath()}; + element.name = element.identifier(); elements.emplace_back(std::move(element)); } } @@ -216,7 +241,7 @@ struct ProfileManifest StringSink sink; dumpPath(tempDir, sink); - auto narHash = hashString(htSHA256, sink.s); + auto narHash = hashString(HashAlgorithm::SHA256, sink.s); ValidPathInfo info { *store, @@ -451,15 +476,25 @@ public: { std::vector res; + auto anyIndexMatchers = false; + for (auto & s : _matchers) { - if (auto n = string2Int(s)) + if (auto n = string2Int(s)) { res.push_back(*n); + anyIndexMatchers = true; + } else if (store->isStorePath(s)) res.push_back(s); else res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)}); } + if (anyIndexMatchers) { + warn("Indices are deprecated and will be removed in a future version!\n" + " Refer to packages by their `Name` as printed by `nix profile list`.\n" + " See https://github.com/NixOS/nix/issues/9171 for more information."); + } + return res; } @@ -471,8 +506,7 @@ public: } else if (auto path = std::get_if(&matcher)) { if (element.storePaths.count(store.parseStorePath(*path))) return true; } else if (auto regex = std::get_if(&matcher)) { - if (element.source - && std::regex_match(element.source->attrPath, regex->reg)) + if (std::regex_match(element.name, regex->reg)) return true; } } @@ -556,62 +590,83 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf Installables installables; std::vector indices; + auto matchedCount = 0; auto upgradedCount = 0; for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); - if (element.source - && !element.source->originalRef.input.isLocked() - && matches(*store, element, i, matchers)) - { - upgradedCount++; - - Activity act(*logger, lvlChatty, actUnknown, - fmt("checking '%s' for updates", element.source->attrPath)); - - auto installable = make_ref( - this, - getEvalState(), - FlakeRef(element.source->originalRef), - "", - element.source->outputs, - Strings{element.source->attrPath}, - Strings{}, - lockFlags); - - auto derivedPaths = installable->toDerivedPaths(); - if (derivedPaths.empty()) continue; - auto * infop = dynamic_cast(&*derivedPaths[0].info); - // `InstallableFlake` should use `ExtraPathInfoFlake`. 
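The matcher parsing above accepts three argument forms. A simplified standalone sketch of that classification (the real code uses Nix's `string2Int` and `Store::isStorePath`; here both are replaced by hypothetical stand-ins, with the store-path test reduced to a prefix check):

```cpp
#include <algorithm>
#include <cassert>
#include <cctype>
#include <optional>
#include <regex>
#include <string>
#include <variant>

// One matcher per argument: a positional index (deprecated), a store
// path, or an extended regex matched against the element's name.
using Matcher = std::variant<size_t, std::string, std::regex>;

std::optional<size_t> parseIndex(const std::string & s) // stand-in for string2Int
{
    if (s.empty() || !std::all_of(s.begin(), s.end(),
            [](unsigned char c) { return std::isdigit(c) != 0; }))
        return std::nullopt;
    return std::stoul(s);
}

Matcher classify(const std::string & s)
{
    if (auto n = parseIndex(s)) return *n;        // e.g. "3" (deprecated)
    if (s.rfind("/nix/store/", 0) == 0) return s; // hypothetical store-path test
    return std::regex(s, std::regex::extended | std::regex::icase);
}

int main()
{
    assert(std::holds_alternative<size_t>(classify("3")));
    assert(std::holds_alternative<std::string>(classify("/nix/store/abc-hello")));
    assert(std::holds_alternative<std::regex>(classify("gdb.*")));
}
```

Note that regexes are now matched against the element's `name` rather than its flake attribute path, consistent with the `nix profile list` output change above.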
- assert(infop); - auto & info = *infop; - - if (element.source->lockedRef == info.flake.lockedRef) continue; - - printInfo("upgrading '%s' from flake '%s' to '%s'", - element.source->attrPath, element.source->lockedRef, info.flake.lockedRef); - - element.source = ProfileElementSource { - .originalRef = installable->flakeRef, - .lockedRef = info.flake.lockedRef, - .attrPath = info.value.attrPath, - .outputs = installable->extendedOutputsSpec, - }; - - installables.push_back(installable); - indices.push_back(i); + if (!matches(*store, element, i, matchers)) { + continue; } + + matchedCount++; + + if (!element.source) { + warn( + "Found package '%s', but it was not installed from a flake, so it can't be checked for upgrades!", + element.identifier() + ); + continue; + } + if (element.source->originalRef.input.isLocked()) { + warn( + "Found package '%s', but it was installed from a locked flake reference so it can't be upgraded!", + element.identifier() + ); + continue; + } + + upgradedCount++; + + Activity act(*logger, lvlChatty, actUnknown, + fmt("checking '%s' for updates", element.source->attrPath)); + + auto installable = make_ref( + this, + getEvalState(), + FlakeRef(element.source->originalRef), + "", + element.source->outputs, + Strings{element.source->attrPath}, + Strings{}, + lockFlags); + + auto derivedPaths = installable->toDerivedPaths(); + if (derivedPaths.empty()) continue; + auto * infop = dynamic_cast(&*derivedPaths[0].info); + // `InstallableFlake` should use `ExtraPathInfoFlake`. + assert(infop); + auto & info = *infop; + + if (element.source->lockedRef == info.flake.lockedRef) continue; + + printInfo("upgrading '%s' from flake '%s' to '%s'", + element.source->attrPath, element.source->lockedRef, info.flake.lockedRef); + + element.source = ProfileElementSource { + .originalRef = installable->flakeRef, + .lockedRef = info.flake.lockedRef, + .attrPath = info.value.attrPath, + .outputs = installable->extendedOutputsSpec, + }; + + installables.push_back(installable); + indices.push_back(i); } if (upgradedCount == 0) { - for (auto & matcher : matchers) { - if (const size_t * index = std::get_if(&matcher)){ - warn("'%d' is not a valid index", *index); - } else if (const Path * path = std::get_if(&matcher)){ - warn("'%s' does not match any paths", *path); - } else if (const RegexPattern * regex = std::get_if(&matcher)){ - warn("'%s' does not match any packages", regex->pattern); + if (matchedCount == 0) { + for (auto & matcher : matchers) { + if (const size_t * index = std::get_if(&matcher)){ + warn("'%d' is not a valid index", *index); + } else if (const Path * path = std::get_if(&matcher)){ + warn("'%s' does not match any paths", *path); + } else if (const RegexPattern * regex = std::get_if(&matcher)){ + warn("'%s' does not match any packages", regex->pattern); + } } + } else { + warn("Found some packages but none of them could be upgraded."); } warn ("Use 'nix profile list' to see the current profile."); } @@ -657,9 +712,10 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); if (i) logger->cout(""); - logger->cout("Index: " ANSI_BOLD "%s" ANSI_NORMAL "%s", - i, + logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", + element.name, element.active ? 
"" : " " ANSI_RED "(inactive)" ANSI_NORMAL); + logger->cout("Index: %s", i); if (element.source) { logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); logger->cout("Original flake URL: %s", element.source->originalRef.to_string()); @@ -825,7 +881,9 @@ struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRu struct CmdProfile : NixMultiCommand { CmdProfile() - : MultiCommand({ + : NixMultiCommand( + "profile", + { {"install", []() { return make_ref(); }}, {"remove", []() { return make_ref(); }}, {"upgrade", []() { return make_ref(); }}, @@ -848,13 +906,6 @@ struct CmdProfile : NixMultiCommand #include "profile.md" ; } - - void run() override - { - if (!command) - throw UsageError("'nix profile' requires a sub-command."); - command->second->run(); - } }; static auto rCmdProfile = registerCommand("profile"); diff --git a/src/nix/profile.md b/src/nix/profile.md index bd13f906f..9b2f86f4a 100644 --- a/src/nix/profile.md +++ b/src/nix/profile.md @@ -11,7 +11,7 @@ them to be rolled back easily. )"" -#include "doc/files/profiles.md.gen.hh" +#include "generated-doc/files/profiles.md.gen.hh" R""( diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc index e19e93219..e1f231222 100644 --- a/src/nix/realisation.cc +++ b/src/nix/realisation.cc @@ -5,9 +5,9 @@ using namespace nix; -struct CmdRealisation : virtual NixMultiCommand +struct CmdRealisation : NixMultiCommand { - CmdRealisation() : MultiCommand(RegisterCommand::getCommandsFor({"realisation"})) + CmdRealisation() : NixMultiCommand("realisation", RegisterCommand::getCommandsFor({"realisation"})) { } std::string description() override @@ -16,13 +16,6 @@ struct CmdRealisation : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix realisation' requires a sub-command."); - command->second->run(); - } }; static auto rCmdRealisation = registerCommand("realisation"); diff --git a/src/nix/registry.cc b/src/nix/registry.cc index f509ccae8..0346ec1e0 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -196,10 +196,12 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand } }; -struct CmdRegistry : virtual NixMultiCommand +struct CmdRegistry : NixMultiCommand { CmdRegistry() - : MultiCommand({ + : NixMultiCommand( + "registry", + { {"list", []() { return make_ref(); }}, {"add", []() { return make_ref(); }}, {"remove", []() { return make_ref(); }}, @@ -221,14 +223,6 @@ struct CmdRegistry : virtual NixMultiCommand } Category category() override { return catSecondary; } - - void run() override - { - experimentalFeatureSettings.require(Xp::Flakes); - if (!command) - throw UsageError("'nix registry' requires a sub-command."); - command->second->run(); - } }; static auto rCmdRegistry = registerCommand("registry"); diff --git a/src/nix/run.cc b/src/nix/run.cc index ea0a17897..9bca5b9d0 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -25,6 +25,7 @@ std::string chrootHelperName = "__run_in_chroot"; namespace nix { void runProgramInStore(ref store, + UseSearchPath useSearchPath, const std::string & program, const Strings & args, std::optional system) @@ -58,7 +59,10 @@ void runProgramInStore(ref store, if (system) setPersonality(*system); - execvp(program.c_str(), stringsToCharPtrs(args).data()); + if (useSearchPath == UseSearchPath::Use) + execvp(program.c_str(), stringsToCharPtrs(args).data()); + else + execv(program.c_str(), stringsToCharPtrs(args).data()); throw 
SysError("unable to execute '%s'", program); } @@ -110,7 +114,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment setEnviron(); - auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); + std::vector pathAdditions; while (!todo.empty()) { auto path = todo.front(); @@ -118,7 +122,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (!done.insert(path).second) continue; if (true) - unixPath.push_front(store->printStorePath(path) + "/bin"); + pathAdditions.push_back(store->printStorePath(path) + "/bin"); auto propPath = CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { @@ -127,12 +131,15 @@ struct CmdShell : InstallablesCommand, MixEnvironment } } - setenv("PATH", concatStringsSep(":", unixPath).c_str(), 1); + auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); + unixPath.insert(unixPath.begin(), pathAdditions.begin(), pathAdditions.end()); + auto unixPathString = concatStringsSep(":", unixPath); + setenv("PATH", unixPathString.c_str(), 1); Strings args; for (auto & arg : command) args.push_back(arg); - runProgramInStore(store, *command.begin(), args); + runProgramInStore(store, UseSearchPath::Use, *command.begin(), args); } }; @@ -194,7 +201,7 @@ struct CmdRun : InstallableValueCommand Strings allArgs{app.program}; for (auto & i : args) allArgs.push_back(i); - runProgramInStore(store, app.program, allArgs); + runProgramInStore(store, UseSearchPath::DontUse, app.program, allArgs); } }; diff --git a/src/nix/run.hh b/src/nix/run.hh index 97ddef19b..a55917b06 100644 --- a/src/nix/run.hh +++ b/src/nix/run.hh @@ -5,7 +5,13 @@ namespace nix { +enum struct UseSearchPath { + Use, + DontUse +}; + void runProgramInStore(ref store, + UseSearchPath useSearchPath, const std::string & program, const Strings & args, std::optional system = std::nullopt); diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index a68616355..dfef44869 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -3,6 +3,7 @@ #include "shared.hh" #include "store-api.hh" #include "thread-pool.hh" +#include "progress-bar.hh" #include @@ -111,7 +112,7 @@ struct CmdSign : StorePathsCommand std::string description() override { - return "sign store paths"; + return "sign store paths with a local key"; } void run(ref store, StorePaths && storePaths) override @@ -120,6 +121,7 @@ struct CmdSign : StorePathsCommand throw UsageError("you must specify a secret key file using '-k'"); SecretKey secretKey(readFile(secretKeyFile)); + LocalSigner signer(std::move(secretKey)); size_t added{0}; @@ -128,7 +130,7 @@ struct CmdSign : StorePathsCommand auto info2(*info); info2.sigs.clear(); - info2.sign(*store, secretKey); + info2.sign(*store, signer); assert(!info2.sigs.empty()); if (!info->sigs.count(*info2.sigs.begin())) { @@ -174,6 +176,7 @@ struct CmdKeyGenerateSecret : Command if (!keyName) throw UsageError("required argument '--key-name' is missing"); + stopProgressBar(); writeFull(STDOUT_FILENO, SecretKey::generate(*keyName).to_string()); } }; @@ -195,6 +198,7 @@ struct CmdKeyConvertSecretToPublic : Command void run() override { SecretKey secretKey(drainFD(STDIN_FILENO)); + stopProgressBar(); writeFull(STDOUT_FILENO, secretKey.toPublicKey().to_string()); } }; @@ -202,7 +206,9 @@ struct CmdKeyConvertSecretToPublic : Command struct CmdKey : NixMultiCommand { CmdKey() - : MultiCommand({ + : NixMultiCommand( + "key", + { {"generate-secret", []() { return make_ref(); }}, 
{"convert-secret-to-public", []() { return make_ref(); }}, }) @@ -215,13 +221,6 @@ struct CmdKey : NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix key' requires a sub-command."); - command->second->run(); - } }; static auto rCmdKey = registerCommand("key"); diff --git a/src/nix/store.cc b/src/nix/store.cc index 2879e03b3..79b41e096 100644 --- a/src/nix/store.cc +++ b/src/nix/store.cc @@ -2,9 +2,9 @@ using namespace nix; -struct CmdStore : virtual NixMultiCommand +struct CmdStore : NixMultiCommand { - CmdStore() : MultiCommand(RegisterCommand::getCommandsFor({"store"})) + CmdStore() : NixMultiCommand("store", RegisterCommand::getCommandsFor({"store"})) { } std::string description() override @@ -13,13 +13,6 @@ struct CmdStore : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix store' requires a sub-command."); - command->second->run(); - } }; static auto rCmdStore = registerCommand("store"); diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 78cb765ce..2a0cbd19f 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -5,6 +5,7 @@ #include "thread-pool.hh" #include "references.hh" #include "signals.hh" +#include "keys.hh" #include @@ -98,7 +99,7 @@ struct CmdVerify : StorePathsCommand if (!noContents) { - auto hashSink = HashSink(info->narHash.type); + auto hashSink = HashSink(info->narHash.algo); store->narFromPath(info->path, hashSink); @@ -109,8 +110,8 @@ struct CmdVerify : StorePathsCommand act2.result(resCorruptedPath, store->printStorePath(info->path)); printError("path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(info->path), - info->narHash.to_string(HashFormat::Base32, true), - hash.first.to_string(HashFormat::Base32, true)); + info->narHash.to_string(HashFormat::Nix32, true), + hash.first.to_string(HashFormat::Nix32, true)); } } diff --git a/tests/functional/build-remote-trustless-should-fail-0.sh b/tests/functional/build-remote-trustless-should-fail-0.sh index fad1def59..3d4a4b097 100644 --- a/tests/functional/build-remote-trustless-should-fail-0.sh +++ b/tests/functional/build-remote-trustless-should-fail-0.sh @@ -4,6 +4,7 @@ enableFeatures "daemon-trust-override" restartDaemon +requireSandboxSupport [[ $busybox =~ busybox ]] || skipTest "no busybox" unset NIX_STORE_DIR diff --git a/tests/functional/build-remote-with-mounted-ssh-ng.sh b/tests/functional/build-remote-with-mounted-ssh-ng.sh new file mode 100644 index 000000000..443acb6ca --- /dev/null +++ b/tests/functional/build-remote-with-mounted-ssh-ng.sh @@ -0,0 +1,22 @@ +source common.sh + +requireSandboxSupport +[[ $busybox =~ busybox ]] || skipTest "no busybox" + +enableFeatures mounted-ssh-store + +nix build -Lvf simple.nix \ + --arg busybox $busybox \ + --out-link $TEST_ROOT/result-from-remote \ + --store mounted-ssh-ng://localhost + +nix build -Lvf simple.nix \ + --arg busybox $busybox \ + --out-link $TEST_ROOT/result-from-remote-new-cli \ + --store 'mounted-ssh-ng://localhost?remote-program=nix daemon' + +# This verifies that the out link was actually created and valid. The ability +# to create out links (permanent gc roots) is the distinguishing feature of +# the mounted-ssh-ng store. +cat $TEST_ROOT/result-from-remote/hello | grepQuiet 'Hello World!' +cat $TEST_ROOT/result-from-remote-new-cli/hello | grepQuiet 'Hello World!' 
diff --git a/tests/functional/ca/eval-store.sh b/tests/functional/ca/eval-store.sh new file mode 100644 index 000000000..9cc499606 --- /dev/null +++ b/tests/functional/ca/eval-store.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +# Ensure that garbage collection works properly with ca derivations + +source common.sh + +export NIX_TESTS_CA_BY_DEFAULT=1 + +cd .. +source eval-store.sh diff --git a/tests/functional/ca/local.mk b/tests/functional/ca/local.mk index fd87b8d1f..4f86b268f 100644 --- a/tests/functional/ca/local.mk +++ b/tests/functional/ca/local.mk @@ -5,6 +5,7 @@ ca-tests := \ $(d)/concurrent-builds.sh \ $(d)/derivation-json.sh \ $(d)/duplicate-realisation-in-closure.sh \ + $(d)/eval-store.sh \ $(d)/gc.sh \ $(d)/import-derivation.sh \ $(d)/new-build-cmd.sh \ diff --git a/tests/functional/config.sh b/tests/functional/config.sh index 0780c55d0..324fe95bd 100644 --- a/tests/functional/config.sh +++ b/tests/functional/config.sh @@ -40,20 +40,20 @@ files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | x # Test that it's possible to load the config from a custom location here=$(readlink -f "$(dirname "${BASH_SOURCE[0]}")") export NIX_USER_CONF_FILES=$here/config/nix-with-substituters.conf -var=$(nix show-config | grep '^substituters =' | cut -d '=' -f 2 | xargs) +var=$(nix config show | grep '^substituters =' | cut -d '=' -f 2 | xargs) [[ $var == https://example.com ]] # Test that it's possible to load config from the environment -prev=$(nix show-config | grep '^cores' | cut -d '=' -f 2 | xargs) +prev=$(nix config show | grep '^cores' | cut -d '=' -f 2 | xargs) export NIX_CONFIG="cores = 4242"$'\n'"experimental-features = nix-command flakes" -exp_cores=$(nix show-config | grep '^cores' | cut -d '=' -f 2 | xargs) -exp_features=$(nix show-config | grep '^experimental-features' | cut -d '=' -f 2 | xargs) +exp_cores=$(nix config show | grep '^cores' | cut -d '=' -f 2 | xargs) +exp_features=$(nix config show | grep '^experimental-features' | cut -d '=' -f 2 | xargs) [[ $prev != $exp_cores ]] [[ $exp_cores == "4242" ]] # flakes implies fetch-tree [[ $exp_features == "fetch-tree flakes nix-command" ]] # Test that it's possible to retrieve a single setting's value -val=$(nix show-config | grep '^warn-dirty' | cut -d '=' -f 2 | xargs) -val2=$(nix show-config warn-dirty) +val=$(nix config show | grep '^warn-dirty' | cut -d '=' -f 2 | xargs) +val2=$(nix config show warn-dirty) [[ $val == $val2 ]] diff --git a/tests/functional/eval-store.sh b/tests/functional/eval-store.sh index 8fc859730..9937ecbce 100644 --- a/tests/functional/eval-store.sh +++ b/tests/functional/eval-store.sh @@ -11,7 +11,16 @@ rm -rf "$eval_store" nix build -f dependencies.nix --eval-store "$eval_store" -o "$TEST_ROOT/result" [[ -e $TEST_ROOT/result/foobar ]] -(! ls $NIX_STORE_DIR/*.drv) +if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + # Resolved CA derivations are written to the store for building + # + # TODO when we have something more systematic + # (https://github.com/NixOS/nix/issues/5025) that distinguishes + # between scratch storage for building and the final destination + # store, we'll be able to make this unconditional again -- resolved + # derivations should only appear in the scratch store. + (! ls $NIX_STORE_DIR/*.drv) +fi ls $eval_store/nix/store/*.drv clearStore @@ -26,5 +35,16 @@ rm -rf "$eval_store" nix-build dependencies.nix --eval-store "$eval_store" -o "$TEST_ROOT/result" [[ -e $TEST_ROOT/result/foobar ]] -(! ls $NIX_STORE_DIR/*.drv) +if [[ !
-n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + # See above + (! ls $NIX_STORE_DIR/*.drv) +fi ls $eval_store/nix/store/*.drv + +clearStore +rm -rf "$eval_store" + +# Confirm that import-from-derivation builds on the build store +[[ $(nix eval --eval-store "$eval_store?require-sigs=false" --impure --raw --file ./ifd.nix) = hi ]] +ls $NIX_STORE_DIR/*dependencies-top/foobar +(! ls $eval_store/nix/store/*dependencies-top/foobar) diff --git a/tests/functional/experimental-features.sh b/tests/functional/experimental-features.sh index 607bf0a8e..9ee4a53d4 100644 --- a/tests/functional/experimental-features.sh +++ b/tests/functional/experimental-features.sh @@ -31,7 +31,7 @@ source common.sh NIX_CONFIG=' experimental-features = nix-command accept-flake-config = true -' nix show-config accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr +' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr grepQuiet "false" $TEST_ROOT/stdout grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" $TEST_ROOT/stderr @@ -39,7 +39,7 @@ grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature ' NIX_CONFIG=' accept-flake-config = true experimental-features = nix-command -' nix show-config accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr +' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr grepQuiet "false" $TEST_ROOT/stdout grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" $TEST_ROOT/stderr @@ -47,7 +47,7 @@ grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature ' NIX_CONFIG=' experimental-features = nix-command flakes accept-flake-config = true -' nix show-config accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr +' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr grepQuiet "true" $TEST_ROOT/stdout grepQuietInverse "Ignoring setting 'accept-flake-config'" $TEST_ROOT/stderr @@ -55,7 +55,7 @@ grepQuietInverse "Ignoring setting 'accept-flake-config'" $TEST_ROOT/stderr NIX_CONFIG=' accept-flake-config = true experimental-features = nix-command flakes -' nix show-config accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr +' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr grepQuiet "true" $TEST_ROOT/stdout grepQuietInverse "Ignoring setting 'accept-flake-config'" $TEST_ROOT/stderr diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index fc89f2040..4985c7764 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -51,9 +51,7 @@ git -C $repo add differentbranch git -C $repo commit -m 'Test2' git -C $repo checkout master devrev=$(git -C $repo rev-parse devtest) -out=$(nix eval --impure --raw --expr "builtins.fetchGit { url = file://$repo; rev = \"$devrev\"; }" 2>&1) || status=$? -[[ $status == 1 ]] -[[ $out =~ 'Cannot find Git revision' ]] +nix eval --impure --raw --expr "builtins.fetchGit { url = file://$repo; rev = \"$devrev\"; }" [[ $(nix eval --raw --expr "builtins.readFile (builtins.fetchGit { url = file://$repo; rev = \"$devrev\"; allRefs = true; } + \"/differentbranch\")") = 'different file' ]] @@ -185,11 +183,7 @@ path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = # Nuke the cache rm -rf $TEST_HOME/.cache/nix -# Try again, but without 'git' on PATH. This should fail. -NIX=$(command -v nix) -(! 
PATH= $NIX eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" ) - -# Try again, with 'git' available. This should work. +# Try again. This should work. path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") [[ $path3 = $path5 ]] @@ -241,6 +235,7 @@ rm -rf $repo/.git # should succeed for a repo without commits git init $repo +git -C $repo add hello # need to add at least one file to cause the root of the repo to be visible path10=$(nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") # should succeed for a path with a space diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index df81232e5..369cdc5db 100644 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -118,11 +118,3 @@ cloneRepo=$TEST_ROOT/a/b/gitSubmodulesClone # NB /a/b to make the relative path git clone $rootRepo $cloneRepo pathIndirect=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$cloneRepo; rev = \"$rev2\"; submodules = true; }).outPath") [[ $pathIndirect = $pathWithRelative ]] - -# Test that if the clone has the submodule already, we're not fetching -# it again. -git -C $cloneRepo submodule update --init -rm $TEST_HOME/.cache/nix/fetcher-cache* -rm -rf $subRepo -pathSubmoduleGone=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$cloneRepo; rev = \"$rev2\"; submodules = true; }).outPath") -[[ $pathSubmoduleGone = $pathWithRelative ]] diff --git a/tests/functional/fetchGitVerification.sh b/tests/functional/fetchGitVerification.sh index 4d9209498..b80e061b5 100644 --- a/tests/functional/fetchGitVerification.sh +++ b/tests/functional/fetchGitVerification.sh @@ -34,6 +34,12 @@ out=$(nix eval --impure --raw --expr "builtins.fetchGit { url = \"file://$repo\" [[ $(nix eval --impure --raw --expr "builtins.readFile (builtins.fetchGit { url = \"file://$repo\"; publicKey = \"$publicKey1\"; } + \"/text\")") = 'hello' ]] echo 'hello world' > $repo/text + +# Verification on a dirty repo should fail. +out=$(nix eval --impure --raw --expr "builtins.fetchGit { url = \"file://$repo\"; keytype = \"ssh-rsa\"; publicKey = \"$publicKey2\"; }" 2>&1) || status=$? +[[ $status == 1 ]] +[[ $out =~ 'dirty' ]] + git -C $repo add text git -C $repo -c "user.signingkey=$key2File" commit -S -m 'second commit' @@ -73,4 +79,4 @@ cat > "$flakeDir/flake.nix" <&1) || status=$? [[ $status == 1 ]] -[[ $out =~ 'No principal matched.' ]] \ No newline at end of file +[[ $out =~ 'No principal matched.' ]] diff --git a/tests/functional/fixed.nix b/tests/functional/fixed.nix index babe71504..5bdf79333 100644 --- a/tests/functional/fixed.nix +++ b/tests/functional/fixed.nix @@ -48,6 +48,15 @@ rec { (f ./fixed.builder1.sh "flat" "md5" "ddd8be4b179a529afa5f2ffae4b9858") ]; + badReferences = mkDerivation rec { + name = "bad-hash"; + builder = script; + script = builtins.toFile "installer.sh" "echo $script >$out"; + outputHash = "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik"; + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + }; + # Test for building two derivations in parallel that produce the # same output path because they're fixed-output derivations. parallelSame = [ diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh index f1e1ce420..d98d4cd15 100644 --- a/tests/functional/fixed.sh +++ b/tests/functional/fixed.sh @@ -26,6 +26,11 @@ nix-build fixed.nix -A good2 --no-out-link echo 'testing reallyBad...' 
nix-instantiate fixed.nix -A reallyBad && fail "should fail" +if isDaemonNewer "2.20pre20240108"; then + echo 'testing fixed with references...' + expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths" +fi + # While we're at it, check attribute selection a bit more. echo 'testing attribute selection...' test $(nix-instantiate fixed.nix -A good.1 | wc -l) = 1 diff --git a/tests/functional/flakes/flake-in-submodule.sh b/tests/functional/flakes/flake-in-submodule.sh index 21a4b52de..85a4d3389 100644 --- a/tests/functional/flakes/flake-in-submodule.sh +++ b/tests/functional/flakes/flake-in-submodule.sh @@ -46,7 +46,16 @@ echo '"expression in root repo"' > $rootRepo/root.nix git -C $rootRepo add root.nix git -C $rootRepo commit -m "Add root.nix" +flakeref=git+file://$rootRepo\?submodules=1\&dir=submodule + # Flake can live inside a submodule and can be accessed via ?dir=submodule -[[ $(nix eval --json git+file://$rootRepo\?submodules=1\&dir=submodule#sub ) = '"expression in submodule"' ]] +[[ $(nix eval --json $flakeref#sub ) = '"expression in submodule"' ]] + # The flake can access content outside of the submodule -[[ $(nix eval --json git+file://$rootRepo\?submodules=1\&dir=submodule#root ) = '"expression in root repo"' ]] +[[ $(nix eval --json $flakeref#root ) = '"expression in root repo"' ]] + +# Check that dirtying a submodule makes the entire thing dirty. +[[ $(nix flake metadata --json $flakeref | jq -r .locked.rev) != null ]] +echo '"foo"' > $rootRepo/submodule/sub.nix +[[ $(nix eval --json $flakeref#sub ) = '"foo"' ]] +[[ $(nix flake metadata --json $flakeref | jq -r .locked.rev) = null ]] diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index ccf1699f9..7506b6b3b 100644 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -193,6 +193,14 @@ nix build -o "$TEST_ROOT/result" flake1 nix build -o "$TEST_ROOT/result" "$flake1Dir" nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir" +# Test explicit packages.default. +nix build -o "$TEST_ROOT/result" "$flake1Dir#default" +nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir#default" + +# Test explicit packages.default with query. +nix build -o "$TEST_ROOT/result" "$flake1Dir?ref=HEAD#default" +nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir?ref=HEAD#default" + # Check that store symlinks inside a flake are not interpreted as flakes. nix build -o "$flake1Dir/result" "git+file://$flake1Dir" nix path-info "$flake1Dir/result" diff --git a/tests/functional/flakes/follow-paths.sh b/tests/functional/flakes/follow-paths.sh index 9f23d738b..1afd91bd2 100644 --- a/tests/functional/flakes/follow-paths.sh +++ b/tests/functional/flakes/follow-paths.sh @@ -260,3 +260,79 @@ EOF checkRes=$(nix flake lock "$flakeFollowCycle" 2>&1 && fail "nix flake lock should have failed." || true) echo $checkRes | grep -F "error: follow cycle detected: [baz -> foo -> bar -> baz]" + + +# Test transitive input url locking +# This tests the following lockfile issue: https://github.com/NixOS/nix/issues/9143 +# +# We construct the following graph, where p->q means p has input q. +# +# A -> B -> C +# +# And override B/C to flake D, first in A's flake.nix and then with --override-input. 
+# +# A -> B -> D +flakeFollowsCustomUrlA="$TEST_ROOT/follows/custom-url/flakeA" +flakeFollowsCustomUrlB="$TEST_ROOT/follows/custom-url/flakeA/flakeB" +flakeFollowsCustomUrlC="$TEST_ROOT/follows/custom-url/flakeA/flakeB/flakeC" +flakeFollowsCustomUrlD="$TEST_ROOT/follows/custom-url/flakeA/flakeB/flakeD" + + +createGitRepo "$flakeFollowsCustomUrlA" +mkdir -p "$flakeFollowsCustomUrlB" +mkdir -p "$flakeFollowsCustomUrlC" +mkdir -p "$flakeFollowsCustomUrlD" + +cat > "$flakeFollowsCustomUrlD/flake.nix" < "$flakeFollowsCustomUrlC/flake.nix" < "$flakeFollowsCustomUrlB/flake.nix" < "$flakeFollowsCustomUrlA/flake.nix" <&1 || echo "exit: $?") + [[ "$fail" == *"error: input hash"*"exit: 1" ]] + fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") + [[ "$fail" == *"error: input hash"*"exit: 1" ]] + fail=$(nix hash convert --algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") + [[ "$fail" == *"error: input hash"*"exit: 1" ]] + } + try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8=" try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ==" diff --git a/tests/functional/ifd.nix b/tests/functional/ifd.nix new file mode 100644 index 000000000..d0b9b54ad --- /dev/null +++ b/tests/functional/ifd.nix @@ -0,0 +1,10 @@ +with import ./config.nix; +import ( + mkDerivation { + name = "foo"; + bla = import ./dependencies.nix {}; + buildCommand = " + echo \\\"hi\\\" > $out + "; + } +) diff --git a/tests/functional/impure-eval.sh b/tests/functional/impure-eval.sh new file mode 100644 index 000000000..6c72f01d7 --- /dev/null +++ b/tests/functional/impure-eval.sh @@ -0,0 +1,35 @@ +source common.sh + +export REMOTE_STORE="dummy://" + +simpleTest () { + local expr=$1; shift + local result=$1; shift + # rest, extra args + + [[ "$(nix eval --impure --raw "$@" --expr "$expr")" == "$result" ]] +} + +# `builtins.storeDir` + +## Store dir follows `store` store setting +simpleTest 'builtins.storeDir' '/foo' --store "$REMOTE_STORE?store=/foo" +simpleTest 'builtins.storeDir' '/bar' --store "$REMOTE_STORE?store=/bar" + +# `builtins.currentSystem` + +## `system` alone affects by default +simpleTest 'builtins.currentSystem' 'foo' --system 'foo' +simpleTest 'builtins.currentSystem' 'bar' --system 'bar' + +## `system` affects if `eval-system` is an empty string +simpleTest 'builtins.currentSystem' 'foo' --system 'foo' --eval-system '' +simpleTest 'builtins.currentSystem' 'bar' --system 'bar' --eval-system '' + +## `eval-system` alone affects +simpleTest 'builtins.currentSystem' 'foo' --eval-system 'foo' +simpleTest 'builtins.currentSystem' 'bar' --eval-system 'bar' + +## `eval-system` overrides `system` +simpleTest 'builtins.currentSystem' 'bar' --system 'foo' --eval-system 'bar' +simpleTest 'builtins.currentSystem' 'baz' --system 'foo' --eval-system 'baz' diff --git a/tests/functional/init.sh b/tests/functional/init.sh index c420e8c9f..d697b1a30 100755 --- a/tests/functional/init.sh +++ b/tests/functional/init.sh @@ -20,7 +20,7 @@ cat > "$NIX_CONF_DIR"/nix.conf < 
"$NIX_CONF_DIR"/nix.conf.extra < 0 + + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: + 5| if n > 0 + 6| then throwAfter (n - 1) + | ^ + 7| else throw "Uh oh!"; + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: + 5| if n > 0 + 6| then throwAfter (n - 1) + | ^ + 7| else throw "Uh oh!"; + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + error: Uh oh! diff --git a/tests/functional/lang/eval-fail-duplicate-traces.nix b/tests/functional/lang/eval-fail-duplicate-traces.nix new file mode 100644 index 000000000..17ce374ec --- /dev/null +++ b/tests/functional/lang/eval-fail-duplicate-traces.nix @@ -0,0 +1,9 @@ +# Check that we only omit duplicate stack traces when there's a bunch of them. +# Here, there's only a couple duplicate entries, so we output them all. +let + throwAfter = n: + if n > 0 + then throwAfter (n - 1) + else throw "Uh oh!"; +in + throwAfter 2 diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp index 0069285fb..7cb08af8a 100644 --- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp +++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp @@ -1,35 +1,27 @@ error: … while calling the 'foldl'' builtin - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:1: - 1| # Tests that the result of applying op is forced even if the value is never used 2| builtins.foldl' | ^ 3| (_: f: f null) … while calling anonymous lambda - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:7: - 2| builtins.foldl' 3| (_: f: f null) | ^ 4| null … from call site - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:10: - 2| builtins.foldl' 3| (_: f: f null) | ^ 4| null … while calling anonymous lambda - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:6: - 4| null 5| [ (_: throw "Not the final value, but is still forced!") (_: 23) ] | ^ diff --git a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp index 5b60d253d..73f9df8cc 100644 --- a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp +++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'fromTOML' builtin - at /pwd/lang/eval-fail-fromTOML-timestamps.nix:1:1: - 1| builtins.fromTOML '' | ^ 2| key = "value" diff --git a/tests/functional/lang/eval-fail-hashfile-missing.err.exp b/tests/functional/lang/eval-fail-hashfile-missing.err.exp index 6d38608c0..1e4653927 100644 --- a/tests/functional/lang/eval-fail-hashfile-missing.err.exp +++ b/tests/functional/lang/eval-fail-hashfile-missing.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'toString' builtin - at /pwd/lang/eval-fail-hashfile-missing.nix:4:3: - 3| in 4| toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"])) | ^ diff --git a/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp b/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp new file mode 100644 index 000000000..5d843d827 --- /dev/null +++ b/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp @@ -0,0 +1,38 @@ +error: + … from call site + at 
/pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:1: + 1| (x: x x) (x: x x) + | ^ + 2| + + … while calling anonymous lambda + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:2: + 1| (x: x x) (x: x x) + | ^ + 2| + + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:5: + 1| (x: x x) (x: x x) + | ^ + 2| + + … while calling anonymous lambda + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:11: + 1| (x: x x) (x: x x) + | ^ + 2| + + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:14: + 1| (x: x x) (x: x x) + | ^ + 2| + + (19997 duplicate frames omitted) + + error: stack overflow; max-call-depth exceeded + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:14: + 1| (x: x x) (x: x x) + | ^ + 2| diff --git a/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix b/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix new file mode 100644 index 000000000..dd0a8bf2e --- /dev/null +++ b/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix @@ -0,0 +1 @@ +(x: x x) (x: x x) diff --git a/tests/functional/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp index 24d682118..4320fc022 100644 --- a/tests/functional/lang/eval-fail-list.err.exp +++ b/tests/functional/lang/eval-fail-list.err.exp @@ -1,8 +1,6 @@ error: … while evaluating one of the elements to concatenate - at /pwd/lang/eval-fail-list.nix:1:2: - 1| 8++1 | ^ 2| diff --git a/tests/functional/lang/eval-fail-missing-arg.err.exp b/tests/functional/lang/eval-fail-missing-arg.err.exp index 61fabf0d5..3b162fe1b 100644 --- a/tests/functional/lang/eval-fail-missing-arg.err.exp +++ b/tests/functional/lang/eval-fail-missing-arg.err.exp @@ -1,16 +1,12 @@ error: … from call site - at /pwd/lang/eval-fail-missing-arg.nix:1:1: - 1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";} | ^ 2| error: function 'anonymous lambda' called without required argument 'y' - at /pwd/lang/eval-fail-missing-arg.nix:1:2: - 1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";} | ^ 2| diff --git a/tests/functional/lang/eval-fail-mutual-recursion.err.exp b/tests/functional/lang/eval-fail-mutual-recursion.err.exp new file mode 100644 index 000000000..dc2e11766 --- /dev/null +++ b/tests/functional/lang/eval-fail-mutual-recursion.err.exp @@ -0,0 +1,57 @@ +error: + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:36:3: + 35| in + 36| throwAfterA true 10 + | ^ + 37| + + … while calling 'throwAfterA' + at /pwd/lang/eval-fail-mutual-recursion.nix:29:26: + 28| + 29| throwAfterA = recurse: n: + | ^ + 30| if n > 0 + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:31:10: + 30| if n > 0 + 31| then throwAfterA recurse (n - 1) + | ^ + 32| else if recurse + + (19 duplicate frames omitted) + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:33:10: + 32| else if recurse + 33| then throwAfterB true 10 + | ^ + 34| else throw "Uh oh!"; + + … while calling 'throwAfterB' + at /pwd/lang/eval-fail-mutual-recursion.nix:22:26: + 21| let + 22| throwAfterB = recurse: n: + | ^ + 23| if n > 0 + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:24:10: + 23| if n > 0 + 24| then throwAfterB recurse (n - 1) + | ^ + 25| else if recurse + + (19 duplicate frames omitted) + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:26:10: + 25| else if recurse + 26| then throwAfterA false 10 + | ^ + 27| else throw "Uh oh!"; + + (21 duplicate frames omitted) + + error: Uh oh! 
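(A minimal sketch of checking an `.err.exp` expectation like the one above by hand, assuming a checkout where the language tests live under tests/functional/lang; the lang.sh harness does essentially this, and also rewrites the working directory to /pwd before comparing:

    cd tests/functional
    nix-instantiate --eval --strict --show-trace lang/eval-fail-mutual-recursion.nix 2> actual.err || true
    sed "s|$(pwd)|/pwd|g" actual.err > actual.normalized
    diff -u lang/eval-fail-mutual-recursion.err.exp actual.normalized
)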
diff --git a/tests/functional/lang/eval-fail-mutual-recursion.nix b/tests/functional/lang/eval-fail-mutual-recursion.nix new file mode 100644 index 000000000..d090d3158 --- /dev/null +++ b/tests/functional/lang/eval-fail-mutual-recursion.nix @@ -0,0 +1,36 @@ +# Check that stack frame deduplication only affects consecutive intervals, and +# that they are reported independently of any preceding sections, even if +# they're indistinguishable. +# +# In terms of the current implementation, we check that we clear the set of +# "seen frames" after eliding a group of frames. +# +# Suppose we have: +# - 10 frames in a function A +# - 10 frames in a function B +# - 10 frames in a function A +# +# We want to output: +# - a few frames of A (skip the rest) +# - a few frames of B (skip the rest) +# - a few frames of A (skip the rest) +# +# If we implemented this in the naive manner, we'd instead get: +# - a few frames of A (skip the rest) +# - a few frames of B (skip the rest, _and_ skip the remaining frames of A) +let + throwAfterB = recurse: n: + if n > 0 + then throwAfterB recurse (n - 1) + else if recurse + then throwAfterA false 10 + else throw "Uh oh!"; + + throwAfterA = recurse: n: + if n > 0 + then throwAfterA recurse (n - 1) + else if recurse + then throwAfterB true 10 + else throw "Uh oh!"; +in + throwAfterA true 10 diff --git a/tests/functional/lang/eval-fail-not-throws.err.exp b/tests/functional/lang/eval-fail-not-throws.err.exp new file mode 100644 index 000000000..fc81f7277 --- /dev/null +++ b/tests/functional/lang/eval-fail-not-throws.err.exp @@ -0,0 +1,14 @@ +error: + … in the argument of the not operator + at /pwd/lang/eval-fail-not-throws.nix:1:4: + 1| ! (throw "uh oh!") + | ^ + 2| + + … while calling the 'throw' builtin + at /pwd/lang/eval-fail-not-throws.nix:1:4: + 1| ! (throw "uh oh!") + | ^ + 2| + + error: uh oh! diff --git a/tests/functional/lang/eval-fail-not-throws.nix b/tests/functional/lang/eval-fail-not-throws.nix new file mode 100644 index 000000000..a74ce4ebe --- /dev/null +++ b/tests/functional/lang/eval-fail-not-throws.nix @@ -0,0 +1 @@ +! 
(throw "uh oh!") diff --git a/tests/functional/lang/eval-fail-path-slash.err.exp b/tests/functional/lang/eval-fail-path-slash.err.exp index f0011c97f..e3531d352 100644 --- a/tests/functional/lang/eval-fail-path-slash.err.exp +++ b/tests/functional/lang/eval-fail-path-slash.err.exp @@ -1,7 +1,5 @@ error: path has a trailing slash - at /pwd/lang/eval-fail-path-slash.nix:6:12: - 5| # and https://nixos.org/nix-dev/2016-June/020829.html 6| /nix/store/ | ^ diff --git a/tests/functional/lang/eval-fail-recursion.err.exp b/tests/functional/lang/eval-fail-recursion.err.exp index af64133cb..19380dc65 100644 --- a/tests/functional/lang/eval-fail-recursion.err.exp +++ b/tests/functional/lang/eval-fail-recursion.err.exp @@ -1,16 +1,12 @@ error: … in the right operand of the update (//) operator - at /pwd/lang/eval-fail-recursion.nix:1:12: - 1| let a = {} // a; in a.foo | ^ 2| error: infinite recursion encountered - at /pwd/lang/eval-fail-recursion.nix:1:15: - 1| let a = {} // a; in a.foo | ^ 2| diff --git a/tests/functional/lang/eval-fail-remove.err.exp b/tests/functional/lang/eval-fail-remove.err.exp index e82cdac98..292b3c3f3 100644 --- a/tests/functional/lang/eval-fail-remove.err.exp +++ b/tests/functional/lang/eval-fail-remove.err.exp @@ -1,17 +1,13 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-remove.nix:4:3: - 3| 4| body = (removeAttrs attrs ["x"]).x; | ^ 5| } error: attribute 'x' missing - at /pwd/lang/eval-fail-remove.nix:4:10: - 3| 4| body = (removeAttrs attrs ["x"]).x; | ^ diff --git a/tests/functional/lang/eval-fail-scope-5.err.exp b/tests/functional/lang/eval-fail-scope-5.err.exp index 22b6166f8..b0b05cad7 100644 --- a/tests/functional/lang/eval-fail-scope-5.err.exp +++ b/tests/functional/lang/eval-fail-scope-5.err.exp @@ -1,35 +1,27 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-scope-5.nix:8:3: - 7| 8| body = f {}; | ^ 9| … from call site - at /pwd/lang/eval-fail-scope-5.nix:8:10: - 7| 8| body = f {}; | ^ 9| … while calling 'f' - at /pwd/lang/eval-fail-scope-5.nix:6:7: - 5| 6| f = {x ? y, y ? x}: x + y; | ^ 7| error: infinite recursion encountered - at /pwd/lang/eval-fail-scope-5.nix:6:12: - 5| 6| f = {x ? y, y ? 
x}: x + y; | ^ diff --git a/tests/functional/lang/eval-fail-seq.err.exp b/tests/functional/lang/eval-fail-seq.err.exp index 33a7e9491..3e3d71b15 100644 --- a/tests/functional/lang/eval-fail-seq.err.exp +++ b/tests/functional/lang/eval-fail-seq.err.exp @@ -1,16 +1,12 @@ error: … while calling the 'seq' builtin - at /pwd/lang/eval-fail-seq.nix:1:1: - 1| builtins.seq (abort "foo") 2 | ^ 2| … while calling the 'abort' builtin - at /pwd/lang/eval-fail-seq.nix:1:15: - 1| builtins.seq (abort "foo") 2 | ^ 2| diff --git a/tests/functional/lang/eval-fail-set.err.exp b/tests/functional/lang/eval-fail-set.err.exp index 0d0140508..6dd646e11 100644 --- a/tests/functional/lang/eval-fail-set.err.exp +++ b/tests/functional/lang/eval-fail-set.err.exp @@ -1,7 +1,5 @@ error: undefined variable 'x' - at /pwd/lang/eval-fail-set.nix:1:3: - 1| 8.x | ^ 2| diff --git a/tests/functional/lang/eval-fail-substring.err.exp b/tests/functional/lang/eval-fail-substring.err.exp index 5c58be29a..0457a826e 100644 --- a/tests/functional/lang/eval-fail-substring.err.exp +++ b/tests/functional/lang/eval-fail-substring.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'substring' builtin - at /pwd/lang/eval-fail-substring.nix:1:1: - 1| builtins.substring (builtins.sub 0 1) 1 "x" | ^ 2| diff --git a/tests/functional/lang/eval-fail-to-path.err.exp b/tests/functional/lang/eval-fail-to-path.err.exp index 4ffa2cf6d..d6b17be99 100644 --- a/tests/functional/lang/eval-fail-to-path.err.exp +++ b/tests/functional/lang/eval-fail-to-path.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'toPath' builtin - at /pwd/lang/eval-fail-to-path.nix:1:1: - 1| builtins.toPath "foo/bar" | ^ 2| diff --git a/tests/functional/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp index 4e618c203..4f6003437 100644 --- a/tests/functional/lang/eval-fail-toJSON.err.exp +++ b/tests/functional/lang/eval-fail-toJSON.err.exp @@ -1,25 +1,19 @@ error: … while calling the 'toJSON' builtin - at /pwd/lang/eval-fail-toJSON.nix:1:1: - 1| builtins.toJSON { | ^ 2| a.b = [ … while evaluating attribute 'a' - at /pwd/lang/eval-fail-toJSON.nix:2:3: - 1| builtins.toJSON { 2| a.b = [ | ^ 3| true … while evaluating attribute 'b' - at /pwd/lang/eval-fail-toJSON.nix:2:3: - 1| builtins.toJSON { 2| a.b = [ | ^ @@ -28,27 +22,21 @@ error: … while evaluating list element at index 3 … while evaluating attribute 'c' - at /pwd/lang/eval-fail-toJSON.nix:7:7: - 6| { 7| c.d = throw "hah no"; | ^ 8| } … while evaluating attribute 'd' - at /pwd/lang/eval-fail-toJSON.nix:7:7: - 6| { 7| c.d = throw "hah no"; | ^ 8| } … while calling the 'throw' builtin - at /pwd/lang/eval-fail-toJSON.nix:7:13: - 6| { 7| c.d = throw "hah no"; | ^ diff --git a/tests/functional/lang/eval-fail-undeclared-arg.err.exp b/tests/functional/lang/eval-fail-undeclared-arg.err.exp index 30db743c7..6e13a138e 100644 --- a/tests/functional/lang/eval-fail-undeclared-arg.err.exp +++ b/tests/functional/lang/eval-fail-undeclared-arg.err.exp @@ -1,16 +1,12 @@ error: … from call site - at /pwd/lang/eval-fail-undeclared-arg.nix:1:1: - 1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} | ^ 2| error: function 'anonymous lambda' called with unexpected argument 'y' - at /pwd/lang/eval-fail-undeclared-arg.nix:1:2: - 1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} | ^ 2| diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp new file mode 100644 index 000000000..0a4f56ac5 --- /dev/null +++ 
b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -0,0 +1,9 @@ +error: + … while evaluating an attribute name + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: + 4| in + 5| attr.${key} + | ^ + 6| + + error: value is a set while a string was expected diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.nix b/tests/functional/lang/eval-fail-using-set-as-attr-name.nix new file mode 100644 index 000000000..48e071a41 --- /dev/null +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.nix @@ -0,0 +1,5 @@ +let + attr = {foo = "bar";}; + key = {}; +in + attr.${key} diff --git a/tests/functional/lang/eval-okay-convertHash.err.exp b/tests/functional/lang/eval-okay-convertHash.err.exp new file mode 100644 index 000000000..41d746725 --- /dev/null +++ b/tests/functional/lang/eval-okay-convertHash.err.exp @@ -0,0 +1,108 @@ +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". 
+warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". 
+warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". diff --git a/tests/functional/lang/eval-okay-convertHash.exp b/tests/functional/lang/eval-okay-convertHash.exp index 60e0a3c49..16b0240e5 100644 --- a/tests/functional/lang/eval-okay-convertHash.exp +++ b/tests/functional/lang/eval-okay-convertHash.exp @@ -1 +1 @@ -{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" "bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesSRI = [ "md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" 
"sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; } +{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" "bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesNix32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesSRI = [ "md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" 
"sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; } diff --git a/tests/functional/lang/eval-okay-convertHash.nix b/tests/functional/lang/eval-okay-convertHash.nix index cf4909aaf..a0191ee8d 100644 --- a/tests/functional/lang/eval-okay-convertHash.nix +++ b/tests/functional/lang/eval-okay-convertHash.nix @@ -5,12 +5,14 @@ let map2' = f: fsts: snds: map2 f { inherit fsts snds; }; getOutputHashes = hashes: { hashesBase16 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base16";}) hashAlgos hashes; + hashesNix32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}) hashAlgos hashes; hashesBase32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}) hashAlgos hashes; hashesBase64 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base64";}) hashAlgos hashes; hashesSRI = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "sri" ;}) hashAlgos hashes; }; getOutputHashesColon = hashes: { hashesBase16 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base16";}) hashAlgos hashes; + hashesNix32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "nix32";}) hashAlgos hashes; hashesBase32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base32";}) hashAlgos hashes; hashesBase64 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base64";}) hashAlgos hashes; hashesSRI = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "sri" ;}) hashAlgos hashes; diff --git a/tests/functional/lang/eval-okay-symlink-resolution.exp b/tests/functional/lang/eval-okay-symlink-resolution.exp new file mode 100644 index 000000000..8b8441b91 --- /dev/null +++ b/tests/functional/lang/eval-okay-symlink-resolution.exp @@ -0,0 +1 @@ +"test" diff --git a/tests/functional/lang/eval-okay-symlink-resolution.nix b/tests/functional/lang/eval-okay-symlink-resolution.nix new file mode 100644 index 000000000..ffb1818bd --- /dev/null +++ b/tests/functional/lang/eval-okay-symlink-resolution.nix @@ -0,0 +1 @@ +import symlink-resolution/foo/overlays/overlay.nix diff --git a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp index 4fe6b7a1f..6c3a3510c 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:1:3 - at «stdin»:3:3: - 2| y = 456; 3| x = 789; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp index 3aba2891f..fecdece20 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp @@ -1,7 +1,5 @@ error: 
attribute 'x' already defined at «stdin»:9:5 - at «stdin»:10:17: - 9| x = 789; 10| inherit (as) x; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp index 3aba2891f..fecdece20 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:9:5 - at «stdin»:10:17: - 9| x = 789; 10| inherit (as) x; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp index ff68446a1..f85ffea51 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp @@ -1,7 +1,5 @@ error: attribute 'services.ssh.port' already defined at «stdin»:2:3 - at «stdin»:3:3: - 2| services.ssh.port = 22; 3| services.ssh.port = 23; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp index 512a499ca..98cea9dae 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:6:12 - at «stdin»:7:12: - 6| inherit x; 7| inherit x; | ^ diff --git a/tests/functional/lang/parse-fail-dup-formals.err.exp b/tests/functional/lang/parse-fail-dup-formals.err.exp index 1d566fb33..d7c7e0237 100644 --- a/tests/functional/lang/parse-fail-dup-formals.err.exp +++ b/tests/functional/lang/parse-fail-dup-formals.err.exp @@ -1,6 +1,4 @@ error: duplicate formal function argument 'x' - at «stdin»:1:8: - 1| {x, y, x}: x | ^ diff --git a/tests/functional/lang/parse-fail-eof-in-string.err.exp b/tests/functional/lang/parse-fail-eof-in-string.err.exp index f9fa72312..b28d35950 100644 --- a/tests/functional/lang/parse-fail-eof-in-string.err.exp +++ b/tests/functional/lang/parse-fail-eof-in-string.err.exp @@ -1,7 +1,5 @@ error: syntax error, unexpected end of file, expecting '"' - at «stdin»:3:5: - 2| # Note that this file must not end with a newline. 
3| a 1"$ | ^ diff --git a/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp index 32f776795..a4472156b 100644 --- a/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp +++ b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp @@ -1,7 +1,5 @@ error: attribute 'z' already defined at «stdin»:3:16 - at «stdin»:2:3: - 1| { 2| x.z = 3; | ^ diff --git a/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp index 0437cd50c..ead1f0dbd 100644 --- a/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp +++ b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp @@ -1,7 +1,5 @@ error: attribute 'y' already defined at «stdin»:3:9 - at «stdin»:2:3: - 1| { 2| x.y.y = 3; | ^ diff --git a/tests/functional/lang/parse-fail-patterns-1.err.exp b/tests/functional/lang/parse-fail-patterns-1.err.exp index 634a04aaa..6ba39d884 100644 --- a/tests/functional/lang/parse-fail-patterns-1.err.exp +++ b/tests/functional/lang/parse-fail-patterns-1.err.exp @@ -1,7 +1,5 @@ error: duplicate formal function argument 'args' - at «stdin»:1:1: - 1| args@{args, x, y, z}: x | ^ 2| diff --git a/tests/functional/lang/parse-fail-regression-20060610.err.exp b/tests/functional/lang/parse-fail-regression-20060610.err.exp index 167d01e85..d8875a6a5 100644 --- a/tests/functional/lang/parse-fail-regression-20060610.err.exp +++ b/tests/functional/lang/parse-fail-regression-20060610.err.exp @@ -1,7 +1,5 @@ error: undefined variable 'gcc' - at «stdin»:8:12: - 7| 8| body = ({ | ^ diff --git a/tests/functional/lang/parse-fail-undef-var-2.err.exp b/tests/functional/lang/parse-fail-undef-var-2.err.exp index 77c96bbd2..a58d8dca4 100644 --- a/tests/functional/lang/parse-fail-undef-var-2.err.exp +++ b/tests/functional/lang/parse-fail-undef-var-2.err.exp @@ -1,7 +1,5 @@ error: syntax error, unexpected ':', expecting '}' - at «stdin»:3:13: - 2| 3| f = {x, y : | ^ diff --git a/tests/functional/lang/parse-fail-undef-var.err.exp b/tests/functional/lang/parse-fail-undef-var.err.exp index 48e88747f..3d143d9af 100644 --- a/tests/functional/lang/parse-fail-undef-var.err.exp +++ b/tests/functional/lang/parse-fail-undef-var.err.exp @@ -1,7 +1,5 @@ error: undefined variable 'y' - at «stdin»:1:4: - 1| x: y | ^ 2| diff --git a/tests/functional/lang/parse-fail-utf8.err.exp b/tests/functional/lang/parse-fail-utf8.err.exp index 6087479a3..e83abdb9e 100644 --- a/tests/functional/lang/parse-fail-utf8.err.exp +++ b/tests/functional/lang/parse-fail-utf8.err.exp @@ -1,6 +1,4 @@ error: syntax error, unexpected invalid token, expecting end of file - at «stdin»:1:5: - 1| 123 | ^ diff --git a/tests/functional/lang/symlink-resolution/foo/lib/default.nix b/tests/functional/lang/symlink-resolution/foo/lib/default.nix new file mode 100644 index 000000000..8b8441b91 --- /dev/null +++ b/tests/functional/lang/symlink-resolution/foo/lib/default.nix @@ -0,0 +1 @@ +"test" diff --git a/tests/functional/lang/symlink-resolution/foo/overlays b/tests/functional/lang/symlink-resolution/foo/overlays new file mode 120000 index 000000000..0d44a21c5 --- /dev/null +++ b/tests/functional/lang/symlink-resolution/foo/overlays @@ -0,0 +1 @@ +../overlays \ No newline at end of file diff --git a/tests/functional/lang/symlink-resolution/overlays/overlay.nix b/tests/functional/lang/symlink-resolution/overlays/overlay.nix new file mode 100644 index 000000000..b0368308e --- /dev/null +++ 
b/tests/functional/lang/symlink-resolution/overlays/overlay.nix @@ -0,0 +1 @@ +import ../lib diff --git a/tests/functional/local.mk b/tests/functional/local.mk index e6f16be43..888c7e18a 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -70,7 +70,9 @@ nix_tests = \ build-remote-trustless-should-pass-2.sh \ build-remote-trustless-should-pass-3.sh \ build-remote-trustless-should-fail-0.sh \ + build-remote-with-mounted-ssh-ng.sh \ nar-access.sh \ + impure-eval.sh \ pure-eval.sh \ eval.sh \ repl.sh \ @@ -128,21 +130,21 @@ nix_tests = \ impure-env.sh ifeq ($(HAVE_LIBCPUID), 1) - nix_tests += compute-levels.sh + nix_tests += compute-levels.sh endif ifeq ($(ENABLE_BUILD), yes) - nix_tests += test-libstoreconsumer.sh + nix_tests += test-libstoreconsumer.sh - ifeq ($(BUILD_SHARED_LIBS), 1) - nix_tests += plugins.sh - endif + ifeq ($(BUILD_SHARED_LIBS), 1) + nix_tests += plugins.sh + endif endif $(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \ - $(d)/test-libstoreconsumer/test-libstoreconsumer + $(buildprefix)$(d)/test-libstoreconsumer/test-libstoreconsumer $(d)/plugins.sh.test $(d)/plugins.sh.test-debug: \ - $(d)/plugins/libplugintest.$(SO_EXT) + $(buildprefix)$(d)/plugins/libplugintest.$(SO_EXT) install-tests += $(foreach x, $(nix_tests), $(d)/$(x)) diff --git a/tests/functional/logging.sh b/tests/functional/logging.sh index 1481b9b36..1ccc21d0b 100644 --- a/tests/functional/logging.sh +++ b/tests/functional/logging.sh @@ -15,7 +15,7 @@ nix-build dependencies.nix --no-out-link --compress-build-log [ "$(nix-store -l $path)" = FOO ] # test whether empty logs work fine with `nix log`. -builder="$(mktemp)" +builder="$(realpath "$(mktemp)")" echo -e "#!/bin/sh\nmkdir \$out" > "$builder" outp="$(nix-build -E \ 'with import ./config.nix; mkDerivation { name = "fnord"; builder = '"$builder"'; }' \ diff --git a/tests/functional/nix-copy-ssh-common.sh b/tests/functional/nix-copy-ssh-common.sh new file mode 100644 index 000000000..cc8314ff7 --- /dev/null +++ b/tests/functional/nix-copy-ssh-common.sh @@ -0,0 +1,70 @@ +proto=$1 +shift +(( $# == 0 )) + +clearStore +clearCache + +mkdir -p $TEST_ROOT/stores + +# Create path to copy back and forth +outPath=$(nix-build --no-out-link dependencies.nix) + +storeQueryParam="store=${NIX_STORE_DIR}" + +realQueryParam () { + echo "real=$1$NIX_STORE_DIR" +} + +remoteRoot="$TEST_ROOT/stores/$proto" + +clearRemoteStore () { + chmod -R u+w "$remoteRoot" || true + rm -rf "$remoteRoot" +} + +clearRemoteStore + +remoteStore="${proto}://localhost?${storeQueryParam}&remote-store=${remoteRoot}%3f${storeQueryParam}%26$(realQueryParam "$remoteRoot")" + +# Copy to store + +args=() +if [[ "$proto" == "ssh-ng" ]]; then + # TODO investigate discrepancy + args+=(--no-check-sigs) +fi + +[ ! -f ${remoteRoot}${outPath}/foobar ] +nix copy "${args[@]}" --to "$remoteStore" $outPath +[ -f ${remoteRoot}${outPath}/foobar ] + +# Copy back from store + +clearStore + +[ ! 
-f $outPath/foobar ] +nix copy --no-check-sigs --from "$remoteStore" $outPath +[ -f $outPath/foobar ] + +# Check --substitute-on-destination, avoiding the corrupted store + +clearRemoteStore + +corruptedRoot=$TEST_ROOT/stores/corrupted +corruptedStore="${corruptedRoot}?${storeQueryParam}&$(realQueryParam "$corruptedRoot")" + +# Copy it to the corrupted store +nix copy --no-check-sigs "$outPath" --to "$corruptedStore" + +# Corrupt it in there +corruptPath="${corruptedRoot}${outPath}" +chmod +w "$corruptPath" +echo "not supposed to be here" > "$corruptPath/foobarbaz" +chmod -w "$corruptPath" + +# Copy from the corrupted store with the regular store as a +# substituter. It must use the substituter, not the source store, in +# order to avoid errors. +NIX_CONFIG=$(echo -e "substituters = local\nrequire-sigs = false") \ + nix copy --no-check-sigs --from "$corruptedStore" --to "$remoteStore" --substitute-on-destination "$outPath" diff --git a/tests/functional/nix-copy-ssh-ng.sh b/tests/functional/nix-copy-ssh-ng.sh index 463b5e0c4..62e99cd24 100644 --- a/tests/functional/nix-copy-ssh-ng.sh +++ b/tests/functional/nix-copy-ssh-ng.sh @@ -1,18 +1,14 @@ source common.sh -clearStore -clearCache +source nix-copy-ssh-common.sh "ssh-ng" -remoteRoot=$TEST_ROOT/store2 -chmod -R u+w "$remoteRoot" || true -rm -rf "$remoteRoot" +clearStore +clearRemoteStore outPath=$(nix-build --no-out-link dependencies.nix) -nix store info --store "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" +nix store info --store "$remoteStore" # Regression test for https://github.com/NixOS/nix/issues/6253 -nix copy --to "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath --no-check-sigs & -nix copy --to "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath --no-check-sigs - -[ -f $remoteRoot$outPath/foobar ] +nix copy --to "$remoteStore" $outPath --no-check-sigs & +nix copy --to "$remoteStore" $outPath --no-check-sigs diff --git a/tests/functional/nix-copy-ssh.sh b/tests/functional/nix-copy-ssh.sh index eb801548d..12e8346bc 100644 --- a/tests/functional/nix-copy-ssh.sh +++ b/tests/functional/nix-copy-ssh.sh @@ -1,20 +1,3 @@ source common.sh -clearStore -clearCache - -remoteRoot=$TEST_ROOT/store2 -chmod -R u+w "$remoteRoot" || true -rm -rf "$remoteRoot" - -outPath=$(nix-build --no-out-link dependencies.nix) - -nix copy --to "ssh://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath - -[ -f $remoteRoot$outPath/foobar ] - -clearStore - -nix copy --no-check-sigs --from "ssh://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath - -[ -f $outPath/foobar ] +source nix-copy-ssh-common.sh "ssh" diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 7c478a0cd..eced4d3f1 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -47,7 +47,7 @@ cp ./config.nix $flake1Dir/ # Test upgrading from nix-env. 
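# (Orientation for the changes below: profile entries used to be addressed by
# positional index, e.g. `nix profile remove 0`; `nix profile list` now
# reports a `Name:` field per entry, and the subcommands accept that name
# instead, e.g. `nix profile remove foo`.)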
nix-env -f ./user-envs.nix -i foo-1.0 -nix profile list | grep -A2 'Index:.*0' | grep 'Store paths:.*foo-1.0' +nix profile list | grep -A2 'Name:.*foo' | grep 'Store paths:.*foo-1.0' nix profile install $flake1Dir -L nix profile list | grep -A4 'Index:.*1' | grep 'Locked flake URL:.*narHash' [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] @@ -81,7 +81,7 @@ nix profile rollback # Test uninstall. [ -e $TEST_HOME/.nix-profile/bin/foo ] -nix profile remove 0 +nix profile remove foo (! [ -e $TEST_HOME/.nix-profile/bin/foo ]) nix profile history | grep 'foo: 1.0 -> ∅' nix profile diff-closures | grep 'Version 3 -> 4' @@ -93,6 +93,13 @@ nix profile remove 1 nix profile install $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] +# Test packages with same name from different sources +mkdir $TEST_ROOT/simple-too +cp ./simple.nix ./config.nix simple.builder.sh $TEST_ROOT/simple-too +nix profile install --file $TEST_ROOT/simple-too/simple.nix '' +nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple1' +nix profile remove simple1 + # Test wipe-history. nix profile wipe-history [[ $(nix profile history | grep Version | wc -l) -eq 1 ]] @@ -104,7 +111,7 @@ nix profile upgrade 0 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" # Test new install of CA package. -nix profile remove 0 +nix profile remove flake1 printf 4.0 > $flake1Dir/version printf Utrecht > $flake1Dir/who nix profile install $flake1Dir @@ -112,26 +119,27 @@ nix profile install $flake1Dir [[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]] # Override the outputs. -nix profile remove 0 1 +nix profile remove simple flake1 nix profile install "$flake1Dir^*" [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]] [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] printf Nix > $flake1Dir/who -nix profile upgrade 0 +nix profile list +nix profile upgrade flake1 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]] [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] -nix profile remove 0 +nix profile remove flake1 nix profile install "$flake1Dir^man" (! [ -e $TEST_HOME/.nix-profile/bin/hello ]) [ -e $TEST_HOME/.nix-profile/share/man ] (! [ -e $TEST_HOME/.nix-profile/include ]) # test priority -nix profile remove 0 +nix profile remove flake1 # Make another flake. flake2Dir=$TEST_ROOT/flake2 diff --git a/tests/functional/remote-store.sh b/tests/functional/remote-store.sh index 5c7bfde46..dc80f8b55 100644 --- a/tests/functional/remote-store.sh +++ b/tests/functional/remote-store.sh @@ -19,18 +19,7 @@ else fi # Test import-from-derivation through the daemon. -[[ $(nix eval --impure --raw --expr ' - with import ./config.nix; - import ( - mkDerivation { - name = "foo"; - bla = import ./dependencies.nix {}; - buildCommand = " - echo \\\"hi\\\" > $out - "; - } - ) -') = hi ]] +[[ $(nix eval --impure --raw --file ./ifd.nix) = hi ]] storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index 197ae7a10..3de26eb36 100644 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -14,8 +14,8 @@ nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I sr (! 
nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel') nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel' -I src=../../src -(! nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in ') -nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in ' -I src=. +expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "forbidden in restricted mode" +nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. p=$(nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval --allowed-uris "file://$(pwd)") cmp $p restricted.sh @@ -39,6 +39,18 @@ nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT - [[ $(nix eval --raw --impure --restrict-eval -I . --expr 'builtins.readFile "${import ./simple.nix}/hello"') == 'Hello World!' ]] +# Check that we can't follow a symlink outside of the allowed paths. +mkdir -p $TEST_ROOT/tunnel.d $TEST_ROOT/foo2 +ln -sfn .. $TEST_ROOT/tunnel.d/tunnel +echo foo > $TEST_ROOT/bar + +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" + +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" + +# Reading the parents of allowed paths should show only the ancestors of the allowed paths. +[[ $(nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d) == '{ "tunnel.d" = "directory"; }' ]] + # Check whether we can leak symlink information through directory traversal. traverseDir="$(pwd)/restricted-traverse-me" ln -sfn "$(pwd)/restricted-secret" "$(pwd)/restricted-innocent" diff --git a/tests/functional/shell-hello.nix b/tests/functional/shell-hello.nix index 3fdd3501d..dfe66ef93 100644 --- a/tests/functional/shell-hello.nix +++ b/tests/functional/shell-hello.nix @@ -23,4 +23,20 @@ with import ./config.nix; chmod +x $dev/bin/hello2 ''; }; + + salve-mundi = mkDerivation { + name = "salve-mundi"; + outputs = [ "out" ]; + meta.outputsToInstall = [ "out" ]; + buildCommand = + '' + mkdir -p $out/bin + + cat > $out/bin/hello <&''$NIX_LOG_FD'' + "fi" + "echo Hello" + "mkdir $out" + "cat /proc/sys/kernel/hostname > $out/host" + ] + }" ]; + outputs = [ "out" ]; + } + ''; +in + +{ + name = "remote-builds-ssh-ng"; + + nodes = + { builder = + { config, pkgs, ... }: + { services.openssh.enable = true; + virtualisation.writableStore = true; + nix.settings.sandbox = true; + nix.settings.substituters = lib.mkForce [ ]; + }; + + client = + { config, lib, pkgs, ... 
}: + { nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = + [ { hostName = "builder"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + protocol = "ssh-ng"; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + }; + }; + + testScript = { nodes }: '' + # fmt: off + import subprocess + + start_all() + + # Create an SSH key on the client. + subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") + + # Install the SSH key on the builder. + client.wait_for_unit("network.target") + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") + + # Perform a build + out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") + + # Verify that the build was done on the builder + builder.succeed(f"test -e {out.strip()}") + + # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix + buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") + print(buildOutput) + + # Make sure that we get the expected build output + client.succeed("grep -qF Hello build-output") + + # We don't want phase reporting in the build output + client.fail("grep -qF '@nix' build-output") + + # Get the log file + client.succeed(f"nix-store --read-log {out.strip()} > log-output") + # Prefix the log lines to avoid nix intercepting lines starting with @nix + logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") + print(logOutput) + + # Check that we get phase reporting in the log file + client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") + ''; +} diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 1c96cc787..ad7f509db 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -90,22 +90,22 @@ in # Perform a build and check that it was performed on the builder. out = client.succeed( - "nix-build ${expr nodes.client.config 1} 2> build-output", + "nix-build ${expr nodes.client 1} 2> build-output", "grep -q Hello build-output" ) builder1.succeed(f"test -e {out}") # And a parallel build. - paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client.config 2})\!out $(nix-instantiate ${expr nodes.client.config 3})\!out') + paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') out1, out2 = paths.split() builder1.succeed(f"test -e {out1} -o -e {out2}") builder2.succeed(f"test -e {out1} -o -e {out2}") # And a failing build. - client.fail("nix-build ${expr nodes.client.config 5}") + client.fail("nix-build ${expr nodes.client 5}") # Test whether the build hook automatically skips unavailable builders. 
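      # (builder1.block() cuts that builder's network in the test driver, so the
      # build hook's connection attempt fails and the build should fall through
      # to builder2.)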
builder1.block() - client.succeed("nix-build ${expr nodes.client.config 4}") + client.succeed("nix-build ${expr nodes.client 4}") ''; } diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index 6e8d884a0..04f3590e1 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -108,7 +108,7 @@ in flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json ''; environment.systemPackages = [ pkgs.jq ]; - networking.hosts.${(builtins.head nodes.sourcehut.config.networking.interfaces.eth1.ipv4.addresses).address} = + networking.hosts.${(builtins.head nodes.sourcehut.networking.interfaces.eth1.ipv4.addresses).address} = [ "git.sr.ht" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ]; }; diff --git a/tests/unit/libexpr-support/local.mk b/tests/unit/libexpr-support/local.mk new file mode 100644 index 000000000..0501de33c --- /dev/null +++ b/tests/unit/libexpr-support/local.mk @@ -0,0 +1,23 @@ +libraries += libexpr-test-support + +libexpr-test-support_NAME = libnixexpr-test-support + +libexpr-test-support_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libexpr-test-support_INSTALL_DIR := $(checklibdir) +else + libexpr-test-support_INSTALL_DIR := +endif + +libexpr-test-support_SOURCES := \ + $(wildcard $(d)/tests/*.cc) \ + $(wildcard $(d)/tests/value/*.cc) + +libexpr-test-support_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES) + +libexpr-test-support_LIBS = \ + libstore-test-support libutil-test-support \ + libexpr libstore libutil + +libexpr-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/src/libexpr/tests/libexpr.hh b/tests/unit/libexpr-support/tests/libexpr.hh similarity index 98% rename from src/libexpr/tests/libexpr.hh rename to tests/unit/libexpr-support/tests/libexpr.hh index 968431446..d720cedde 100644 --- a/src/libexpr/tests/libexpr.hh +++ b/tests/unit/libexpr-support/tests/libexpr.hh @@ -8,6 +8,7 @@ #include "nixexpr.hh" #include "eval.hh" #include "eval-inline.hh" +#include "eval-settings.hh" #include "store-api.hh" #include "tests/libstore.hh" @@ -18,6 +19,7 @@ namespace nix { static void SetUpTestSuite() { LibStoreTest::SetUpTestSuite(); initGC(); + evalSettings.nixPath = {}; } protected: diff --git a/tests/unit/libexpr-support/tests/value/context.cc b/tests/unit/libexpr-support/tests/value/context.cc new file mode 100644 index 000000000..8658bdaef --- /dev/null +++ b/tests/unit/libexpr-support/tests/value/context.cc @@ -0,0 +1,30 @@ +#include + +#include "tests/path.hh" +#include "tests/value/context.hh" + +namespace rc { +using namespace nix; + +Gen Arbitrary::arbitrary() +{ + return gen::just(NixStringContextElem::DrvDeep { + .drvPath = *gen::arbitrary(), + }); +} + +Gen Arbitrary::arbitrary() +{ + switch (*gen::inRange(0, std::variant_size_v)) { + case 0: + return gen::just(*gen::arbitrary()); + case 1: + return gen::just(*gen::arbitrary()); + case 2: + return gen::just(*gen::arbitrary()); + default: + assert(false); + } +} + +} diff --git a/src/libexpr/tests/value/context.hh b/tests/unit/libexpr-support/tests/value/context.hh similarity index 95% rename from src/libexpr/tests/value/context.hh rename to tests/unit/libexpr-support/tests/value/context.hh index c0bc97ba3..8c68c78bb 100644 --- a/src/libexpr/tests/value/context.hh +++ b/tests/unit/libexpr-support/tests/value/context.hh @@ -3,7 +3,7 @@ #include -#include +#include "value/context.hh" namespace rc { using namespace nix; diff --git a/src/libexpr/tests/derived-path.cc b/tests/unit/libexpr/derived-path.cc similarity 
index 100% rename from src/libexpr/tests/derived-path.cc rename to tests/unit/libexpr/derived-path.cc diff --git a/src/libexpr/tests/error_traces.cc b/tests/unit/libexpr/error_traces.cc similarity index 99% rename from src/libexpr/tests/error_traces.cc rename to tests/unit/libexpr/error_traces.cc index 139366bcd..81498f65a 100644 --- a/src/libexpr/tests/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -906,12 +906,12 @@ namespace nix { ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, hintfmt("value is %s while a list was expected", "an integer"), - hintfmt("while evaluating the return value of the function passed to buitlins.concatMap")); + hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, hintfmt("value is %s while a list was expected", "a string"), - hintfmt("while evaluating the return value of the function passed to buitlins.concatMap")); + hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); } diff --git a/tests/unit/libexpr/eval.cc b/tests/unit/libexpr/eval.cc new file mode 100644 index 000000000..93d3f658f --- /dev/null +++ b/tests/unit/libexpr/eval.cc @@ -0,0 +1,141 @@ +#include +#include + +#include "eval.hh" +#include "tests/libexpr.hh" + +namespace nix { + +TEST(nix_isAllowedURI, http_example_com) { + Strings allowed; + allowed.push_back("http://example.com"); + + ASSERT_TRUE(isAllowedURI("http://example.com", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.co", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.como", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.org", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.org/foo", allowed)); +} + +TEST(nix_isAllowedURI, http_example_com_foo) { + Strings allowed; + allowed.push_back("http://example.com/foo"); + + ASSERT_TRUE(isAllowedURI("http://example.com/foo", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.como", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.org/foo", allowed)); + // Broken? 
+ // ASSERT_TRUE(isAllowedURI("http://example.com/foo?ok=1", allowed)); +} + +TEST(nix_isAllowedURI, http) { + Strings allowed; + allowed.push_back("http://"); + + ASSERT_TRUE(isAllowedURI("http://", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo/", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("https://", allowed)); + ASSERT_FALSE(isAllowedURI("http:foo", allowed)); +} + +TEST(nix_isAllowedURI, https) { + Strings allowed; + allowed.push_back("https://"); + + ASSERT_TRUE(isAllowedURI("https://example.com", allowed)); + ASSERT_TRUE(isAllowedURI("https://example.com/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com/https:", allowed)); +} + +TEST(nix_isAllowedURI, absolute_path) { + Strings allowed; + allowed.push_back("/var/evil"); // bad idea + + ASSERT_TRUE(isAllowedURI("/var/evil", allowed)); + ASSERT_TRUE(isAllowedURI("/var/evil/", allowed)); + ASSERT_TRUE(isAllowedURI("/var/evil/foo", allowed)); + ASSERT_TRUE(isAllowedURI("/var/evil/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evi", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com/var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil/foo", allowed)); +} + +TEST(nix_isAllowedURI, file_url) { + Strings allowed; + allowed.push_back("file:///var/evil"); // bad idea + + ASSERT_TRUE(isAllowedURI("file:///var/evil", allowed)); + ASSERT_TRUE(isAllowedURI("file:///var/evil/", allowed)); + ASSERT_TRUE(isAllowedURI("file:///var/evil/foo", allowed)); + ASSERT_TRUE(isAllowedURI("file:///var/evil/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evi", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com/var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http:///var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://var/evil/", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evi", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evilo", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evilo/", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evilo/foo", allowed)); + ASSERT_FALSE(isAllowedURI("file:///", allowed)); + ASSERT_FALSE(isAllowedURI("file://", allowed)); +} + +TEST(nix_isAllowedURI, github_all) { + Strings allowed; + allowed.push_back("github:"); + ASSERT_TRUE(isAllowedURI("github:", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar/feat-multi-bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar?ref=refs/heads/feat-multi-bar", allowed)); + ASSERT_TRUE(isAllowedURI("github://foo/bar", allowed)); + 
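// Aside: taken together, the cases in this file pin down a simple prefix
// rule. The lambda below is an illustrative model that is consistent with
// these assertions, not the actual implementation in src/libexpr/eval.cc,
// and `allowedByPrefix` is an invented name (requires <string_view>):
auto allowedByPrefix = [](std::string_view uri, std::string_view prefix) {
    if (uri == prefix)
        return true;
    if (prefix.empty() || uri.size() <= prefix.size()
        || uri.compare(0, prefix.size(), prefix) != 0)
        return false;
    // A bare scheme such as "github:" admits its whole scheme; otherwise
    // the match must end exactly at a path-component boundary.
    bool bareScheme =
        prefix.back() == ':' && prefix.find('/') == std::string_view::npos;
    return bareScheme || prefix.back() == '/' || uri[prefix.size()] == '/';
};
ASSERT_TRUE(allowedByPrefix("http://example.com/foo", "http://example.com"));
ASSERT_FALSE(allowedByPrefix("http://example.como", "http://example.com"));
ASSERT_TRUE(allowedByPrefix("github:foo/bar", "github:"));
ASSERT_FALSE(allowedByPrefix("https://foo/bar:baz", "https://foo/bar:"));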
ASSERT_FALSE(isAllowedURI("https://github:443/foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file://github:foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file:///github:foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("github", allowed)); +} + +TEST(nix_isAllowedURI, github_org) { + Strings allowed; + allowed.push_back("github:foo"); + ASSERT_FALSE(isAllowedURI("github:", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar/feat-multi-bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar?ref=refs/heads/feat-multi-bar", allowed)); + ASSERT_FALSE(isAllowedURI("github://foo/bar", allowed)); + ASSERT_FALSE(isAllowedURI("https://github:443/foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file://github:foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file:///github:foo/bar/archive/master.tar.gz", allowed)); +} + +TEST(nix_isAllowedURI, non_scheme_colon) { + Strings allowed; + allowed.push_back("https://foo/bar:"); + ASSERT_TRUE(isAllowedURI("https://foo/bar:", allowed)); + ASSERT_TRUE(isAllowedURI("https://foo/bar:/baz", allowed)); + ASSERT_FALSE(isAllowedURI("https://foo/bar:baz", allowed)); +} + +} // namespace nix \ No newline at end of file diff --git a/src/libexpr/tests/flakeref.cc b/tests/unit/libexpr/flake/flakeref.cc similarity index 100% rename from src/libexpr/tests/flakeref.cc rename to tests/unit/libexpr/flake/flakeref.cc diff --git a/tests/unit/libexpr/flake/url-name.cc b/tests/unit/libexpr/flake/url-name.cc new file mode 100644 index 000000000..85387b323 --- /dev/null +++ b/tests/unit/libexpr/flake/url-name.cc @@ -0,0 +1,69 @@ +#include "flake/url-name.hh" +#include + +namespace nix { + +/* ----------- tests for url-name.hh --------------------------------------------------*/ + + TEST(getNameFromURL, getNameFromURL) { + ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#legacyPackages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.Hello")), "Hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); + ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); + + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + ASSERT_EQ(getNameFromURL(parseURL("github:edolstra/nix-warez?rev=1234&dir=blender&ref=master")), "blender"); + + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#hello")), "hello"); + 
ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + + ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/dwarffs")), "dwarffs"); + ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/nix-warez?dir=blender")), "blender"); + ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("git+ssh://git@github.com/someuser/my-repo#")), "my-repo"); + ASSERT_EQ(getNameFromURL(parseURL("git+git://github.com/someuser/my-repo?rev=v1.2.3")), "my-repo"); + ASSERT_EQ(getNameFromURL(parseURL("git+ssh:///home/user/project?dir=subproject&rev=v2.4")), "subproject"); + ASSERT_EQ(getNameFromURL(parseURL("git+http://not-even-real#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("git+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); + + ASSERT_EQ(getNameFromURL(parseURL("tarball+http://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.jq")), "jq"); + ASSERT_EQ(getNameFromURL(parseURL("tarball+https://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.hg")), "hg"); + ASSERT_EQ(getNameFromURL(parseURL("tarball+file:///home/user/Downloads/nixpkgs-2.18.1#packages.aarch64-darwin.ripgrep")), "ripgrep"); + + ASSERT_EQ(getNameFromURL(parseURL("https://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv"); + ASSERT_EQ(getNameFromURL(parseURL("http://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv"); + + ASSERT_EQ(getNameFromURL(parseURL("file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("file+file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("file+http://not-even-real#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("file+http://gitfantasy.com/org/user/notaflake")), "notaflake"); + ASSERT_EQ(getNameFromURL(parseURL("file+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); + + ASSERT_EQ(getNameFromURL(parseURL("https://www.github.com/")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*")), std::nullopt); + } +} diff --git a/src/libexpr/tests/json.cc b/tests/unit/libexpr/json.cc similarity index 100% rename from src/libexpr/tests/json.cc rename to tests/unit/libexpr/json.cc diff --git 
a/tests/unit/libexpr/local.mk b/tests/unit/libexpr/local.mk new file mode 100644 index 000000000..25810ad9c --- /dev/null +++ b/tests/unit/libexpr/local.mk @@ -0,0 +1,37 @@ +check: libexpr-tests_RUN + +programs += libexpr-tests + +libexpr-tests_NAME := libnixexpr-tests + +libexpr-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data + +libexpr-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libexpr-tests_INSTALL_DIR := $(checkbindir) +else + libexpr-tests_INSTALL_DIR := +endif + +libexpr-tests_SOURCES := \ $(wildcard $(d)/*.cc) \ $(wildcard $(d)/value/*.cc) \ $(wildcard $(d)/flake/*.cc) + +libexpr-tests_EXTRA_INCLUDES = \ -I tests/unit/libexpr-support \ -I tests/unit/libstore-support \ -I tests/unit/libutil-support \ -I src/libexpr \ -I src/libfetchers \ -I src/libstore \ -I src/libutil + +libexpr-tests_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES) + +libexpr-tests_LIBS = \ libexpr-test-support libstore-test-support libutil-test-support \ libexpr libfetchers libstore libutil + +libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock diff --git a/src/libexpr/tests/primops.cc b/tests/unit/libexpr/primops.cc similarity index 95% rename from src/libexpr/tests/primops.cc rename to tests/unit/libexpr/primops.cc index d820b860e..6d7649b3c 100644 --- a/src/libexpr/tests/primops.cc +++ b/tests/unit/libexpr/primops.cc @@ -1,6 +1,9 @@ #include #include +#include "eval-settings.hh" +#include "memory-input-accessor.hh" + #include "tests/libexpr.hh" namespace nix { @@ -148,10 +151,25 @@ namespace nix { } TEST_F(PrimOpTest, unsafeGetAttrPos) { - // The `y` attribute is at position - const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }"; + state.corepkgsFS->addFile(CanonPath("foo.nix"), "{ y = \"x\"; }"); + + auto expr = "builtins.unsafeGetAttrPos \"y\" (import )"; auto v = eval(expr); - ASSERT_THAT(v, IsNull()); + ASSERT_THAT(v, IsAttrsOfSize(3)); + + auto file = v.attrs->find(createSymbol("file")); + ASSERT_NE(file, nullptr); + ASSERT_THAT(*file->value, IsString()); + auto s = baseNameOf(file->value->string_view()); + ASSERT_EQ(s, "foo.nix"); + + auto line = v.attrs->find(createSymbol("line")); + ASSERT_NE(line, nullptr); + ASSERT_THAT(*line->value, IsIntEq(1)); + + auto column = v.attrs->find(createSymbol("column")); + ASSERT_NE(column, nullptr); + ASSERT_THAT(*column->value, IsIntEq(3)); } TEST_F(PrimOpTest, hasAttr) { @@ -586,7 +604,7 @@ namespace nix { ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1")); } - TEST_F(PrimOpTest, hashStringInvalidHashType) { + TEST_F(PrimOpTest, hashStringInvalidHashAlgorithm) { ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error); } @@ -614,7 +632,7 @@ namespace nix { TEST_F(PrimOpTest, currentSystem) { auto v = eval("builtins.currentSystem"); - ASSERT_THAT(v, IsStringEq(settings.thisSystem.get())); + ASSERT_THAT(v, IsStringEq(evalSettings.getCurrentSystem())); } TEST_F(PrimOpTest, derivation) { @@ -814,6 +832,14 @@ namespace nix { ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO")); } + TEST_F(PrimOpTest, match5) { + // The regex "\\{}" is valid and matches the string "{}". + // Caused a regression before when trying to switch from std::regex to boost::regex.
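// Aside: the quoting below is easy to misread. The C++ literal "\"\\\\{}\""
// denotes the Nix string literal "\\{}", whose value is the three characters
// \{}, i.e. an escaped brace followed by a plain brace: a regex that matches
// exactly the text "{}". The same pattern can be checked standalone with
// std::regex, assuming the POSIX extended grammar that builtins.match is
// documented to use (requires <regex> and <cassert>):
{
    std::regex re("\\{}", std::regex::extended); // the regex \{}
    assert(std::regex_match("{}", re));          // matches exactly "{}"
}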
+ // See https://github.com/NixOS/nix/pull/7762#issuecomment-1834303659 + auto v = eval("builtins.match \"\\\\{}\" \"{}\""); + ASSERT_THAT(v, IsListOfSize(0)); + } + TEST_F(PrimOpTest, attrNames) { auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }"); ASSERT_THAT(v, IsListOfSize(4)); diff --git a/src/libexpr/tests/search-path.cc b/tests/unit/libexpr/search-path.cc similarity index 100% rename from src/libexpr/tests/search-path.cc rename to tests/unit/libexpr/search-path.cc diff --git a/src/libexpr/tests/trivial.cc b/tests/unit/libexpr/trivial.cc similarity index 100% rename from src/libexpr/tests/trivial.cc rename to tests/unit/libexpr/trivial.cc diff --git a/src/libexpr/tests/value/context.cc b/tests/unit/libexpr/value/context.cc similarity index 83% rename from src/libexpr/tests/value/context.cc rename to tests/unit/libexpr/value/context.cc index 92d4889ab..761286dbd 100644 --- a/src/libexpr/tests/value/context.cc +++ b/tests/unit/libexpr/value/context.cc @@ -117,36 +117,6 @@ TEST(NixStringContextElemTest, built_built_xp) { NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature); } -} - -namespace rc { -using namespace nix; - -Gen Arbitrary::arbitrary() -{ - return gen::just(NixStringContextElem::DrvDeep { - .drvPath = *gen::arbitrary(), - }); -} - -Gen Arbitrary::arbitrary() -{ - switch (*gen::inRange(0, std::variant_size_v)) { - case 0: - return gen::just(*gen::arbitrary()); - case 1: - return gen::just(*gen::arbitrary()); - case 2: - return gen::just(*gen::arbitrary()); - default: - assert(false); - } -} - -} - -namespace nix { - #ifndef COVERAGE RC_GTEST_PROP( diff --git a/src/libexpr/tests/value/print.cc b/tests/unit/libexpr/value/print.cc similarity index 99% rename from src/libexpr/tests/value/print.cc rename to tests/unit/libexpr/value/print.cc index 5e96e12ec..a4f6fc014 100644 --- a/src/libexpr/tests/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -114,7 +114,8 @@ TEST_F(ValuePrintingTests, vLambda) TEST_F(ValuePrintingTests, vPrimOp) { Value vPrimOp; - vPrimOp.mkPrimOp(nullptr); + PrimOp primOp{}; + vPrimOp.mkPrimOp(&primOp); test(vPrimOp, ""); } diff --git a/tests/unit/libstore-support/local.mk b/tests/unit/libstore-support/local.mk new file mode 100644 index 000000000..56dedd825 --- /dev/null +++ b/tests/unit/libstore-support/local.mk @@ -0,0 +1,21 @@ +libraries += libstore-test-support + +libstore-test-support_NAME = libnixstore-test-support + +libstore-test-support_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libstore-test-support_INSTALL_DIR := $(checklibdir) +else + libstore-test-support_INSTALL_DIR := +endif + +libstore-test-support_SOURCES := $(wildcard $(d)/tests/*.cc) + +libstore-test-support_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES) + +libstore-test-support_LIBS = \ + libutil-test-support \ + libstore libutil + +libstore-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libstore-support/tests/derived-path.cc b/tests/unit/libstore-support/tests/derived-path.cc new file mode 100644 index 000000000..091706dba --- /dev/null +++ b/tests/unit/libstore-support/tests/derived-path.cc @@ -0,0 +1,57 @@ +#include + +#include + +#include "tests/derived-path.hh" + +namespace rc { +using namespace nix; + +Gen Arbitrary::arbitrary() +{ + return gen::just(DerivedPath::Opaque { + .path = *gen::arbitrary(), + }); +} + +Gen Arbitrary::arbitrary() +{ + return gen::just(SingleDerivedPath::Built { + .drvPath = make_ref(*gen::arbitrary()), + .output = (*gen::arbitrary()).name, 
+ }); +} + +Gen Arbitrary::arbitrary() +{ + return gen::just(DerivedPath::Built { + .drvPath = make_ref(*gen::arbitrary()), + .outputs = *gen::arbitrary(), + }); +} + +Gen Arbitrary::arbitrary() +{ + switch (*gen::inRange(0, std::variant_size_v)) { + case 0: + return gen::just(*gen::arbitrary()); + case 1: + return gen::just(*gen::arbitrary()); + default: + assert(false); + } +} + +Gen Arbitrary::arbitrary() +{ + switch (*gen::inRange(0, std::variant_size_v)) { + case 0: + return gen::just(*gen::arbitrary()); + case 1: + return gen::just(*gen::arbitrary()); + default: + assert(false); + } +} + +} diff --git a/src/libstore/tests/derived-path.hh b/tests/unit/libstore-support/tests/derived-path.hh similarity index 100% rename from src/libstore/tests/derived-path.hh rename to tests/unit/libstore-support/tests/derived-path.hh diff --git a/src/libstore/tests/libstore.hh b/tests/unit/libstore-support/tests/libstore.hh similarity index 100% rename from src/libstore/tests/libstore.hh rename to tests/unit/libstore-support/tests/libstore.hh diff --git a/tests/unit/libstore-support/tests/outputs-spec.cc b/tests/unit/libstore-support/tests/outputs-spec.cc new file mode 100644 index 000000000..e9d602203 --- /dev/null +++ b/tests/unit/libstore-support/tests/outputs-spec.cc @@ -0,0 +1,24 @@ +#include "tests/outputs-spec.hh" + +#include + +namespace rc { +using namespace nix; + +Gen Arbitrary::arbitrary() +{ + switch (*gen::inRange(0, std::variant_size_v)) { + case 0: + return gen::just((OutputsSpec) OutputsSpec::All { }); + case 1: + return gen::just((OutputsSpec) OutputsSpec::Names { + *gen::nonEmpty(gen::container(gen::map( + gen::arbitrary(), + [](StorePathName n) { return n.name; }))), + }); + default: + assert(false); + } +} + +} diff --git a/src/libstore/tests/outputs-spec.hh b/tests/unit/libstore-support/tests/outputs-spec.hh similarity index 89% rename from src/libstore/tests/outputs-spec.hh rename to tests/unit/libstore-support/tests/outputs-spec.hh index ded331b33..f5bf9042d 100644 --- a/src/libstore/tests/outputs-spec.hh +++ b/tests/unit/libstore-support/tests/outputs-spec.hh @@ -5,7 +5,7 @@ #include -#include +#include "tests/path.hh" namespace rc { using namespace nix; diff --git a/tests/unit/libstore-support/tests/path.cc b/tests/unit/libstore-support/tests/path.cc new file mode 100644 index 000000000..e5f169e94 --- /dev/null +++ b/tests/unit/libstore-support/tests/path.cc @@ -0,0 +1,83 @@ +#include + +#include + +#include "path-regex.hh" +#include "store-api.hh" + +#include "tests/hash.hh" +#include "tests/path.hh" + +namespace nix { + +void showValue(const StorePath & p, std::ostream & os) +{ + os << p.to_string(); +} + +} + +namespace rc { +using namespace nix; + +Gen Arbitrary::arbitrary() +{ + auto len = *gen::inRange( + 1, + StorePath::MaxPathLen - StorePath::HashLen); + + std::string pre; + pre.reserve(len); + + for (size_t c = 0; c < len; ++c) { + switch (auto i = *gen::inRange(0, 10 + 2 * 26 + 6)) { + case 0 ... 9: + pre += '0' + i; + break; // keep digits from falling through into the uppercase branch below + case 10 ... 35: + pre += 'A' + (i - 10); + break; + case 36 ...
61: + pre += 'a' + (i - 36); + break; + case 62: + pre += '+'; + break; + case 63: + pre += '-'; + break; + case 64: + // names aren't permitted to start with a period, + // so just fall through to the next case here + if (c != 0) { + pre += '.'; + break; + } + case 65: + pre += '_'; + break; + case 66: + pre += '?'; + break; + case 67: + pre += '='; + break; + default: + assert(false); + } + } + + return gen::just(StorePathName { + .name = std::move(pre), + }); +} + +Gen Arbitrary::arbitrary() +{ + return gen::just(StorePath { + *gen::arbitrary(), + (*gen::arbitrary()).name, + }); +} + +} // namespace rc diff --git a/src/libstore/tests/path.hh b/tests/unit/libstore-support/tests/path.hh similarity index 82% rename from src/libstore/tests/path.hh rename to tests/unit/libstore-support/tests/path.hh index 21cb62310..4751b3373 100644 --- a/src/libstore/tests/path.hh +++ b/tests/unit/libstore-support/tests/path.hh @@ -11,6 +11,9 @@ struct StorePathName { std::string name; }; +// For rapidcheck +void showValue(const StorePath & p, std::ostream & os); + } namespace rc { diff --git a/src/libstore/tests/protocol.hh b/tests/unit/libstore-support/tests/protocol.hh similarity index 96% rename from src/libstore/tests/protocol.hh rename to tests/unit/libstore-support/tests/protocol.hh index 466032a79..3c9e52c11 100644 --- a/src/libstore/tests/protocol.hh +++ b/tests/unit/libstore-support/tests/protocol.hh @@ -12,7 +12,7 @@ namespace nix { template class ProtoTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/libstore/" + protocolDir; + Path unitTestData = getUnitTestData() + "/" + protocolDir; Path goldenMaster(std::string_view testStem) const override { return unitTestData + "/" + testStem + ".bin"; diff --git a/src/libstore/tests/common-protocol.cc b/tests/unit/libstore/common-protocol.cc similarity index 94% rename from src/libstore/tests/common-protocol.cc rename to tests/unit/libstore/common-protocol.cc index c09ac6a3e..d23805fc3 100644 --- a/src/libstore/tests/common-protocol.cc +++ b/tests/unit/libstore/common-protocol.cc @@ -84,15 +84,15 @@ CHARACTERIZATION_TEST( (std::tuple { ContentAddress { .method = TextIngestionMethod {}, - .hash = hashString(HashType::htSHA256, "Derive(...)"), + .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, ContentAddress { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, })) @@ -179,7 +179,7 @@ CHARACTERIZATION_TEST( std::optional { ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, }, })) diff --git a/unit-test-data/libstore/common-protocol/content-address.bin b/tests/unit/libstore/data/common-protocol/content-address.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/content-address.bin rename to tests/unit/libstore/data/common-protocol/content-address.bin diff --git a/unit-test-data/libstore/common-protocol/drv-output.bin b/tests/unit/libstore/data/common-protocol/drv-output.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/drv-output.bin rename to tests/unit/libstore/data/common-protocol/drv-output.bin diff --git 
a/unit-test-data/libstore/common-protocol/optional-content-address.bin b/tests/unit/libstore/data/common-protocol/optional-content-address.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/optional-content-address.bin rename to tests/unit/libstore/data/common-protocol/optional-content-address.bin diff --git a/unit-test-data/libstore/common-protocol/optional-store-path.bin b/tests/unit/libstore/data/common-protocol/optional-store-path.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/optional-store-path.bin rename to tests/unit/libstore/data/common-protocol/optional-store-path.bin diff --git a/unit-test-data/libstore/common-protocol/realisation.bin b/tests/unit/libstore/data/common-protocol/realisation.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/realisation.bin rename to tests/unit/libstore/data/common-protocol/realisation.bin diff --git a/unit-test-data/libstore/common-protocol/set.bin b/tests/unit/libstore/data/common-protocol/set.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/set.bin rename to tests/unit/libstore/data/common-protocol/set.bin diff --git a/unit-test-data/libstore/common-protocol/store-path.bin b/tests/unit/libstore/data/common-protocol/store-path.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/store-path.bin rename to tests/unit/libstore/data/common-protocol/store-path.bin diff --git a/unit-test-data/libstore/common-protocol/string.bin b/tests/unit/libstore/data/common-protocol/string.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/string.bin rename to tests/unit/libstore/data/common-protocol/string.bin diff --git a/unit-test-data/libstore/common-protocol/vector.bin b/tests/unit/libstore/data/common-protocol/vector.bin similarity index 100% rename from unit-test-data/libstore/common-protocol/vector.bin rename to tests/unit/libstore/data/common-protocol/vector.bin diff --git a/unit-test-data/libstore/derivation/bad-old-version-dyn-deps.drv b/tests/unit/libstore/data/derivation/bad-old-version-dyn-deps.drv similarity index 100% rename from unit-test-data/libstore/derivation/bad-old-version-dyn-deps.drv rename to tests/unit/libstore/data/derivation/bad-old-version-dyn-deps.drv diff --git a/unit-test-data/libstore/derivation/bad-version.drv b/tests/unit/libstore/data/derivation/bad-version.drv similarity index 100% rename from unit-test-data/libstore/derivation/bad-version.drv rename to tests/unit/libstore/data/derivation/bad-version.drv diff --git a/unit-test-data/libstore/derivation/dynDerivationDeps.drv b/tests/unit/libstore/data/derivation/dynDerivationDeps.drv similarity index 100% rename from unit-test-data/libstore/derivation/dynDerivationDeps.drv rename to tests/unit/libstore/data/derivation/dynDerivationDeps.drv diff --git a/unit-test-data/libstore/derivation/dynDerivationDeps.json b/tests/unit/libstore/data/derivation/dynDerivationDeps.json similarity index 100% rename from unit-test-data/libstore/derivation/dynDerivationDeps.json rename to tests/unit/libstore/data/derivation/dynDerivationDeps.json diff --git a/unit-test-data/libstore/derivation/output-caFixedFlat.json b/tests/unit/libstore/data/derivation/output-caFixedFlat.json similarity index 100% rename from unit-test-data/libstore/derivation/output-caFixedFlat.json rename to tests/unit/libstore/data/derivation/output-caFixedFlat.json diff --git a/unit-test-data/libstore/derivation/output-caFixedNAR.json 
b/tests/unit/libstore/data/derivation/output-caFixedNAR.json similarity index 100% rename from unit-test-data/libstore/derivation/output-caFixedNAR.json rename to tests/unit/libstore/data/derivation/output-caFixedNAR.json diff --git a/unit-test-data/libstore/derivation/output-caFixedText.json b/tests/unit/libstore/data/derivation/output-caFixedText.json similarity index 100% rename from unit-test-data/libstore/derivation/output-caFixedText.json rename to tests/unit/libstore/data/derivation/output-caFixedText.json diff --git a/unit-test-data/libstore/derivation/output-caFloating.json b/tests/unit/libstore/data/derivation/output-caFloating.json similarity index 100% rename from unit-test-data/libstore/derivation/output-caFloating.json rename to tests/unit/libstore/data/derivation/output-caFloating.json diff --git a/unit-test-data/libstore/derivation/output-deferred.json b/tests/unit/libstore/data/derivation/output-deferred.json similarity index 100% rename from unit-test-data/libstore/derivation/output-deferred.json rename to tests/unit/libstore/data/derivation/output-deferred.json diff --git a/unit-test-data/libstore/derivation/output-impure.json b/tests/unit/libstore/data/derivation/output-impure.json similarity index 100% rename from unit-test-data/libstore/derivation/output-impure.json rename to tests/unit/libstore/data/derivation/output-impure.json diff --git a/unit-test-data/libstore/derivation/output-inputAddressed.json b/tests/unit/libstore/data/derivation/output-inputAddressed.json similarity index 100% rename from unit-test-data/libstore/derivation/output-inputAddressed.json rename to tests/unit/libstore/data/derivation/output-inputAddressed.json diff --git a/unit-test-data/libstore/derivation/simple.drv b/tests/unit/libstore/data/derivation/simple.drv similarity index 100% rename from unit-test-data/libstore/derivation/simple.drv rename to tests/unit/libstore/data/derivation/simple.drv diff --git a/unit-test-data/libstore/derivation/simple.json b/tests/unit/libstore/data/derivation/simple.json similarity index 100% rename from unit-test-data/libstore/derivation/simple.json rename to tests/unit/libstore/data/derivation/simple.json diff --git a/unit-test-data/libstore/nar-info/impure.json b/tests/unit/libstore/data/nar-info/impure.json similarity index 100% rename from unit-test-data/libstore/nar-info/impure.json rename to tests/unit/libstore/data/nar-info/impure.json diff --git a/unit-test-data/libstore/nar-info/pure.json b/tests/unit/libstore/data/nar-info/pure.json similarity index 100% rename from unit-test-data/libstore/nar-info/pure.json rename to tests/unit/libstore/data/nar-info/pure.json diff --git a/unit-test-data/libstore/path-info/impure.json b/tests/unit/libstore/data/path-info/impure.json similarity index 100% rename from unit-test-data/libstore/path-info/impure.json rename to tests/unit/libstore/data/path-info/impure.json diff --git a/unit-test-data/libstore/path-info/pure.json b/tests/unit/libstore/data/path-info/pure.json similarity index 100% rename from unit-test-data/libstore/path-info/pure.json rename to tests/unit/libstore/data/path-info/pure.json diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.1.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.1.bin new file mode 100644 index 000000000..61e1d9728 Binary files /dev/null and b/tests/unit/libstore/data/serve-protocol/build-options-2.1.bin differ diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.2.bin 
b/tests/unit/libstore/data/serve-protocol/build-options-2.2.bin new file mode 100644 index 000000000..045c2ff2b Binary files /dev/null and b/tests/unit/libstore/data/serve-protocol/build-options-2.2.bin differ diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.3.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.3.bin new file mode 100644 index 000000000..5c5345883 Binary files /dev/null and b/tests/unit/libstore/data/serve-protocol/build-options-2.3.bin differ diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.7.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.7.bin new file mode 100644 index 000000000..1bc7b02db Binary files /dev/null and b/tests/unit/libstore/data/serve-protocol/build-options-2.7.bin differ diff --git a/unit-test-data/libstore/serve-protocol/build-result-2.2.bin b/tests/unit/libstore/data/serve-protocol/build-result-2.2.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/build-result-2.2.bin rename to tests/unit/libstore/data/serve-protocol/build-result-2.2.bin diff --git a/unit-test-data/libstore/serve-protocol/build-result-2.3.bin b/tests/unit/libstore/data/serve-protocol/build-result-2.3.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/build-result-2.3.bin rename to tests/unit/libstore/data/serve-protocol/build-result-2.3.bin diff --git a/unit-test-data/libstore/serve-protocol/build-result-2.6.bin b/tests/unit/libstore/data/serve-protocol/build-result-2.6.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/build-result-2.6.bin rename to tests/unit/libstore/data/serve-protocol/build-result-2.6.bin diff --git a/unit-test-data/libstore/serve-protocol/content-address.bin b/tests/unit/libstore/data/serve-protocol/content-address.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/content-address.bin rename to tests/unit/libstore/data/serve-protocol/content-address.bin diff --git a/unit-test-data/libstore/serve-protocol/drv-output.bin b/tests/unit/libstore/data/serve-protocol/drv-output.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/drv-output.bin rename to tests/unit/libstore/data/serve-protocol/drv-output.bin diff --git a/unit-test-data/libstore/serve-protocol/optional-content-address.bin b/tests/unit/libstore/data/serve-protocol/optional-content-address.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/optional-content-address.bin rename to tests/unit/libstore/data/serve-protocol/optional-content-address.bin diff --git a/unit-test-data/libstore/serve-protocol/optional-store-path.bin b/tests/unit/libstore/data/serve-protocol/optional-store-path.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/optional-store-path.bin rename to tests/unit/libstore/data/serve-protocol/optional-store-path.bin diff --git a/unit-test-data/libstore/serve-protocol/realisation.bin b/tests/unit/libstore/data/serve-protocol/realisation.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/realisation.bin rename to tests/unit/libstore/data/serve-protocol/realisation.bin diff --git a/unit-test-data/libstore/serve-protocol/set.bin b/tests/unit/libstore/data/serve-protocol/set.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/set.bin rename to tests/unit/libstore/data/serve-protocol/set.bin diff --git a/unit-test-data/libstore/serve-protocol/store-path.bin 
b/tests/unit/libstore/data/serve-protocol/store-path.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/store-path.bin rename to tests/unit/libstore/data/serve-protocol/store-path.bin diff --git a/unit-test-data/libstore/serve-protocol/string.bin b/tests/unit/libstore/data/serve-protocol/string.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/string.bin rename to tests/unit/libstore/data/serve-protocol/string.bin diff --git a/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin b/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin new file mode 100644 index 000000000..8056ec055 Binary files /dev/null and b/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin differ diff --git a/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.4.bin b/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.4.bin new file mode 100644 index 000000000..521b5c423 Binary files /dev/null and b/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.4.bin differ diff --git a/unit-test-data/libstore/serve-protocol/vector.bin b/tests/unit/libstore/data/serve-protocol/vector.bin similarity index 100% rename from unit-test-data/libstore/serve-protocol/vector.bin rename to tests/unit/libstore/data/serve-protocol/vector.bin diff --git a/unit-test-data/libstore/worker-protocol/build-result-1.27.bin b/tests/unit/libstore/data/worker-protocol/build-result-1.27.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/build-result-1.27.bin rename to tests/unit/libstore/data/worker-protocol/build-result-1.27.bin diff --git a/unit-test-data/libstore/worker-protocol/build-result-1.28.bin b/tests/unit/libstore/data/worker-protocol/build-result-1.28.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/build-result-1.28.bin rename to tests/unit/libstore/data/worker-protocol/build-result-1.28.bin diff --git a/unit-test-data/libstore/worker-protocol/build-result-1.29.bin b/tests/unit/libstore/data/worker-protocol/build-result-1.29.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/build-result-1.29.bin rename to tests/unit/libstore/data/worker-protocol/build-result-1.29.bin diff --git a/tests/unit/libstore/data/worker-protocol/build-result-1.37.bin b/tests/unit/libstore/data/worker-protocol/build-result-1.37.bin new file mode 100644 index 000000000..7d6e43fff Binary files /dev/null and b/tests/unit/libstore/data/worker-protocol/build-result-1.37.bin differ diff --git a/unit-test-data/libstore/worker-protocol/content-address.bin b/tests/unit/libstore/data/worker-protocol/content-address.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/content-address.bin rename to tests/unit/libstore/data/worker-protocol/content-address.bin diff --git a/unit-test-data/libstore/worker-protocol/derived-path-1.29.bin b/tests/unit/libstore/data/worker-protocol/derived-path-1.29.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/derived-path-1.29.bin rename to tests/unit/libstore/data/worker-protocol/derived-path-1.29.bin diff --git a/unit-test-data/libstore/worker-protocol/derived-path-1.30.bin b/tests/unit/libstore/data/worker-protocol/derived-path-1.30.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/derived-path-1.30.bin rename to tests/unit/libstore/data/worker-protocol/derived-path-1.30.bin diff --git 
a/unit-test-data/libstore/worker-protocol/drv-output.bin b/tests/unit/libstore/data/worker-protocol/drv-output.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/drv-output.bin rename to tests/unit/libstore/data/worker-protocol/drv-output.bin diff --git a/unit-test-data/libstore/worker-protocol/keyed-build-result-1.29.bin b/tests/unit/libstore/data/worker-protocol/keyed-build-result-1.29.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/keyed-build-result-1.29.bin rename to tests/unit/libstore/data/worker-protocol/keyed-build-result-1.29.bin diff --git a/unit-test-data/libstore/worker-protocol/optional-content-address.bin b/tests/unit/libstore/data/worker-protocol/optional-content-address.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/optional-content-address.bin rename to tests/unit/libstore/data/worker-protocol/optional-content-address.bin diff --git a/unit-test-data/libstore/worker-protocol/optional-store-path.bin b/tests/unit/libstore/data/worker-protocol/optional-store-path.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/optional-store-path.bin rename to tests/unit/libstore/data/worker-protocol/optional-store-path.bin diff --git a/unit-test-data/libstore/worker-protocol/optional-trusted-flag.bin b/tests/unit/libstore/data/worker-protocol/optional-trusted-flag.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/optional-trusted-flag.bin rename to tests/unit/libstore/data/worker-protocol/optional-trusted-flag.bin diff --git a/unit-test-data/libstore/worker-protocol/realisation.bin b/tests/unit/libstore/data/worker-protocol/realisation.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/realisation.bin rename to tests/unit/libstore/data/worker-protocol/realisation.bin diff --git a/unit-test-data/libstore/worker-protocol/set.bin b/tests/unit/libstore/data/worker-protocol/set.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/set.bin rename to tests/unit/libstore/data/worker-protocol/set.bin diff --git a/unit-test-data/libstore/worker-protocol/store-path.bin b/tests/unit/libstore/data/worker-protocol/store-path.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/store-path.bin rename to tests/unit/libstore/data/worker-protocol/store-path.bin diff --git a/unit-test-data/libstore/worker-protocol/string.bin b/tests/unit/libstore/data/worker-protocol/string.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/string.bin rename to tests/unit/libstore/data/worker-protocol/string.bin diff --git a/unit-test-data/libstore/worker-protocol/unkeyed-valid-path-info-1.15.bin b/tests/unit/libstore/data/worker-protocol/unkeyed-valid-path-info-1.15.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/unkeyed-valid-path-info-1.15.bin rename to tests/unit/libstore/data/worker-protocol/unkeyed-valid-path-info-1.15.bin diff --git a/unit-test-data/libstore/worker-protocol/valid-path-info-1.15.bin b/tests/unit/libstore/data/worker-protocol/valid-path-info-1.15.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/valid-path-info-1.15.bin rename to tests/unit/libstore/data/worker-protocol/valid-path-info-1.15.bin diff --git a/unit-test-data/libstore/worker-protocol/valid-path-info-1.16.bin b/tests/unit/libstore/data/worker-protocol/valid-path-info-1.16.bin similarity index 100% rename from 
unit-test-data/libstore/worker-protocol/valid-path-info-1.16.bin rename to tests/unit/libstore/data/worker-protocol/valid-path-info-1.16.bin diff --git a/unit-test-data/libstore/worker-protocol/vector.bin b/tests/unit/libstore/data/worker-protocol/vector.bin similarity index 100% rename from unit-test-data/libstore/worker-protocol/vector.bin rename to tests/unit/libstore/data/worker-protocol/vector.bin diff --git a/src/libstore/tests/derivation.cc b/tests/unit/libstore/derivation.cc similarity index 98% rename from src/libstore/tests/derivation.cc rename to tests/unit/libstore/derivation.cc index 7becfa5ab..7a4b1403a 100644 --- a/src/libstore/tests/derivation.cc +++ b/tests/unit/libstore/derivation.cc @@ -13,7 +13,7 @@ using nlohmann::json; class DerivationTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/libstore/derivation"; + Path unitTestData = getUnitTestData() + "/derivation"; public: Path goldenMaster(std::string_view testStem) const override { @@ -134,7 +134,7 @@ TEST_JSON(DynDerivationTest, caFixedText, TEST_JSON(CaDerivationTest, caFloating, (DerivationOutput::CAFloating { .method = FileIngestionMethod::Recursive, - .hashType = htSHA256, + .hashAlgo = HashAlgorithm::SHA256, }), "drv-name", "output-name") @@ -145,7 +145,7 @@ TEST_JSON(DerivationTest, deferred, TEST_JSON(ImpureDerivationTest, impure, (DerivationOutput::Impure { .method = FileIngestionMethod::Recursive, - .hashType = htSHA256, + .hashAlgo = HashAlgorithm::SHA256, }), "drv-name", "output-name") diff --git a/src/libstore/tests/derived-path.cc b/tests/unit/libstore/derived-path.cc similarity index 66% rename from src/libstore/tests/derived-path.cc rename to tests/unit/libstore/derived-path.cc index 3fa3c0801..c62d79a78 100644 --- a/src/libstore/tests/derived-path.cc +++ b/tests/unit/libstore/derived-path.cc @@ -1,64 +1,11 @@ #include -#include #include #include #include "tests/derived-path.hh" #include "tests/libstore.hh" -namespace rc { -using namespace nix; - -Gen Arbitrary::arbitrary() -{ - return gen::just(DerivedPath::Opaque { - .path = *gen::arbitrary(), - }); -} - -Gen Arbitrary::arbitrary() -{ - return gen::just(SingleDerivedPath::Built { - .drvPath = make_ref(*gen::arbitrary()), - .output = (*gen::arbitrary()).name, - }); -} - -Gen Arbitrary::arbitrary() -{ - return gen::just(DerivedPath::Built { - .drvPath = make_ref(*gen::arbitrary()), - .outputs = *gen::arbitrary(), - }); -} - -Gen Arbitrary::arbitrary() -{ - switch (*gen::inRange(0, std::variant_size_v)) { - case 0: - return gen::just(*gen::arbitrary()); - case 1: - return gen::just(*gen::arbitrary()); - default: - assert(false); - } -} - -Gen Arbitrary::arbitrary() -{ - switch (*gen::inRange(0, std::variant_size_v)) { - case 0: - return gen::just(*gen::arbitrary()); - case 1: - return gen::just(*gen::arbitrary()); - default: - assert(false); - } -} - -} - namespace nix { class DerivedPathTest : public LibStoreTest diff --git a/src/libstore/tests/downstream-placeholder.cc b/tests/unit/libstore/downstream-placeholder.cc similarity index 100% rename from src/libstore/tests/downstream-placeholder.cc rename to tests/unit/libstore/downstream-placeholder.cc diff --git a/tests/unit/libstore/local.mk b/tests/unit/libstore/local.mk new file mode 100644 index 000000000..63f6d011f --- /dev/null +++ b/tests/unit/libstore/local.mk @@ -0,0 +1,31 @@ +check: libstore-tests_RUN + +programs += libstore-tests + +libstore-tests_NAME = libnixstore-tests + +libstore-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data + 
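# The _NIX_TEST_UNIT_DATA variable above is how the characterization tests
# find their golden files (see the goldenMaster() overrides elsewhere in this
# patch). A rough C++ sketch of the consuming side, simplified from that
# pattern; readGoldenMaster is an invented name
# (requires <cstdlib>, <fstream>, <sstream>, <stdexcept>, <string>):
#
#     static std::string readGoldenMaster(const std::string & testStem)
#     {
#         // local.mk points _NIX_TEST_UNIT_DATA at $(d)/data.
#         const char * base = std::getenv("_NIX_TEST_UNIT_DATA");
#         if (!base)
#             throw std::runtime_error("_NIX_TEST_UNIT_DATA is not set");
#         std::ifstream f(std::string(base) + "/" + testStem);
#         if (!f)
#             throw std::runtime_error("missing golden master: " + testStem);
#         std::ostringstream ss;
#         ss << f.rdbuf();
#         return ss.str();
#     }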
+libstore-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libstore-tests_INSTALL_DIR := $(checkbindir) +else + libstore-tests_INSTALL_DIR := +endif + +libstore-tests_SOURCES := $(wildcard $(d)/*.cc) + +libstore-tests_EXTRA_INCLUDES = \ + -I tests/unit/libstore-support \ + -I tests/unit/libutil-support \ + -I src/libstore \ + -I src/libutil + +libstore-tests_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES) + +libstore-tests_LIBS = \ + libstore-test-support libutil-test-support \ + libstore libutil + +libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) diff --git a/src/libstore/tests/machines.cc b/tests/unit/libstore/machines.cc similarity index 97% rename from src/libstore/tests/machines.cc rename to tests/unit/libstore/machines.cc index fede328ea..5b66e5a5b 100644 --- a/src/libstore/tests/machines.cc +++ b/tests/unit/libstore/machines.cc @@ -139,7 +139,7 @@ TEST(machines, getMachinesWithIncorrectFormat) { } TEST(machines, getMachinesWithCorrectFileReference) { - auto path = absPath("src/libstore/tests/test-data/machines.valid"); + auto path = absPath("tests/unit/libstore/test-data/machines.valid"); ASSERT_TRUE(pathExists(path)); settings.builders = std::string("@") + path; @@ -166,6 +166,6 @@ TEST(machines, getMachinesWithIncorrectFileReference) { } TEST(machines, getMachinesWithCorrectFileReferenceToIncorrectFile) { - settings.builders = std::string("@") + absPath("src/libstore/tests/test-data/machines.bad_format"); + settings.builders = std::string("@") + absPath("tests/unit/libstore/test-data/machines.bad_format"); EXPECT_THROW(getMachines(), FormatError); } diff --git a/src/libstore/tests/nar-info-disk-cache.cc b/tests/unit/libstore/nar-info-disk-cache.cc similarity index 100% rename from src/libstore/tests/nar-info-disk-cache.cc rename to tests/unit/libstore/nar-info-disk-cache.cc diff --git a/src/libstore/tests/nar-info.cc b/tests/unit/libstore/nar-info.cc similarity index 95% rename from src/libstore/tests/nar-info.cc rename to tests/unit/libstore/nar-info.cc index c5b21d56b..bd10602e7 100644 --- a/src/libstore/tests/nar-info.cc +++ b/tests/unit/libstore/nar-info.cc @@ -2,6 +2,7 @@ #include #include "path-info.hh" +#include "nar-info.hh" #include "tests/characterization.hh" #include "tests/libstore.hh" @@ -12,7 +13,7 @@ using nlohmann::json; class NarInfoTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/libstore/nar-info"; + Path unitTestData = getUnitTestData() + "/nar-info"; Path goldenMaster(PathView testStem) const override { return unitTestData + "/" + testStem + ".json"; @@ -25,7 +26,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) { "foo", FixedOutputInfo { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), .references = { .others = { diff --git a/src/libstore/tests/outputs-spec.cc b/tests/unit/libstore/outputs-spec.cc similarity index 92% rename from src/libstore/tests/outputs-spec.cc rename to tests/unit/libstore/outputs-spec.cc index 952945185..456196be1 100644 --- a/src/libstore/tests/outputs-spec.cc +++ b/tests/unit/libstore/outputs-spec.cc @@ -1,4 +1,4 @@ -#include "outputs-spec.hh" +#include "tests/outputs-spec.hh" #include #include @@ -199,31 +199,6 @@ TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Expl #undef TEST_JSON -} - -namespace rc { -using namespace nix; - -Gen Arbitrary::arbitrary() -{ - switch (*gen::inRange(0, std::variant_size_v)) { - case 0: - 
return gen::just((OutputsSpec) OutputsSpec::All { }); - case 1: - return gen::just((OutputsSpec) OutputsSpec::Names { - *gen::nonEmpty(gen::container(gen::map( - gen::arbitrary(), - [](StorePathName n) { return n.name; }))), - }); - default: - assert(false); - } -} - -} - -namespace nix { - #ifndef COVERAGE RC_GTEST_PROP( diff --git a/src/libstore/tests/path-info.cc b/tests/unit/libstore/path-info.cc similarity index 95% rename from src/libstore/tests/path-info.cc rename to tests/unit/libstore/path-info.cc index 49bf623bd..80d6fcfed 100644 --- a/src/libstore/tests/path-info.cc +++ b/tests/unit/libstore/path-info.cc @@ -12,7 +12,7 @@ using nlohmann::json; class PathInfoTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/libstore/path-info"; + Path unitTestData = getUnitTestData() + "/path-info"; Path goldenMaster(PathView testStem) const override { return unitTestData + "/" + testStem + ".json"; @@ -25,7 +25,7 @@ static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpure "foo", FixedOutputInfo { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), .references = { .others = { diff --git a/src/libstore/tests/path.cc b/tests/unit/libstore/path.cc similarity index 59% rename from src/libstore/tests/path.cc rename to tests/unit/libstore/path.cc index 5a84d646c..30631b5fd 100644 --- a/src/libstore/tests/path.cc +++ b/tests/unit/libstore/path.cc @@ -66,79 +66,6 @@ TEST_DO_PARSE(equals_sign, "foo=foo") #undef TEST_DO_PARSE -// For rapidcheck -void showValue(const StorePath & p, std::ostream & os) { - os << p.to_string(); -} - -} - -namespace rc { -using namespace nix; - -Gen Arbitrary::arbitrary() -{ - auto len = *gen::inRange( - 1, - StorePath::MaxPathLen - std::string_view { HASH_PART }.size()); - - std::string pre; - pre.reserve(len); - - for (size_t c = 0; c < len; ++c) { - switch (auto i = *gen::inRange(0, 10 + 2 * 26 + 6)) { - case 0 ... 9: - pre += '0' + i; - case 10 ... 35: - pre += 'A' + (i - 10); - break; - case 36 ... 
61: - pre += 'a' + (i - 36); - break; - case 62: - pre += '+'; - break; - case 63: - pre += '-'; - break; - case 64: - // names aren't permitted to start with a period, - // so just fall through to the next case here - if (c != 0) { - pre += '.'; - break; - } - case 65: - pre += '_'; - break; - case 66: - pre += '?'; - break; - case 67: - pre += '='; - break; - default: - assert(false); - } - } - - return gen::just(StorePathName { - .name = std::move(pre), - }); -} - -Gen Arbitrary::arbitrary() -{ - return gen::just(StorePath { - *gen::arbitrary(), - (*gen::arbitrary()).name, - }); -} - -} // namespace rc - -namespace nix { - #ifndef COVERAGE RC_GTEST_FIXTURE_PROP( diff --git a/src/libstore/tests/references.cc b/tests/unit/libstore/references.cc similarity index 100% rename from src/libstore/tests/references.cc rename to tests/unit/libstore/references.cc diff --git a/src/libstore/tests/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc similarity index 68% rename from src/libstore/tests/serve-protocol.cc rename to tests/unit/libstore/serve-protocol.cc index c8ac87a04..8f256d1e6 100644 --- a/src/libstore/tests/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -53,15 +53,15 @@ VERSIONED_CHARACTERIZATION_TEST( (std::tuple { ContentAddress { .method = TextIngestionMethod {}, - .hash = hashString(HashType::htSHA256, "Derive(...)"), + .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, ContentAddress { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, })) @@ -225,6 +225,131 @@ VERSIONED_CHARACTERIZATION_TEST( t; })) +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + unkeyedValidPathInfo_2_3, + "unkeyed-valid-path-info-2.3", + 2 << 8 | 3, + (std::tuple { + ({ + UnkeyedValidPathInfo info { Hash::dummy }; + info.narSize = 34878; + info; + }), + ({ + UnkeyedValidPathInfo info { Hash::dummy }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", + }, + }; + info.narSize = 34878; + info; + }), + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + unkeyedValidPathInfo_2_4, + "unkeyed-valid-path-info-2.4", + 2 << 8 | 4, + (std::tuple { + ({ + UnkeyedValidPathInfo info { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", + }, + }; + info.narSize = 34878; + info; + }), + ({ + ValidPathInfo info { + *LibStoreTest::store, + "foo", + FixedOutputInfo { + .method = FileIngestionMethod::Recursive, + .hash = hashString(HashAlgorithm::SHA256, "(...)"), + .references = { + .others = { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, + }, + }, + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.narSize = 34878; + info.sigs = { + "fake-sig-1", + "fake-sig-2", + }, + static_cast(std::move(info)); + }), + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_1, + "build-options-2.1", + 2 << 8 | 1, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + 
.buildTimeout = 6, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_2, + "build-options-2.2", + 2 << 8 | 2, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + .maxLogSize = 7, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_3, + "build-options-2.3", + 2 << 8 | 3, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + .maxLogSize = 7, + .nrRepeats = 8, + .enforceDeterminism = true, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_7, + "build-options-2.7", + 2 << 8 | 7, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + .maxLogSize = 7, + .nrRepeats = 8, + .enforceDeterminism = false, + .keepFailed = true, + })) + VERSIONED_CHARACTERIZATION_TEST( ServeProtoTest, vector, @@ -271,7 +396,7 @@ VERSIONED_CHARACTERIZATION_TEST( std::optional { ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, }, })) diff --git a/src/libstore/tests/test-data/machines.bad_format b/tests/unit/libstore/test-data/machines.bad_format similarity index 100% rename from src/libstore/tests/test-data/machines.bad_format rename to tests/unit/libstore/test-data/machines.bad_format diff --git a/src/libstore/tests/test-data/machines.valid b/tests/unit/libstore/test-data/machines.valid similarity index 100% rename from src/libstore/tests/test-data/machines.valid rename to tests/unit/libstore/test-data/machines.valid diff --git a/src/libstore/tests/worker-protocol.cc b/tests/unit/libstore/worker-protocol.cc similarity index 87% rename from src/libstore/tests/worker-protocol.cc rename to tests/unit/libstore/worker-protocol.cc index ad5943c69..2b2e559a9 100644 --- a/src/libstore/tests/worker-protocol.cc +++ b/tests/unit/libstore/worker-protocol.cc @@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST( (std::tuple { ContentAddress { .method = TextIngestionMethod {}, - .hash = hashString(HashType::htSHA256, "Derive(...)"), + .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, ContentAddress { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, })) @@ -280,13 +280,60 @@ VERSIONED_CHARACTERIZATION_TEST( }, .startTime = 30, .stopTime = 50, -#if 0 - // These fields are not yet serialized. - // FIXME Include in next version of protocol or document - // why they are skipped. 
diff --git a/src/libstore/tests/test-data/machines.bad_format b/tests/unit/libstore/test-data/machines.bad_format
similarity index 100%
rename from src/libstore/tests/test-data/machines.bad_format
rename to tests/unit/libstore/test-data/machines.bad_format
diff --git a/src/libstore/tests/test-data/machines.valid b/tests/unit/libstore/test-data/machines.valid
similarity index 100%
rename from src/libstore/tests/test-data/machines.valid
rename to tests/unit/libstore/test-data/machines.valid
diff --git a/src/libstore/tests/worker-protocol.cc b/tests/unit/libstore/worker-protocol.cc
similarity index 87%
rename from src/libstore/tests/worker-protocol.cc
rename to tests/unit/libstore/worker-protocol.cc
index ad5943c69..2b2e559a9 100644
--- a/src/libstore/tests/worker-protocol.cc
+++ b/tests/unit/libstore/worker-protocol.cc
@@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST(
     (std::tuple<ContentAddress, ContentAddress, ContentAddress> {
         ContentAddress {
             .method = TextIngestionMethod {},
-            .hash = hashString(HashType::htSHA256, "Derive(...)"),
+            .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"),
         },
         ContentAddress {
             .method = FileIngestionMethod::Flat,
-            .hash = hashString(HashType::htSHA1, "blob blob..."),
+            .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
         },
         ContentAddress {
             .method = FileIngestionMethod::Recursive,
-            .hash = hashString(HashType::htSHA256, "(...)"),
+            .hash = hashString(HashAlgorithm::SHA256, "(...)"),
         },
     }))
@@ -280,13 +280,60 @@ VERSIONED_CHARACTERIZATION_TEST(
             },
             .startTime = 30,
             .stopTime = 50,
-#if 0
-            // These fields are not yet serialized.
-            // FIXME Include in next version of protocol or document
-            // why they are skipped.
-            .cpuUser = std::chrono::milliseconds(500s),
-            .cpuSystem = std::chrono::milliseconds(604s),
-#endif
+        },
+    };
+    t;
+    }))
+
+VERSIONED_CHARACTERIZATION_TEST(
+    WorkerProtoTest,
+    buildResult_1_37,
+    "build-result-1.37",
+    1 << 8 | 37,
+    ({
+        using namespace std::literals::chrono_literals;
+        std::tuple<BuildResult, BuildResult, BuildResult> t {
+            BuildResult {
+                .status = BuildResult::OutputRejected,
+                .errorMsg = "no idea why",
+            },
+            BuildResult {
+                .status = BuildResult::NotDeterministic,
+                .errorMsg = "no idea why",
+                .timesBuilt = 3,
+                .isNonDeterministic = true,
+                .startTime = 30,
+                .stopTime = 50,
+            },
+            BuildResult {
+                .status = BuildResult::Built,
+                .timesBuilt = 1,
+                .builtOutputs = {
+                    {
+                        "foo",
+                        {
+                            .id = DrvOutput {
+                                .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
+                                .outputName = "foo",
+                            },
+                            .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
+                        },
+                    },
+                    {
+                        "bar",
+                        {
+                            .id = DrvOutput {
+                                .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
+                                .outputName = "bar",
+                            },
+                            .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" },
+                        },
+                    },
+                },
+                .startTime = 30,
+                .stopTime = 50,
+                .cpuUser = std::chrono::microseconds(500s),
+                .cpuSystem = std::chrono::microseconds(604s),
+            },
         },
     };
     t;
@@ -464,7 +511,7 @@ VERSIONED_CHARACTERIZATION_TEST(
             "foo",
             FixedOutputInfo {
                 .method = FileIngestionMethod::Recursive,
-                .hash = hashString(HashType::htSHA256, "(...)"),
+                .hash = hashString(HashAlgorithm::SHA256, "(...)"),
                 .references = {
                     .others = {
                         StorePath {
@@ -539,7 +586,7 @@ VERSIONED_CHARACTERIZATION_TEST(
         std::optional<ContentAddress> {
             ContentAddress {
                 .method = FileIngestionMethod::Flat,
-                .hash = hashString(HashType::htSHA1, "blob blob..."),
+                .hash = hashString(HashAlgorithm::SHA1, "blob blob..."),
            },
         },
     }))
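Each `VERSIONED_CHARACTERIZATION_TEST` above compares a value serialized at a given protocol version against a recorded golden file under the unit-test data directory. A simplified sketch of the read side of that pattern; the file layout and helper name here are assumptions for illustration, not the macro's real definition:

```cpp
#include <cassert>
#include <fstream>
#include <sstream>
#include <string>

// Sketch: load the golden encoding for one characterization test.
// The real tests locate their data via the _NIX_TEST_UNIT_DATA
// environment variable (see getUnitTestData() below); this path is
// made up for the example.
static std::string readGolden(const std::string & testStem)
{
    std::ifstream f(
        "tests/unit/libstore/data/worker-protocol/" + testStem + ".bin",
        std::ios::binary);
    assert(f.good());
    std::ostringstream buf;
    buf << f.rdbuf();
    return buf.str();
}

// Usage sketch: serialize the in-memory value, then require that the
// bytes match the recorded file, e.g.
//   assert(serialized == readGolden("build-result-1.37"));
```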
diff --git a/tests/unit/libutil-support/local.mk b/tests/unit/libutil-support/local.mk
new file mode 100644
index 000000000..5f7835c9f
--- /dev/null
+++ b/tests/unit/libutil-support/local.mk
@@ -0,0 +1,19 @@
+libraries += libutil-test-support
+
+libutil-test-support_NAME = libnixutil-test-support
+
+libutil-test-support_DIR := $(d)
+
+ifeq ($(INSTALL_UNIT_TESTS), yes)
+  libutil-test-support_INSTALL_DIR := $(checklibdir)
+else
+  libutil-test-support_INSTALL_DIR :=
+endif
+
+libutil-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)
+
+libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)
+
+libutil-test-support_LIBS = libutil
+
+libutil-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck
diff --git a/src/libutil/tests/characterization.hh b/tests/unit/libutil-support/tests/characterization.hh
similarity index 95%
rename from src/libutil/tests/characterization.hh
rename to tests/unit/libutil-support/tests/characterization.hh
index 6eb513d68..9d6c850f0 100644
--- a/src/libutil/tests/characterization.hh
+++ b/tests/unit/libutil-support/tests/characterization.hh
@@ -9,8 +9,8 @@
 namespace nix {
 
 /**
- * The path to the `unit-test-data` directory. See the contributing
- * guide in the manual for further details.
+ * The path to the unit test data directory. See the contributing guide
+ * in the manual for further details.
  */
 static Path getUnitTestData() {
     return getEnv("_NIX_TEST_UNIT_DATA").value();
diff --git a/tests/unit/libutil-support/tests/hash.cc b/tests/unit/libutil-support/tests/hash.cc
new file mode 100644
index 000000000..50889cd33
--- /dev/null
+++ b/tests/unit/libutil-support/tests/hash.cc
@@ -0,0 +1,20 @@
+#include <regex>
+
+#include <rapidcheck.h>
+
+#include "hash.hh"
+
+#include "tests/hash.hh"
+
+namespace rc {
+using namespace nix;
+
+Gen<Hash> Arbitrary<Hash>::arbitrary()
+{
+    Hash hash(HashAlgorithm::SHA1);
+    for (size_t i = 0; i < hash.hashSize; ++i)
+        hash.hash[i] = *gen::arbitrary<uint8_t>();
+    return gen::just(hash);
+}
+
+}
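With `Arbitrary<Hash>` now exported from `libutil-test-support`, any linked suite can draw random SHA-1 digests. A sketch of a round-trip property built on it, assuming the `Hash::parseSRI`/`to_string` pair that appears elsewhere in this patch (the test name and format choice are mine):

```cpp
#include <rapidcheck/gtest.h>

#include "hash.hh"
#include "tests/hash.hh" // Arbitrary<Hash> from libutil-test-support

namespace nix {

// Property sketch: printing a generated hash in SRI form and parsing
// it back should be the identity.
RC_GTEST_PROP(
    HashSketch,
    prop_sri_roundtrip,
    (const Hash & h))
{
    RC_ASSERT(Hash::parseSRI(h.to_string(HashFormat::SRI, true)) == h);
}

}
```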
diff --git a/src/libutil/tests/hash.hh b/tests/unit/libutil-support/tests/hash.hh
similarity index 100%
rename from src/libutil/tests/hash.hh
rename to tests/unit/libutil-support/tests/hash.hh
diff --git a/src/libutil/tests/args.cc b/tests/unit/libutil/args.cc
similarity index 98%
rename from src/libutil/tests/args.cc
rename to tests/unit/libutil/args.cc
index bea74a8c8..950224430 100644
--- a/src/libutil/tests/args.cc
+++ b/tests/unit/libutil/args.cc
@@ -1,5 +1,5 @@
-#include "../args.hh"
-#include "libutil/fs-sink.hh"
+#include "args.hh"
+#include "fs-sink.hh"
 #include
 #include
@@ -165,4 +165,4 @@ RC_GTEST_PROP(
 
 #endif
 
-}
\ No newline at end of file
+}
diff --git a/src/libutil/tests/canon-path.cc b/tests/unit/libutil/canon-path.cc
similarity index 100%
rename from src/libutil/tests/canon-path.cc
rename to tests/unit/libutil/canon-path.cc
diff --git a/src/libutil/tests/chunked-vector.cc b/tests/unit/libutil/chunked-vector.cc
similarity index 100%
rename from src/libutil/tests/chunked-vector.cc
rename to tests/unit/libutil/chunked-vector.cc
diff --git a/src/libutil/tests/closure.cc b/tests/unit/libutil/closure.cc
similarity index 100%
rename from src/libutil/tests/closure.cc
rename to tests/unit/libutil/closure.cc
diff --git a/src/libutil/tests/compression.cc b/tests/unit/libutil/compression.cc
similarity index 100%
rename from src/libutil/tests/compression.cc
rename to tests/unit/libutil/compression.cc
diff --git a/src/libutil/tests/config.cc b/tests/unit/libutil/config.cc
similarity index 100%
rename from src/libutil/tests/config.cc
rename to tests/unit/libutil/config.cc
diff --git a/unit-test-data/libutil/git/check-data.sh b/tests/unit/libutil/data/git/check-data.sh
similarity index 98%
rename from unit-test-data/libutil/git/check-data.sh
rename to tests/unit/libutil/data/git/check-data.sh
index 68b705c95..b3f59c4f1 100644
--- a/unit-test-data/libutil/git/check-data.sh
+++ b/tests/unit/libutil/data/git/check-data.sh
@@ -2,7 +2,7 @@
 
 set -eu -o pipefail
 
-export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/git-hashing/unit-test-data
+export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/git-hashing/check-data
 mkdir -p $TEST_ROOT
 
 repo="$TEST_ROOT/scratch"
diff --git a/unit-test-data/libutil/git/hello-world-blob.bin b/tests/unit/libutil/data/git/hello-world-blob.bin
similarity index 100%
rename from unit-test-data/libutil/git/hello-world-blob.bin
rename to tests/unit/libutil/data/git/hello-world-blob.bin
diff --git a/unit-test-data/libutil/git/hello-world.bin b/tests/unit/libutil/data/git/hello-world.bin
similarity index 100%
rename from unit-test-data/libutil/git/hello-world.bin
rename to tests/unit/libutil/data/git/hello-world.bin
diff --git a/unit-test-data/libutil/git/tree.bin b/tests/unit/libutil/data/git/tree.bin
similarity index 100%
rename from unit-test-data/libutil/git/tree.bin
rename to tests/unit/libutil/data/git/tree.bin
diff --git a/unit-test-data/libutil/git/tree.txt b/tests/unit/libutil/data/git/tree.txt
similarity index 100%
rename from unit-test-data/libutil/git/tree.txt
rename to tests/unit/libutil/data/git/tree.txt
diff --git a/src/libutil/tests/git.cc b/tests/unit/libutil/git.cc
similarity index 94%
rename from src/libutil/tests/git.cc
rename to tests/unit/libutil/git.cc
index 2842ea4d0..141a55816 100644
--- a/src/libutil/tests/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -11,7 +11,7 @@ using namespace git;
 
 class GitTest : public CharacterizationTest
 {
-    Path unitTestData = getUnitTestData() + "/libutil/git";
+    Path unitTestData = getUnitTestData() + "/git";
 
 public:
 
@@ -86,8 +86,8 @@ TEST_F(GitTest, blob_write) {
 
 /**
  * This data is for "shallow" tree tests. However, we use "real" hashes
- * so that we can check our test data in the corresponding functional
- * test (`git-hashing/unit-test-data`).
+ * so that we can check our test data in a small shell script test
+ * (`tests/unit/libutil/data/git/check-data.sh`).
  */
 const static Tree tree = {
     {
@@ -95,7 +95,7 @@ const static Tree tree = {
         {
             .mode = Mode::Regular,
             // hello world with special chars from above
-            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
         },
     },
     {
@@ -103,7 +103,7 @@ const static Tree tree = {
         {
             .mode = Mode::Executable,
             // ditto
-            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
         },
     },
     {
@@ -111,7 +111,7 @@ const static Tree tree = {
         {
             .mode = Mode::Directory,
             // Empty directory hash
-            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", htSHA1),
+            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
         },
     },
 };
@@ -174,7 +174,7 @@ TEST_F(GitTest, both_roundrip) {
     std::function<void(const CanonPath &)> dumpHook;
     dumpHook = [&](const CanonPath & path) {
         StringSink s;
-        HashSink hashSink { htSHA1 };
+        HashSink hashSink { HashAlgorithm::SHA1 };
         TeeSink s2 { s, hashSink };
         auto mode = dump(
             files, path, s2, dumpHook,
diff --git a/src/libutil/tests/hash.cc b/tests/unit/libutil/hash.cc
similarity index 80%
rename from src/libutil/tests/hash.cc
rename to tests/unit/libutil/hash.cc
index 9a5ebbb30..a88994d0b 100644
--- a/src/libutil/tests/hash.cc
+++ b/tests/unit/libutil/hash.cc
@@ -1,12 +1,8 @@
 #include
-#include
 
 #include
-#include
-#include
-
-#include "tests/hash.hh"
+#include "hash.hh"
 
 namespace nix {
@@ -17,28 +13,28 @@ namespace nix {
     TEST(hashString, testKnownMD5Hashes1) {
         // values taken from: https://tools.ietf.org/html/rfc1321
         auto s1 = "";
-        auto hash = hashString(HashType::htMD5, s1);
+        auto hash = hashString(HashAlgorithm::MD5, s1);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
     }
 
     TEST(hashString, testKnownMD5Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc1321
         auto s2 = "abc";
-        auto hash = hashString(HashType::htMD5, s2);
+        auto hash = hashString(HashAlgorithm::MD5, s2);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
     }
 
     TEST(hashString, testKnownSHA1Hashes1) {
         // values taken from: https://tools.ietf.org/html/rfc3174
         auto s = "abc";
-        auto hash = hashString(HashType::htSHA1, s);
+        auto hash = hashString(HashAlgorithm::SHA1, s);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
     }
 
     TEST(hashString, testKnownSHA1Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc3174
         auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
-        auto hash = hashString(HashType::htSHA1, s);
+        auto hash = hashString(HashAlgorithm::SHA1, s);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
     }
 
@@ -46,7 +42,7 @@ namespace nix {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abc";
 
-        auto hash = hashString(HashType::htSHA256, s);
+        auto hash = hashString(HashAlgorithm::SHA256, s);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
                   "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
     }
@@ -54,7 +50,7 @@ namespace nix {
     TEST(hashString, testKnownSHA256Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
-        auto hash = hashString(HashType::htSHA256, s);
+        auto hash = hashString(HashAlgorithm::SHA256, s);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
                   "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
     }
@@ -62,18 +58,17 @@ namespace nix {
     TEST(hashString, testKnownSHA512Hashes1) {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abc";
-        auto hash = hashString(HashType::htSHA512, s);
+        auto hash = hashString(HashAlgorithm::SHA512, s);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
                   "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
                   "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
                   "454d4423643ce80e2a9ac94fa54ca49f");
     }
-
     TEST(hashString, testKnownSHA512Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
-        auto hash = hashString(HashType::htSHA512, s);
+        auto hash = hashString(HashAlgorithm::SHA512, s);
         ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
                   "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
                   "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
@@ -85,7 +80,7 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(hashFormat, testRoundTripPrintParse) {
-        for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Base32, HashFormat::Base16, HashFormat::SRI}) {
+        for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Nix32, HashFormat::Base16, HashFormat::SRI}) {
             ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat);
             ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat);
         }
@@ -95,16 +90,3 @@ namespace nix {
         ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt);
     }
 }
-
-namespace rc {
-using namespace nix;
-
-Gen<Hash> Arbitrary<Hash>::arbitrary()
-{
-    Hash hash(htSHA1);
-    for (size_t i = 0; i < hash.hashSize; ++i)
-        hash.hash[i] = *gen::arbitrary<uint8_t>();
-    return gen::just(hash);
-}
-
-}
diff --git a/src/libutil/tests/hilite.cc b/tests/unit/libutil/hilite.cc
similarity index 100%
rename from src/libutil/tests/hilite.cc
rename to tests/unit/libutil/hilite.cc
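The `testRoundTripPrintParse` hunk above also renames `HashFormat::Base32` to `HashFormat::Nix32`, reflecting that Nix's 32-character alphabet is not standard base32. A small sketch of what the round-trip test exercises, printing one digest in each named format using only the `hashString`/`to_string` calls shown in these tests:

```cpp
#include <iostream>

#include "hash.hh"

int main()
{
    using namespace nix;
    auto h = hashString(HashAlgorithm::SHA256, "abc");
    // Hexadecimal, e.g. "sha256:ba7816bf..."
    std::cout << h.to_string(HashFormat::Base16, true) << "\n";
    // Nix's own base-32 alphabet (formerly called Base32 in this API)
    std::cout << h.to_string(HashFormat::Nix32, true) << "\n";
    // SRI style, "sha256-<base64>"
    std::cout << h.to_string(HashFormat::SRI, true) << "\n";
}
```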
diff --git a/tests/unit/libutil/json-utils.cc b/tests/unit/libutil/json-utils.cc
new file mode 100644
index 000000000..f0ce15c93
--- /dev/null
+++ b/tests/unit/libutil/json-utils.cc
@@ -0,0 +1,58 @@
+#include <vector>
+#include <optional>
+
+#include <gtest/gtest.h>
+
+#include "json-utils.hh"
+
+namespace nix {
+
+/* Test `to_json` and `from_json` with `std::optional` types.
+ * We are specifically interested in whether we can _nest_ optionals in STL
+ * containers so that we can leverage existing adl_serializer templates.
+ */
+
+TEST(to_json, optionalInt) {
+    std::optional<int> val = std::make_optional(420);
+    ASSERT_EQ(nlohmann::json(val), nlohmann::json(420));
+    val = std::nullopt;
+    ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr));
+}
+
+TEST(to_json, vectorOfOptionalInts) {
+    std::vector<std::optional<int>> vals = {
+        std::make_optional(420),
+        std::nullopt,
+    };
+    ASSERT_EQ(nlohmann::json(vals), nlohmann::json::parse("[420,null]"));
+}
+
+TEST(to_json, optionalVectorOfInts) {
+    std::optional<std::vector<int>> val = std::make_optional(std::vector<int> {
+        -420,
+        420,
+    });
+    ASSERT_EQ(nlohmann::json(val), nlohmann::json::parse("[-420,420]"));
+    val = std::nullopt;
+    ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr));
+}
+
+TEST(from_json, optionalInt) {
+    nlohmann::json json = 420;
+    std::optional<int> val = json;
+    ASSERT_TRUE(val.has_value());
+    ASSERT_EQ(*val, 420);
+    json = nullptr;
+    json.get_to(val);
+    ASSERT_FALSE(val.has_value());
+}
+
+TEST(from_json, vectorOfOptionalInts) {
+    nlohmann::json json = { 420, nullptr };
+    std::vector<std::optional<int>> vals = json;
+    ASSERT_EQ(vals.size(), 2);
+    ASSERT_TRUE(vals.at(0).has_value());
+    ASSERT_EQ(*vals.at(0), 420);
+    ASSERT_FALSE(vals.at(1).has_value());
+}
+
+} /* namespace nix */
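These tests depend on `json-utils.hh` supplying nlohmann-compatible conversions for `std::optional`. A minimal sketch of the kind of `adl_serializer` specialization involved, illustrating the mechanism rather than reproducing the header's actual code:

```cpp
#include <optional>

#include <nlohmann/json.hpp>

namespace nlohmann {

// Sketch: map std::nullopt to JSON null and back, so that optionals
// nested inside STL containers serialize through the existing
// adl_serializer templates.
template<typename T>
struct adl_serializer<std::optional<T>>
{
    static void to_json(json & j, const std::optional<T> & opt)
    {
        if (opt)
            j = *opt;
        else
            j = nullptr;
    }

    static void from_json(const json & j, std::optional<T> & opt)
    {
        if (j.is_null())
            opt = std::nullopt;
        else
            opt = j.template get<T>();
    }
};

}
```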
diff --git a/tests/unit/libutil/local.mk b/tests/unit/libutil/local.mk
new file mode 100644
index 000000000..930efb90b
--- /dev/null
+++ b/tests/unit/libutil/local.mk
@@ -0,0 +1,31 @@
+check: libutil-tests_RUN
+
+programs += libutil-tests
+
+libutil-tests_NAME = libnixutil-tests
+
+libutil-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data
+
+libutil-tests_DIR := $(d)
+
+ifeq ($(INSTALL_UNIT_TESTS), yes)
+  libutil-tests_INSTALL_DIR := $(checkbindir)
+else
+  libutil-tests_INSTALL_DIR :=
+endif
+
+libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
+
+libutil-tests_EXTRA_INCLUDES = \
+ -I tests/unit/libutil-support \
+ -I src/libutil
+
+libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)
+
+libutil-tests_LIBS = libutil-test-support libutil
+
+libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
+
+check: $(d)/data/git/check-data.sh.test
+
+$(eval $(call run-test,$(d)/data/git/check-data.sh))
diff --git a/src/libutil/tests/logging.cc b/tests/unit/libutil/logging.cc
similarity index 100%
rename from src/libutil/tests/logging.cc
rename to tests/unit/libutil/logging.cc
diff --git a/src/libutil/tests/lru-cache.cc b/tests/unit/libutil/lru-cache.cc
similarity index 100%
rename from src/libutil/tests/lru-cache.cc
rename to tests/unit/libutil/lru-cache.cc
diff --git a/src/libutil/tests/pool.cc b/tests/unit/libutil/pool.cc
similarity index 100%
rename from src/libutil/tests/pool.cc
rename to tests/unit/libutil/pool.cc
diff --git a/src/libutil/tests/references.cc b/tests/unit/libutil/references.cc
similarity index 100%
rename from src/libutil/tests/references.cc
rename to tests/unit/libutil/references.cc
diff --git a/src/libutil/tests/suggestions.cc b/tests/unit/libutil/suggestions.cc
similarity index 100%
rename from src/libutil/tests/suggestions.cc
rename to tests/unit/libutil/suggestions.cc
diff --git a/src/libutil/tests/tests.cc b/tests/unit/libutil/tests.cc
similarity index 100%
rename from src/libutil/tests/tests.cc
rename to tests/unit/libutil/tests.cc
diff --git a/src/libutil/tests/url.cc b/tests/unit/libutil/url.cc
similarity index 92%
rename from src/libutil/tests/url.cc
rename to tests/unit/libutil/url.cc
index a678dad20..7d08f467e 100644
--- a/src/libutil/tests/url.cc
+++ b/tests/unit/libutil/url.cc
@@ -344,4 +344,27 @@ namespace nix {
         ASSERT_EQ(percentDecode(e), s);
     }
 
+TEST(nix, isValidSchemeName) {
+    ASSERT_TRUE(isValidSchemeName("http"));
+    ASSERT_TRUE(isValidSchemeName("https"));
+    ASSERT_TRUE(isValidSchemeName("file"));
+    ASSERT_TRUE(isValidSchemeName("file+https"));
+    ASSERT_TRUE(isValidSchemeName("fi.le"));
+    ASSERT_TRUE(isValidSchemeName("file-ssh"));
+    ASSERT_TRUE(isValidSchemeName("file+"));
+    ASSERT_TRUE(isValidSchemeName("file."));
+    ASSERT_TRUE(isValidSchemeName("file1"));
+    ASSERT_FALSE(isValidSchemeName("file:"));
+    ASSERT_FALSE(isValidSchemeName("file/"));
+    ASSERT_FALSE(isValidSchemeName("+file"));
+    ASSERT_FALSE(isValidSchemeName(".file"));
+    ASSERT_FALSE(isValidSchemeName("-file"));
+    ASSERT_FALSE(isValidSchemeName("1file"));
+    // regex ok?
+    ASSERT_FALSE(isValidSchemeName("\nhttp"));
+    ASSERT_FALSE(isValidSchemeName("\nhttp\n"));
+    ASSERT_FALSE(isValidSchemeName("http\n"));
+    ASSERT_FALSE(isValidSchemeName("http "));
+}
+
 }
diff --git a/src/libutil/tests/xml-writer.cc b/tests/unit/libutil/xml-writer.cc
similarity index 100%
rename from src/libutil/tests/xml-writer.cc
rename to tests/unit/libutil/xml-writer.cc
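Taken together, the assertions in the new `isValidSchemeName` test pin down the RFC 3986 scheme grammar: a letter followed by letters, digits, `+`, `-`, or `.`, with no whitespace anywhere. A sketch of a conforming check; the real `isValidSchemeName` lives in `url.cc`, and this regex is mine, not the patch's:

```cpp
#include <cassert>
#include <regex>
#include <string>

// Sketch of the behavior the assertions above require:
// scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ), RFC 3986 §3.1.
static bool isValidSchemeNameSketch(const std::string & s)
{
    static const std::regex schemeRegex("[a-zA-Z][a-zA-Z0-9+.-]*");
    // regex_match requires the whole string to match, so embedded
    // newlines and trailing whitespace are rejected.
    return std::regex_match(s, schemeRegex);
}

int main()
{
    assert(isValidSchemeNameSketch("file+https"));
    assert(!isValidSchemeNameSketch("1file"));
    assert(!isValidSchemeNameSketch("http\n"));
}
```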