Merge branch 'master' into access-tokens

Kevin Quick 2020-09-29 08:32:06 -07:00
commit 66c3959e8c
No known key found for this signature in database
GPG key ID: E6D7733599CC0A21
96 changed files with 1246 additions and 823 deletions


@@ -12,8 +12,6 @@ jobs:
     - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v10
-      with:
-        skip_adding_nixpkgs_channel: true
+    - uses: cachix/install-nix-action@v11
     #- run: nix flake check
     - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)

.gitignore

@@ -12,15 +12,6 @@ perl/Makefile.config
 /svn-revision
 /libtool
 
-/corepkgs/config.nix
-
-# /corepkgs/channels/
-/corepkgs/channels/unpack.sh
-
-# /corepkgs/nar/
-/corepkgs/nar/nar.sh
-/corepkgs/nar/unnar.sh
-
 # /doc/manual/
 /doc/manual/*.1
 /doc/manual/*.5


@@ -26,7 +26,7 @@ OPTIMIZE = 1
 ifeq ($(OPTIMIZE), 1)
   GLOBAL_CXXFLAGS += -O3
 else
-  GLOBAL_CXXFLAGS += -O0
+  GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
 endif
 
 include mk/lib.mk


@@ -9,7 +9,7 @@ for more details.
 
 ## Installation
 
-On Linux and macOS the easiest way to Install Nix is to run the following shell command
+On Linux and macOS the easiest way to install Nix is to run the following shell command
 (as a user other than root):
 
 ```console


@@ -1,13 +0,0 @@
-# FIXME: remove this file?
-let
-  fromEnv = var: def:
-    let val = builtins.getEnv var; in
-    if val != "" then val else def;
-in rec {
-  nixBinDir = fromEnv "NIX_BIN_DIR" "@bindir@";
-  nixPrefix = "@prefix@";
-  nixLibexecDir = fromEnv "NIX_LIBEXEC_DIR" "@libexecdir@";
-  nixLocalstateDir = "@localstatedir@";
-  nixSysconfDir = "@sysconfdir@";
-  nixStoreDir = fromEnv "NIX_STORE_DIR" "@storedir@";
-}


@@ -1,8 +1,4 @@
 corepkgs_FILES = \
-  unpack-channel.nix \
-  derivation.nix \
   fetchurl.nix
 
-$(foreach file,config.nix $(corepkgs_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/corepkgs)))
-
-template-files += $(d)/config.nix
+$(foreach file,$(corepkgs_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/corepkgs)))


@@ -51,6 +51,9 @@ Nix store is also printed.
     result to the Nix store. The resulting hash can be used with
     functions such as Nixpkgs’s `fetchzip` or `fetchFromGitHub`.
 
+  - `--executable`
+    Set the executable bit on the downloaded file.
+
   - `--name` *name*
     Override the name of the file in the Nix store. By default, this is
     `hash-basename`, where *basename* is the last component of *url*.


@@ -39,17 +39,17 @@ To build Nix itself in this shell:
 
 ```console
 [nix-shell]$ ./bootstrap.sh
-[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/inst
+[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
 [nix-shell]$ make -j $NIX_BUILD_CORES
 ```
 
-To install it in `$(pwd)/inst` and test it:
+To install it in `$(pwd)/outputs` and test it:
 
 ```console
 [nix-shell]$ make install
-[nix-shell]$ make installcheck
-[nix-shell]$ ./inst/bin/nix --version
-nix (Nix) 2.4
+[nix-shell]$ make installcheck -j $NIX_BUILD_CORES
+[nix-shell]$ ./outputs/out/bin/nix --version
+nix (Nix) 3.0
 ```
 
 To run a functional test:
@@ -58,6 +58,12 @@ To run a functional test:
 make tests/test-name-should-auto-complete.sh.test
 ```
 
+To run the unit-tests for C++ code:
+
+```
+make check
+```
+
 If you have a flakes-enabled Nix you can replace:
 
 ```console


@@ -3,16 +3,15 @@
     "lowdown-src": {
       "flake": false,
       "locked": {
-        "lastModified": 1598296217,
-        "narHash": "sha256-ha7lyNY1d8m+osmDpPc9f/bfZ3ZC1IVIXwfyklSWg8I=",
-        "owner": "edolstra",
+        "lastModified": 1598695561,
+        "narHash": "sha256-gyH/5j+h/nWw0W8AcR2WKvNBUsiQ7QuxqSJNXAwV+8E=",
+        "owner": "kristapsdz",
         "repo": "lowdown",
-        "rev": "c7a4e715af1e233080842db82d15b261cb74cb28",
+        "rev": "1705b4a26fbf065d9574dce47a94e8c7c79e052f",
         "type": "github"
       },
       "original": {
-        "owner": "edolstra",
-        "ref": "no-structs-in-anonymous-unions",
+        "owner": "kristapsdz",
         "repo": "lowdown",
         "type": "github"
       }


@@ -2,7 +2,7 @@
   description = "The purely functional package manager";
 
   inputs.nixpkgs.url = "nixpkgs/nixos-20.03-small";
-  inputs.lowdown-src = { url = "github:edolstra/lowdown/no-structs-in-anonymous-unions"; flake = false; };
+  inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
 
   outputs = { self, nixpkgs, lowdown-src }:
 
@@ -58,6 +58,7 @@
       configureFlags =
         lib.optionals stdenv.isLinux [
           "--with-sandbox-shell=${sh}/bin/busybox"
+          "LDFLAGS=-fuse-ld=gold"
         ];
 
       buildDeps =
@@ -136,7 +137,7 @@
 
         enableParallelBuilding = true;
 
-        makeFlags = "profiledir=$(out)/etc/profile.d";
+        makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
 
         doCheck = true;
 
@@ -334,9 +335,6 @@
           # syntax-check generated dot files, it still requires some
           # fonts. So provide those.
           FONTCONFIG_FILE = texFunctions.fontsConf;
-
-          # To test building without precompiled headers.
-          makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
         };
 
       # System tests.


@@ -4,6 +4,8 @@
 <dict>
   <key>EnvironmentVariables</key>
   <dict>
+    <key>NIX_SSL_CERT_FILE</key>
+    <string>/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt</string>
     <key>OBJC_DISABLE_INITIALIZE_FORK_SAFETY</key>
     <string>YES</string>
   </dict>


@@ -1,4 +1,4 @@
-PRECOMPILE_HEADERS ?= 1
+PRECOMPILE_HEADERS ?= 0
 
 print-var-help += \
   echo "  PRECOMPILE_HEADERS ($(PRECOMPILE_HEADERS)): Whether to use precompiled headers to speed up the build";


@@ -70,19 +70,6 @@ PKG_CHECK_MODULES([NIX], [nix-store])
 
 NEED_PROG([NIX], [nix])
 
-# Get nix configure values
-export NIX_REMOTE=daemon
-nixbindir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixBinDir)
-nixlibexecdir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixLibexecDir)
-nixlocalstatedir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixLocalstateDir)
-nixsysconfdir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixSysconfDir)
-nixstoredir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixStoreDir)
-AC_SUBST(nixbindir)
-AC_SUBST(nixlibexecdir)
-AC_SUBST(nixlocalstatedir)
-AC_SUBST(nixsysconfdir)
-AC_SUBST(nixstoredir)
-
 # Expand all variables in config.status.
 test "$prefix" = NONE && prefix=$ac_default_prefix
 test "$exec_prefix" = NONE && exec_prefix='${prefix}'


@@ -4,14 +4,8 @@ use MIME::Base64;
 
 $version = "@PACKAGE_VERSION@";
 
-$binDir = $ENV{"NIX_BIN_DIR"} || "@nixbindir@";
-$libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@nixlibexecdir@";
-$stateDir = $ENV{"NIX_STATE_DIR"} || "@nixlocalstatedir@/nix";
-$logDir = $ENV{"NIX_LOG_DIR"} || "@nixlocalstatedir@/log/nix";
-$confDir = $ENV{"NIX_CONF_DIR"} || "@nixsysconfdir@/nix";
-$storeDir = $ENV{"NIX_STORE_DIR"} || "@nixstoredir@";
-
-$useBindings = 1;
+$binDir = Nix::Store::getBinDir;
+$storeDir = Nix::Store::getStoreDir;
 
 %config = ();
 


@@ -2,7 +2,6 @@ package Nix::Store;
 
 use strict;
 use warnings;
-use Nix::Config;
 
 require Exporter;
 
@@ -22,6 +21,7 @@ our @EXPORT = qw(
     addToStore makeFixedOutputPath
     derivationFromPath
     addTempRoot
+    getBinDir getStoreDir
 );
 
 our $VERSION = '0.15';
@@ -34,62 +34,8 @@ sub backtick {
     return $res;
 }
 
-if ($Nix::Config::useBindings) {
-    require XSLoader;
-    XSLoader::load('Nix::Store', $VERSION);
-} else {
-
-    # Provide slow fallbacks of some functions on platforms that don't
-    # support the Perl bindings.
-
-    use File::Temp;
-    use Fcntl qw/F_SETFD/;
-
-    *hashFile = sub {
-        my ($algo, $base32, $path) = @_;
-        my $res = backtick("$Nix::Config::binDir/nix-hash", "--flat", $path, "--type", $algo, $base32 ? "--base32" : ());
-        chomp $res;
-        return $res;
-    };
-
-    *hashPath = sub {
-        my ($algo, $base32, $path) = @_;
-        my $res = backtick("$Nix::Config::binDir/nix-hash", $path, "--type", $algo, $base32 ? "--base32" : ());
-        chomp $res;
-        return $res;
-    };
-
-    *hashString = sub {
-        my ($algo, $base32, $s) = @_;
-        my $fh = File::Temp->new();
-        print $fh $s;
-        my $res = backtick("$Nix::Config::binDir/nix-hash", $fh->filename, "--type", $algo, $base32 ? "--base32" : ());
-        chomp $res;
-        return $res;
-    };
-
-    *addToStore = sub {
-        my ($srcPath, $recursive, $algo) = @_;
-        die "not implemented" if $recursive || $algo ne "sha256";
-        my $res = backtick("$Nix::Config::binDir/nix-store", "--add", $srcPath);
-        chomp $res;
-        return $res;
-    };
-
-    *isValidPath = sub {
-        my ($path) = @_;
-        my $res = backtick("$Nix::Config::binDir/nix-store", "--check-validity", "--print-invalid", $path);
-        chomp $res;
-        return $res ne $path;
-    };
-
-    *queryPathHash = sub {
-        my ($path) = @_;
-        my $res = backtick("$Nix::Config::binDir/nix-store", "--query", "--hash", $path);
-        chomp $res;
-        return $res;
-    };
-
-}
+require XSLoader;
+XSLoader::load('Nix::Store', $VERSION);
 
 1;
 __END__


@@ -351,3 +351,13 @@ void addTempRoot(char * storePath)
     } catch (Error & e) {
         croak("%s", e.what());
     }
 
+
+SV * getBinDir()
+    PPCODE:
+        XPUSHs(sv_2mortal(newSVpv(settings.nixBinDir.c_str(), 0)));
+
+
+SV * getStoreDir()
+    PPCODE:
+        XPUSHs(sv_2mortal(newSVpv(settings.nixStore.c_str(), 0)));


@@ -76,7 +76,7 @@ Path lookupFileArg(EvalState & state, string s)
     if (isUri(s)) {
         return state.store->toRealPath(
             fetchers::downloadTarball(
-                state.store, resolveUri(s), Headers {}, "source", false).first.storePath);
+                state.store, resolveUri(s), "source", false).first.storePath);
     } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);


@@ -356,6 +356,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
     , sEpsilon(symbols.create(""))
     , repair(NoRepair)
     , store(store)
+    , regexCache(makeRegexCache())
     , baseEnv(allocEnv(128))
     , staticBaseEnv(false, 0)
 {


@@ -6,7 +6,6 @@
 #include "symbol-table.hh"
 #include "config.hh"
 
-#include <regex>
 #include <map>
 #include <optional>
 #include <unordered_map>
@@ -65,6 +64,11 @@ typedef std::list<SearchPathElem> SearchPath;
 
 void initGC();
 
+struct RegexCache;
+
+std::shared_ptr<RegexCache> makeRegexCache();
+
 class EvalState
 {
 public:
@@ -120,7 +124,7 @@ private:
     std::unordered_map<Path, Path> resolvedPaths;
 
     /* Cache used by prim_match(). */
-    std::unordered_map<std::string, std::regex> regexCache;
+    std::shared_ptr<RegexCache> regexCache;
 
 public:
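
The hunk above shrinks the header to a forward declaration of `RegexCache` plus a `makeRegexCache()` factory, so `<regex>` only has to be included where the cache is defined (a later hunk in this commit adds that definition next to `prim_match`). A minimal sketch of the same pattern with hypothetical names, not the actual Nix sources:

```cpp
// --- widget.hh (sketch): the cache type stays opaque here -------------------
#include <memory>
#include <string>

struct RegexCache;                             // defined only in the .cc file
std::shared_ptr<RegexCache> makeRegexCache();  // factory keeps the type hidden

class Evaluator {
    std::shared_ptr<RegexCache> regexCache = makeRegexCache();
public:
    bool matches(const std::string & re, const std::string & s);
};

// --- widget.cc (sketch): <regex> is needed only here ------------------------
#include <regex>
#include <unordered_map>

struct RegexCache {
    std::unordered_map<std::string, std::regex> cache;
};

std::shared_ptr<RegexCache> makeRegexCache()
{
    return std::make_shared<RegexCache>();
}

bool Evaluator::matches(const std::string & re, const std::string & s)
{
    // Compile each regular expression at most once per cache.
    auto it = regexCache->cache.find(re);
    if (it == regexCache->cache.end())
        it = regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first;
    return std::regex_match(s, it->second);
}
```

Using `std::shared_ptr` (rather than `unique_ptr`) is convenient here because its deleter is captured where the type is complete, so the header never needs the full definition.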


@@ -48,17 +48,17 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
                 resolvedRef = originalRef.resolve(state.store);
                 auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
                 if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
-                flakeCache.push_back({resolvedRef, fetchedResolved.value()});
-                fetched.emplace(fetchedResolved.value());
+                flakeCache.push_back({resolvedRef, *fetchedResolved});
+                fetched.emplace(*fetchedResolved);
             }
             else {
                 throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
             }
         }
-        flakeCache.push_back({originalRef, fetched.value()});
+        flakeCache.push_back({originalRef, *fetched});
     }
 
-    auto [tree, lockedRef] = fetched.value();
+    auto [tree, lockedRef] = *fetched;
 
     debug("got tree '%s' from '%s'",
         state.store->printStorePath(tree.storePath), lockedRef);
@@ -215,10 +215,9 @@ static Flake getFlake(
     if (auto outputs = vInfo.attrs->get(sOutputs)) {
         expectType(state, tLambda, *outputs->value, *outputs->pos);
 
-        flake.vOutputs = allocRootValue(outputs->value);
-
-        if ((*flake.vOutputs)->lambda.fun->matchAttrs) {
-            for (auto & formal : (*flake.vOutputs)->lambda.fun->formals->formals) {
+        if (outputs->value->lambda.fun->matchAttrs) {
+            for (auto & formal : outputs->value->lambda.fun->formals->formals) {
                 if (formal.name != state.sSelf)
                     flake.inputs.emplace(formal.name, FlakeInput {
                         .ref = parseFlakeRef(formal.name)
@@ -248,7 +247,7 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
 }
 
 /* Compute an in-memory lock file for the specified top-level flake,
-   and optionally write it to file, it the flake is writable. */
+   and optionally write it to file, if the flake is writable. */
 LockedFlake lockFlake(
     EvalState & state,
     const FlakeRef & topRef,
@@ -367,7 +366,7 @@ LockedFlake lockFlake(
 
                 /* If we have an --update-input flag for an input
                    of this input, then we must fetch the flake to
-                   to update it. */
+                   update it. */
                 auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
 
                 auto hasChildUpdate =
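
The first hunk above swaps `optional::value()` for `operator*` on optionals that the surrounding code has already guaranteed to be engaged. A small illustrative sketch of the difference (not from the Nix sources):

```cpp
// .value() re-checks engagement and can throw std::bad_optional_access;
// *opt is unchecked and relies on the caller having established a value,
// which is exactly the situation in fetchOrSubstituteTree above.
#include <cassert>
#include <optional>
#include <string>

std::optional<std::string> lookupInCache(bool hit)
{
    if (hit) return std::string("cached tree");
    return std::nullopt;
}

int main()
{
    auto fetched = lookupInCache(false);
    if (!fetched)
        fetched.emplace("freshly fetched tree");   // now guaranteed engaged

    assert(fetched.has_value());
    std::string a = fetched.value();   // checked access (may throw)
    std::string b = *fetched;          // unchecked access, same result here
    return a == b ? 0 : 1;
}
```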


@@ -34,7 +34,6 @@ struct Flake
     std::optional<std::string> description;
     std::shared_ptr<const fetchers::Tree> sourceInfo;
     FlakeInputs inputs;
-    RootValue vOutputs;
     ~Flake();
 };
 


@@ -1,6 +1,7 @@
 #include "flakeref.hh"
 #include "store-api.hh"
 #include "url.hh"
+#include "url-parts.hh"
 #include "fetchers.hh"
 #include "registry.hh"
 


@@ -1,5 +1,6 @@
 #include "lockfile.hh"
 #include "store-api.hh"
+#include "url-parts.hh"
 
 #include <nlohmann/json.hpp>
 


@@ -115,6 +115,14 @@ public:
     {
         return handle_value<void(Value&, const char*)>(mkString, val.c_str());
     }
 
+#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
+    bool binary(binary_t&)
+    {
+        // This function ought to be unreachable
+        assert(false);
+        return true;
+    }
+#endif
+
     bool start_object(std::size_t len)
     {


@@ -42,6 +42,6 @@ $(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
 $(foreach i, $(wildcard src/libexpr/flake/*.hh), \
   $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
 
-$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
+$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh
 
 $(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh


@@ -614,8 +614,7 @@ Path resolveExprPath(Path path)
         // Basic cycle/depth limit to avoid infinite loops.
         if (++followCount >= maxFollow)
             throw Error("too many symbolic links encountered while traversing the path '%s'", path);
-        if (lstat(path.c_str(), &st))
-            throw SysError("getting status of '%s'", path);
+        st = lstat(path);
         if (!S_ISLNK(st.st_mode)) break;
         path = absPath(readLink(path), dirOf(path));
     }
@@ -719,7 +718,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
     if (isUri(elem.second)) {
         try {
             res = { true, store->toRealPath(fetchers::downloadTarball(
-                        store, resolveUri(elem.second), Headers {}, "source", false).first.storePath) };
+                        store, resolveUri(elem.second), "source", false).first.storePath) };
         } catch (FileTransferError & e) {
             logWarning({
                 .name = "Entry download",
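
The first hunk above (and several later ones in this commit) replaces a raw `lstat(2)` call plus manual error check with a wrapper that returns the `struct stat` and throws on failure. A standalone sketch of such a wrapper, with hypothetical names and `std::system_error` standing in for Nix's own `SysError`:

```cpp
#include <cerrno>
#include <string>
#include <system_error>
#include <sys/stat.h>

// Throwing lstat wrapper: callers get a value and a guaranteed-checked
// error path instead of an easily forgotten `if (lstat(...)) throw ...`.
static struct stat lstatOrThrow(const std::string & path)
{
    struct stat st;
    if (::lstat(path.c_str(), &st))
        throw std::system_error(errno, std::generic_category(),
            "getting status of '" + path + "'");
    return st;
}

int main()
{
    auto st = lstatOrThrow("/");
    return S_ISDIR(st.st_mode) ? 0 : 1;
}
```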


@@ -2236,6 +2236,10 @@ static RegisterPrimOp primop_catAttrs({
 static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
     state.forceValue(*args[0], pos);
+    if (args[0]->type == tPrimOpApp || args[0]->type == tPrimOp) {
+        state.mkAttrs(v, 0);
+        return;
+    }
     if (args[0]->type != tLambda)
         throw TypeError({
             .hint = hintfmt("'functionArgs' requires a function"),
@@ -3085,17 +3089,25 @@ static RegisterPrimOp primop_hashString({
     .fun = prim_hashString,
 });
 
-/* Match a regular expression against a string and return either
-   null or a list containing substring matches. */
+struct RegexCache
+{
+    std::unordered_map<std::string, std::regex> cache;
+};
+
+std::shared_ptr<RegexCache> makeRegexCache()
+{
+    return std::make_shared<RegexCache>();
+}
+
 void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
     auto re = state.forceStringNoCtx(*args[0], pos);
 
     try {
 
-        auto regex = state.regexCache.find(re);
-        if (regex == state.regexCache.end())
-            regex = state.regexCache.emplace(re, std::regex(re, std::regex::extended)).first;
+        auto regex = state.regexCache->cache.find(re);
+        if (regex == state.regexCache->cache.end())
+            regex = state.regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first;
 
         PathSet context;
         const std::string str = state.forceString(*args[1], context, pos);
@@ -3565,13 +3577,11 @@ void EvalState::createBaseEnv()
 
     /* Add a wrapper around the derivation primop that computes the
        `drvPath' and `outPath' attributes lazily. */
-    try {
-        string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
-        sDerivationNix = symbols.create(path);
-        evalFile(path, v);
+    sDerivationNix = symbols.create("//builtin/derivation.nix");
+    eval(parse(
+        #include "primops/derivation.nix.gen.hh"
+        , foFile, sDerivationNix, "/", staticBaseEnv), v);
     addConstant("derivation", v);
-    } catch (SysError &) {
-    }
 
     /* Now that we've added all primops, sort the `builtins' set,
        because attribute lookups expect it to be sorted. */


@@ -3,8 +3,7 @@
 #include "store-api.hh"
 #include "fetchers.hh"
 #include "url.hh"
+#include "url-parts.hh"
 
-#include <regex>
-
 namespace nix {
 


@@ -201,8 +201,8 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 
     auto storePath =
         unpack
-        ? fetchers::downloadTarball(state.store, *url, Headers {}, name, (bool) expectedHash).first.storePath
-        : fetchers::downloadFile(state.store, *url, Headers{}, name, (bool) expectedHash).storePath;
+        ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
+        : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
 
     auto path = state.store->toRealPath(storePath);


@@ -73,7 +73,7 @@ public:
 
     StorePath computeStorePath(Store & store) const;
 
-    // Convience functions for common attributes.
+    // Convenience functions for common attributes.
     std::string getType() const;
     std::optional<Hash> getNarHash() const;
     std::optional<std::string> getRef() const;
@@ -118,15 +118,15 @@ struct DownloadFileResult
 DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
-    const Headers & headers,
     const std::string & name,
-    bool immutable);
+    bool immutable,
+    const Headers & headers = {});
 
 std::pair<Tree, time_t> downloadTarball(
     ref<Store> store,
     const std::string & url,
-    const Headers & headers,
     const std::string & name,
-    bool immutable);
+    bool immutable,
+    const Headers & headers = {});
 
 }
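
The second hunk above moves `headers` to a defaulted trailing parameter of `downloadFile`/`downloadTarball`, which is why the other hunks in this commit can simply drop the explicit `Headers {}` argument while the token-carrying fetchers keep passing headers last. A simplified sketch of the call-site effect, using hypothetical types rather than the real libfetchers API:

```cpp
#include <string>
#include <utility>
#include <vector>

using Headers = std::vector<std::pair<std::string, std::string>>;

struct DownloadFileResult { std::string storePath; };

// Defaulted trailing parameter: plain callers can omit it entirely.
DownloadFileResult downloadFile(
    const std::string & url,
    const std::string & name,
    bool immutable,
    const Headers & headers = {})
{
    // ... perform the transfer, attaching 'headers' to the request ...
    (void) url; (void) immutable; (void) headers;
    return { "/nix/store/<hash>-" + name };
}

int main()
{
    // Common case, matching most updated call sites: no custom headers.
    auto a = downloadFile("https://example.org/x.tar.gz", "source", false);

    // Authenticated case (as in the github/gitlab fetchers): headers go last.
    Headers h{{"Authorization", "token <redacted>"}};
    auto b = downloadFile("https://api.example.org/tarball", "source", false, h);

    return a.storePath == b.storePath ? 0 : 1;
}
```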


@@ -3,6 +3,7 @@
 #include "globals.hh"
 #include "tarfile.hh"
 #include "store-api.hh"
+#include "url-parts.hh"
 
 #include <sys/time.h>
 


@ -4,6 +4,7 @@
#include "globals.hh" #include "globals.hh"
#include "store-api.hh" #include "store-api.hh"
#include "types.hh" #include "types.hh"
#include "url-parts.hh"
#include <optional> #include <optional>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
@ -19,9 +20,9 @@ struct DownloadUrl
: url(url), headers(headers) { } : url(url), headers(headers) { }
}; };
// A github or gitlab url // A github or gitlab host
const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
std::regex urlRegex(urlRegexS, std::regex::ECMAScript); std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme struct GitArchiveInputScheme : InputScheme
{ {
@ -64,7 +65,7 @@ struct GitArchiveInputScheme : InputScheme
ref = value; ref = value;
} }
else if (name == "host") { else if (name == "host") {
if (!std::regex_match(value, urlRegex)) if (!std::regex_match(value, hostRegex))
throw BadURL("URL '%s' contains an invalid instance host", url.url); throw BadURL("URL '%s' contains an invalid instance host", url.url);
host_url = value; host_url = value;
} }
@ -204,7 +205,7 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input); auto url = getDownloadUrl(input);
auto [tree, lastModified] = downloadTarball(store, url.url, url.headers, "source", true); auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
input.attrs.insert_or_assign("lastModified", lastModified); input.attrs.insert_or_assign("lastModified", lastModified);
@ -247,7 +248,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
auto json = nlohmann::json::parse( auto json = nlohmann::json::parse(
readFile( readFile(
store->toRealPath( store->toRealPath(
downloadFile(store, url, headers, "source", false).storePath))); downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev()); debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev; return rev;
@ -310,7 +311,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
auto json = nlohmann::json::parse( auto json = nlohmann::json::parse(
readFile( readFile(
store->toRealPath( store->toRealPath(
downloadFile(store, url, headers, "source", false).storePath))); downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev()); debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev; return rev;


@@ -1,4 +1,5 @@
 #include "fetchers.hh"
+#include "url-parts.hh"
 
 namespace nix::fetchers {
 


@@ -3,6 +3,7 @@
 #include "globals.hh"
 #include "tarfile.hh"
 #include "store-api.hh"
+#include "url-parts.hh"
 
 #include <sys/time.h>
 


@@ -145,7 +145,7 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
     auto path = settings.flakeRegistry.get();
 
     if (!hasPrefix(path, "/")) {
-        auto storePath = downloadFile(store, path, Headers {}, "flake-registry.json", false).storePath;
+        auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
         if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
             store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json");
         path = store->toRealPath(storePath);


@@ -12,9 +12,9 @@ namespace nix::fetchers {
 
 DownloadFileResult downloadFile(
     ref<Store> store,
     const std::string & url,
-    const Headers & headers,
     const std::string & name,
-    bool immutable)
+    bool immutable,
+    const Headers & headers)
 {
     // FIXME: check store
@@ -38,7 +38,8 @@ DownloadFileResult downloadFile(
     if (cached && !cached->expired)
         return useCached();
 
-    FileTransferRequest request(url, headers);
+    FileTransferRequest request(url);
+    request.headers = headers;
     if (cached)
         request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
     FileTransferResult res;
@@ -112,9 +113,9 @@ DownloadFileResult downloadFile(
 std::pair<Tree, time_t> downloadTarball(
     ref<Store> store,
     const std::string & url,
-    const Headers & headers,
     const std::string & name,
-    bool immutable)
+    bool immutable,
+    const Headers & headers)
 {
     Attrs inAttrs({
         {"type", "tarball"},
@@ -130,7 +131,7 @@ std::pair<Tree, time_t> downloadTarball(
             getIntAttr(cached->infoAttrs, "lastModified")
         };
 
-    auto res = downloadFile(store, url, headers, name, immutable);
+    auto res = downloadFile(store, url, name, immutable, headers);
 
     std::optional<StorePath> unpackedStorePath;
     time_t lastModified;
@@ -225,7 +226,7 @@ struct TarballInputScheme : InputScheme
 
     std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
     {
-        auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), Headers {}, "source", false).first;
+        auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first;
         return {std::move(tree), input};
     }
 };


@@ -256,7 +256,7 @@ public:
         }
 
         else if (type == resBuildLogLine || type == resPostBuildLogLine) {
-            auto lastLine = trim(getS(fields, 0));
+            auto lastLine = chomp(getS(fields, 0));
             if (!lastLine.empty()) {
                 auto i = state->its.find(act);
                 assert(i != state->its.end());
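
The hunk above switches the progress bar from `trim` to `chomp` when displaying build log lines, i.e. it stops stripping the leading part of the line. A sketch of the distinction with hypothetical standalone helpers (Nix has its own versions in libutil; their exact whitespace handling may differ):

```cpp
#include <cassert>
#include <string>

// Strip whitespace from both ends.
static std::string trim(std::string s)
{
    const char * ws = " \t\r\n";
    auto b = s.find_first_not_of(ws);
    if (b == std::string::npos) return "";
    auto e = s.find_last_not_of(ws);
    return s.substr(b, e - b + 1);
}

// Strip only trailing line endings, keeping leading indentation intact.
static std::string chomp(std::string s)
{
    while (!s.empty() && (s.back() == '\n' || s.back() == '\r'))
        s.pop_back();
    return s;
}

int main()
{
    std::string logLine = "    CC  src/libstore/build.o\n";
    assert(trim(logLine)  == "CC  src/libstore/build.o");      // indentation lost
    assert(chomp(logLine) == "    CC  src/libstore/build.o");  // indentation kept
    return 0;
}
```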


@@ -11,6 +11,7 @@
 #include "nar-accessor.hh"
 #include "json.hh"
 #include "thread-pool.hh"
+#include "callback.hh"
 
 #include <chrono>
 #include <future>


@ -17,6 +17,7 @@
#include "daemon.hh" #include "daemon.hh"
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "topo-sort.hh" #include "topo-sort.hh"
#include "callback.hh"
#include <algorithm> #include <algorithm>
#include <iostream> #include <iostream>
@ -295,9 +296,21 @@ public:
~Worker(); ~Worker();
/* Make a goal (with caching). */ /* Make a goal (with caching). */
GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(const StorePath & drvPath, /* derivation goal */
const BasicDerivation & drv, BuildMode buildMode = bmNormal); private:
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
const StorePath & drvPath, const StringSet & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
public:
std::shared_ptr<DerivationGoal> makeDerivationGoal(
const StorePath & drvPath,
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
const StorePath & drvPath, const BasicDerivation & drv,
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
/* substitution goal */
GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt); GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
/* Remove a dead goal. */ /* Remove a dead goal. */
@ -948,10 +961,12 @@ private:
friend struct RestrictedStore; friend struct RestrictedStore;
public: public:
DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, DerivationGoal(const StorePath & drvPath,
Worker & worker, BuildMode buildMode = bmNormal); const StringSet & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
Worker & worker, BuildMode buildMode = bmNormal); const StringSet & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
~DerivationGoal(); ~DerivationGoal();
/* Whether we need to perform hash rewriting if there are valid output paths. */ /* Whether we need to perform hash rewriting if there are valid output paths. */
@ -993,6 +1008,8 @@ private:
void tryLocalBuild(); void tryLocalBuild();
void buildDone(); void buildDone();
void resolvedFinished();
/* Is the build hook willing to perform the build? */ /* Is the build hook willing to perform the build? */
HookReply tryBuildHook(); HookReply tryBuildHook();
@ -1084,8 +1101,8 @@ private:
const Path DerivationGoal::homeDir = "/homeless-shelter"; const Path DerivationGoal::homeDir = "/homeless-shelter";
DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, DerivationGoal::DerivationGoal(const StorePath & drvPath,
Worker & worker, BuildMode buildMode) const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker) : Goal(worker)
, useDerivation(true) , useDerivation(true)
, drvPath(drvPath) , drvPath(drvPath)
@ -1093,7 +1110,9 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want
, buildMode(buildMode) , buildMode(buildMode)
{ {
state = &DerivationGoal::getDerivation; state = &DerivationGoal::getDerivation;
name = fmt("building of '%s'", worker.store.printStorePath(this->drvPath)); name = fmt(
"building of '%s' from .drv file",
StorePathWithOutputs { drvPath, wantedOutputs }.to_string(worker.store));
trace("created"); trace("created");
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds); mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
@ -1102,15 +1121,18 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want
DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
Worker & worker, BuildMode buildMode) const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker) : Goal(worker)
, useDerivation(false) , useDerivation(false)
, drvPath(drvPath) , drvPath(drvPath)
, wantedOutputs(wantedOutputs)
, buildMode(buildMode) , buildMode(buildMode)
{ {
this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv)); this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
state = &DerivationGoal::haveDerivation; state = &DerivationGoal::haveDerivation;
name = fmt("building of %s", StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store)); name = fmt(
"building of '%s' from in-memory derivation",
StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store));
trace("created"); trace("created");
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds); mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
@ -1463,8 +1485,40 @@ void DerivationGoal::inputsRealised()
/* Determine the full set of input paths. */ /* Determine the full set of input paths. */
/* First, the input derivations. */ /* First, the input derivations. */
if (useDerivation) if (useDerivation) {
for (auto & [depDrvPath, wantedDepOutputs] : dynamic_cast<Derivation *>(drv.get())->inputDrvs) { auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
if (!fullDrv.inputDrvs.empty() && fullDrv.type() == DerivationType::CAFloating) {
/* We are be able to resolve this derivation based on the
now-known results of dependencies. If so, we become a stub goal
aliasing that resolved derivation goal */
std::optional attempt = fullDrv.tryResolve(worker.store);
assert(attempt);
Derivation drvResolved { *std::move(attempt) };
auto pathResolved = writeDerivation(worker.store, drvResolved);
/* Add to memotable to speed up downstream goal's queries with the
original derivation. */
drvPathResolutions.lock()->insert_or_assign(drvPath, pathResolved);
auto msg = fmt("Resolved derivation: '%s' -> '%s'",
worker.store.printStorePath(drvPath),
worker.store.printStorePath(pathResolved));
act = std::make_unique<Activity>(*logger, lvlInfo, actBuildWaiting, msg,
Logger::Fields {
worker.store.printStorePath(drvPath),
worker.store.printStorePath(pathResolved),
});
auto resolvedGoal = worker.makeDerivationGoal(
pathResolved, wantedOutputs, buildMode);
addWaitee(resolvedGoal);
state = &DerivationGoal::resolvedFinished;
return;
}
for (auto & [depDrvPath, wantedDepOutputs] : fullDrv.inputDrvs) {
/* Add the relevant output closures of the input derivation /* Add the relevant output closures of the input derivation
`i' as input paths. Only add the closures of output paths `i' as input paths. Only add the closures of output paths
that are specified as inputs. */ that are specified as inputs. */
@ -1484,6 +1538,7 @@ void DerivationGoal::inputsRealised()
worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath)); worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath));
} }
} }
}
/* Second, the input sources. */ /* Second, the input sources. */
worker.store.computeFSClosure(drv->inputSrcs, inputPaths); worker.store.computeFSClosure(drv->inputSrcs, inputPaths);
@ -1611,6 +1666,13 @@ void DerivationGoal::tryToBuild()
actLock.reset(); actLock.reset();
state = &DerivationGoal::tryLocalBuild;
worker.wakeUp(shared_from_this());
}
void DerivationGoal::tryLocalBuild() {
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
/* Make sure that we are allowed to start a build. If this /* Make sure that we are allowed to start a build. If this
derivation prefers to be done locally, do it even if derivation prefers to be done locally, do it even if
maxBuildJobs is 0. */ maxBuildJobs is 0. */
@ -1621,12 +1683,6 @@ void DerivationGoal::tryToBuild()
return; return;
} }
state = &DerivationGoal::tryLocalBuild;
worker.wakeUp(shared_from_this());
}
void DerivationGoal::tryLocalBuild() {
/* If `build-users-group' is not empty, then we have to build as /* If `build-users-group' is not empty, then we have to build as
one of the members of that group. */ one of the members of that group. */
if (settings.buildUsersGroup != "" && getuid() == 0) { if (settings.buildUsersGroup != "" && getuid() == 0) {
@ -1674,7 +1730,34 @@ void DerivationGoal::tryLocalBuild() {
} }
void replaceValidPath(const Path & storePath, const Path tmpPath) static void chmod_(const Path & path, mode_t mode)
{
if (chmod(path.c_str(), mode) == -1)
throw SysError("setting permissions on '%s'", path);
}
/* Move/rename path 'src' to 'dst'. Temporarily make 'src' writable if
it's a directory and we're not root (to be able to update the
directory's parent link ".."). */
static void movePath(const Path & src, const Path & dst)
{
auto st = lstat(src);
bool changePerm = (geteuid() && S_ISDIR(st.st_mode) && !(st.st_mode & S_IWUSR));
if (changePerm)
chmod_(src, st.st_mode | S_IWUSR);
if (rename(src.c_str(), dst.c_str()))
throw SysError("renaming '%1%' to '%2%'", src, dst);
if (changePerm)
chmod_(dst, st.st_mode);
}
void replaceValidPath(const Path & storePath, const Path & tmpPath)
{ {
/* We can't atomically replace storePath (the original) with /* We can't atomically replace storePath (the original) with
tmpPath (the replacement), so we have to move it out of the tmpPath (the replacement), so we have to move it out of the
@ -1682,11 +1765,20 @@ void replaceValidPath(const Path & storePath, const Path tmpPath)
we're repairing (say) Glibc, we end up with a broken system. */ we're repairing (say) Glibc, we end up with a broken system. */
Path oldPath = (format("%1%.old-%2%-%3%") % storePath % getpid() % random()).str(); Path oldPath = (format("%1%.old-%2%-%3%") % storePath % getpid() % random()).str();
if (pathExists(storePath)) if (pathExists(storePath))
rename(storePath.c_str(), oldPath.c_str()); movePath(storePath, oldPath);
if (rename(tmpPath.c_str(), storePath.c_str()) == -1) {
rename(oldPath.c_str(), storePath.c_str()); // attempt to recover try {
throw SysError("moving '%s' to '%s'", tmpPath, storePath); movePath(tmpPath, storePath);
} catch (...) {
try {
// attempt to recover
movePath(oldPath, storePath);
} catch (...) {
ignoreException();
}
throw;
} }
deletePath(oldPath); deletePath(oldPath);
} }
@ -1905,6 +1997,9 @@ void DerivationGoal::buildDone()
done(BuildResult::Built); done(BuildResult::Built);
} }
void DerivationGoal::resolvedFinished() {
done(BuildResult::Built);
}
HookReply DerivationGoal::tryBuildHook() HookReply DerivationGoal::tryBuildHook()
{ {
@ -2004,13 +2099,6 @@ HookReply DerivationGoal::tryBuildHook()
} }
static void chmod_(const Path & path, mode_t mode)
{
if (chmod(path.c_str(), mode) == -1)
throw SysError("setting permissions on '%s'", path);
}
int childEntry(void * arg) int childEntry(void * arg)
{ {
((DerivationGoal *) arg)->runChild(); ((DerivationGoal *) arg)->runChild();
@ -2366,10 +2454,7 @@ void DerivationGoal::startBuilder()
for (auto & i : inputPaths) { for (auto & i : inputPaths) {
auto p = worker.store.printStorePath(i); auto p = worker.store.printStorePath(i);
Path r = worker.store.toRealPath(p); Path r = worker.store.toRealPath(p);
struct stat st; if (S_ISDIR(lstat(r).st_mode))
if (lstat(r.c_str(), &st))
throw SysError("getting attributes of path '%s'", p);
if (S_ISDIR(st.st_mode))
dirsInChroot.insert_or_assign(p, r); dirsInChroot.insert_or_assign(p, r);
else else
linkOrCopy(r, chrootRootDir + p); linkOrCopy(r, chrootRootDir + p);
@ -2949,14 +3034,6 @@ struct RestrictedStore : public LocalFSStore, public virtual RestrictedStoreConf
goal.addDependency(info.path); goal.addDependency(info.path);
} }
StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
{
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
goal.addDependency(path);
return path;
}
StorePath addTextToStore(const string & name, const string & s, StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair = NoRepair) override const StorePathSet & references, RepairFlag repair = NoRepair) override
{ {
@ -3151,9 +3228,7 @@ void DerivationGoal::addDependency(const StorePath & path)
if (pathExists(target)) if (pathExists(target))
throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path)); throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path));
struct stat st; auto st = lstat(source);
if (lstat(source.c_str(), &st))
throw SysError("getting attributes of path '%s'", source);
if (S_ISDIR(st.st_mode)) { if (S_ISDIR(st.st_mode)) {
@ -3742,29 +3817,6 @@ void DerivationGoal::runChild()
} }
static void moveCheckToStore(const Path & src, const Path & dst)
{
/* For the rename of directory to succeed, we must be running as root or
the directory must be made temporarily writable (to update the
directory's parent link ".."). */
struct stat st;
if (lstat(src.c_str(), &st) == -1) {
throw SysError("getting attributes of path '%1%'", src);
}
bool changePerm = (geteuid() && S_ISDIR(st.st_mode) && !(st.st_mode & S_IWUSR));
if (changePerm)
chmod_(src, st.st_mode | S_IWUSR);
if (rename(src.c_str(), dst.c_str()))
throw SysError("renaming '%1%' to '%2%'", src, dst);
if (changePerm)
chmod_(dst, st.st_mode);
}
void DerivationGoal::registerOutputs() void DerivationGoal::registerOutputs()
{ {
/* When using a build hook, the build hook can register the output /* When using a build hook, the build hook can register the output
@ -3865,7 +3917,7 @@ void DerivationGoal::registerOutputs()
something like that. */ something like that. */
canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen); canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen);
debug("scanning for references for output %1 in temp location '%1%'", outputName, actualPath); debug("scanning for references for output '%s' in temp location '%s'", outputName, actualPath);
/* Pass blank Sink as we are not ready to hash data at this stage. */ /* Pass blank Sink as we are not ready to hash data at this stage. */
NullSink blank; NullSink blank;
@ -3920,7 +3972,6 @@ void DerivationGoal::registerOutputs()
outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() }; outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() };
}; };
bool rewritten = false;
std::optional<StorePathSet> referencesOpt = std::visit(overloaded { std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
[&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> { [&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
finish(skippedFinalPath.path); finish(skippedFinalPath.path);
@ -3951,7 +4002,9 @@ void DerivationGoal::registerOutputs()
StringSource source(*sink.s); StringSource source(*sink.s);
restorePath(actualPath, source); restorePath(actualPath, source);
rewritten = true; /* FIXME: set proper permissions in restorePath() so
we don't have to do another traversal. */
canonicalisePathMetaData(actualPath, -1, inodesSeen);
} }
}; };
@ -4034,7 +4087,7 @@ void DerivationGoal::registerOutputs()
[&](DerivationOutputInputAddressed output) { [&](DerivationOutputInputAddressed output) {
/* input-addressed case */ /* input-addressed case */
auto requiredFinalPath = output.path; auto requiredFinalPath = output.path;
/* Preemtively add rewrite rule for final hash, as that is /* Preemptively add rewrite rule for final hash, as that is
what the NAR hash will use rather than normalized-self references */ what the NAR hash will use rather than normalized-self references */
if (scratchPath != requiredFinalPath) if (scratchPath != requiredFinalPath)
outputRewrites.insert_or_assign( outputRewrites.insert_or_assign(
@ -4108,44 +4161,21 @@ void DerivationGoal::registerOutputs()
else. No moving needed. */ else. No moving needed. */
assert(newInfo.ca); assert(newInfo.ca);
} else { } else {
/* Temporarily add write perm so we can move, will be fixed auto destPath = worker.store.toRealPath(finalDestPath);
later. */ movePath(actualPath, destPath);
{ actualPath = destPath;
struct stat st;
auto & mode = st.st_mode;
if (lstat(actualPath.c_str(), &st))
throw SysError("getting attributes of path '%1%'", actualPath);
mode |= 0200;
/* Try to change the perms, but only if the file isn't a
symlink as symlinks permissions are mostly ignored and
calling `chmod` on it will just forward the call to the
target of the link. */
if (!S_ISLNK(st.st_mode))
if (chmod(actualPath.c_str(), mode) == -1)
throw SysError("changing mode of '%1%' to %2$o", actualPath, mode);
}
if (rename(
actualPath.c_str(),
worker.store.toRealPath(finalDestPath).c_str()) == -1)
throw SysError("moving build output '%1%' from it's temporary location to the Nix store", finalDestPath);
actualPath = worker.store.toRealPath(finalDestPath);
} }
} }
/* Get rid of all weird permissions. This also checks that
all files are owned by the build user, if applicable. */
canonicalisePathMetaData(actualPath,
buildUser && !rewritten ? buildUser->getUID() : -1, inodesSeen);
if (buildMode == bmCheck) { if (buildMode == bmCheck) {
if (!worker.store.isValidPath(newInfo.path)) continue; if (!worker.store.isValidPath(newInfo.path)) continue;
ValidPathInfo oldInfo(*worker.store.queryPathInfo(newInfo.path)); ValidPathInfo oldInfo(*worker.store.queryPathInfo(newInfo.path));
if (newInfo.narHash != oldInfo.narHash) { if (newInfo.narHash != oldInfo.narHash) {
worker.checkMismatch = true; worker.checkMismatch = true;
if (settings.runDiffHook || settings.keepFailed) { if (settings.runDiffHook || settings.keepFailed) {
Path dst = worker.store.toRealPath(finalDestPath + checkSuffix); auto dst = worker.store.toRealPath(finalDestPath + checkSuffix);
deletePath(dst); deletePath(dst);
moveCheckToStore(actualPath, dst); movePath(actualPath, dst);
handleDiffHook( handleDiffHook(
buildUser ? buildUser->getUID() : getuid(), buildUser ? buildUser->getUID() : getuid(),
@ -4265,11 +4295,13 @@ void DerivationGoal::registerOutputs()
/* Register each output path as valid, and register the sets of /* Register each output path as valid, and register the sets of
paths referenced by each of them. If there are cycles in the paths referenced by each of them. If there are cycles in the
outputs, this will fail. */ outputs, this will fail. */
ValidPathInfos infos2; {
for (auto & [outputName, newInfo] : infos) { ValidPathInfos infos2;
infos2.push_back(newInfo); for (auto & [outputName, newInfo] : infos) {
infos2.push_back(newInfo);
}
worker.store.registerValidPaths(infos2);
} }
worker.store.registerValidPaths(infos2);
/* In case of a fixed-output derivation hash mismatch, throw an /* In case of a fixed-output derivation hash mismatch, throw an
exception now that we have registered the output as valid. */ exception now that we have registered the output as valid. */
@ -4281,12 +4313,21 @@ void DerivationGoal::registerOutputs()
means it's safe to link the derivation to the output hash. We must do means it's safe to link the derivation to the output hash. We must do
that for floating CA derivations, which otherwise couldn't be cached, that for floating CA derivations, which otherwise couldn't be cached,
but it's fine to do in all cases. */ but it's fine to do in all cases. */
for (auto & [outputName, newInfo] : infos) { bool isCaFloating = drv->type() == DerivationType::CAFloating;
/* FIXME: we will want to track this mapping in the DB whether or
not we have a drv file. */ auto drvPathResolved = drvPath;
if (useDerivation) if (!useDerivation && isCaFloating) {
worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path); /* Once a floating CA derivations reaches this point, it
must already be resolved, so we don't bother trying to
downcast drv to get would would just be an empty
inputDrvs field. */
Derivation drv2 { *drv };
drvPathResolved = writeDerivation(worker.store, drv2);
} }
if (useDerivation || isCaFloating)
for (auto & [outputName, newInfo] : infos)
worker.store.linkDeriverToPath(drvPathResolved, outputName, newInfo.path);
} }
@ -4576,7 +4617,7 @@ void DerivationGoal::flushLine()
std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap() std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
{ {
if (drv->type() != DerivationType::CAFloating) { if (!useDerivation || drv->type() != DerivationType::CAFloating) {
std::map<std::string, std::optional<StorePath>> res; std::map<std::string, std::optional<StorePath>> res;
for (auto & [name, output] : drv->outputs) for (auto & [name, output] : drv->outputs)
res.insert_or_assign(name, output.path(worker.store, drv->name, name)); res.insert_or_assign(name, output.path(worker.store, drv->name, name));
@ -4588,7 +4629,7 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri
OutputPathMap DerivationGoal::queryDerivationOutputMap() OutputPathMap DerivationGoal::queryDerivationOutputMap()
{ {
if (drv->type() != DerivationType::CAFloating) { if (!useDerivation || drv->type() != DerivationType::CAFloating) {
OutputPathMap res; OutputPathMap res;
for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
res.insert_or_assign(name, *output.second); res.insert_or_assign(name, *output.second);
@ -5067,35 +5108,52 @@ Worker::~Worker()
} }
GoalPtr Worker::makeDerivationGoal(const StorePath & path, std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
const StringSet & wantedOutputs, BuildMode buildMode) const StorePath & drvPath,
const StringSet & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
{ {
GoalPtr goal = derivationGoals[path].lock(); // FIXME WeakGoalPtr & abstract_goal_weak = derivationGoals[drvPath];
if (!goal) { GoalPtr abstract_goal = abstract_goal_weak.lock(); // FIXME
goal = std::make_shared<DerivationGoal>(path, wantedOutputs, *this, buildMode); std::shared_ptr<DerivationGoal> goal;
derivationGoals.insert_or_assign(path, goal); if (!abstract_goal) {
goal = mkDrvGoal();
abstract_goal_weak = goal;
wakeUp(goal); wakeUp(goal);
} else } else {
(dynamic_cast<DerivationGoal *>(goal.get()))->addWantedOutputs(wantedOutputs); goal = std::dynamic_pointer_cast<DerivationGoal>(abstract_goal);
assert(goal);
goal->addWantedOutputs(wantedOutputs);
}
return goal; return goal;
} }
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath, std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, BuildMode buildMode) const StringSet & wantedOutputs, BuildMode buildMode)
{ {
auto goal = std::make_shared<DerivationGoal>(drvPath, drv, *this, buildMode); return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
wakeUp(goal); return std::make_shared<DerivationGoal>(drvPath, wantedOutputs, *this, buildMode);
return goal; });
}
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
return std::make_shared<DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode);
});
} }
GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca) GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
{ {
GoalPtr goal = substitutionGoals[path].lock(); // FIXME WeakGoalPtr & goal_weak = substitutionGoals[path];
GoalPtr goal = goal_weak.lock(); // FIXME
if (!goal) { if (!goal) {
goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca); goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca);
substitutionGoals.insert_or_assign(path, goal); goal_weak = goal;
wakeUp(goal); wakeUp(goal);
} }
return goal; return goal;
@ -5526,7 +5584,7 @@ BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDe
BuildMode buildMode) BuildMode buildMode)
{ {
Worker worker(*this); Worker worker(*this);
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, buildMode); auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
BuildResult result; BuildResult result;
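
The `makeDerivationGoalCommon`/`makeSubstitutionGoal` hunks above change the goal lookup to take a reference to the `weak_ptr` slot in the map, lock it, and only construct a new goal when the cached entry has expired. A generic sketch of that get-or-create pattern, using hypothetical names rather than the real `Worker`/`Goal` types in build.cc:

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct Goal {
    std::string key;
    explicit Goal(std::string k) : key(std::move(k)) {}
};

class Worker {
    std::map<std::string, std::weak_ptr<Goal>> goals;

public:
    std::shared_ptr<Goal> makeGoal(
        const std::string & key,
        std::function<std::shared_ptr<Goal>()> mkGoal)
    {
        // Holding a reference to the slot means a freshly created goal is
        // written back into the same map entry that was just probed.
        std::weak_ptr<Goal> & slot = goals[key];
        auto goal = slot.lock();
        if (!goal) {
            goal = mkGoal();   // cache miss (or the previous goal expired)
            slot = goal;
        }
        return goal;
    }
};

int main()
{
    Worker w;
    auto a = w.makeGoal("foo.drv", [] { return std::make_shared<Goal>("foo.drv"); });
    auto b = w.makeGoal("foo.drv", [] { return std::make_shared<Goal>("foo.drv"); });
    std::cout << (a == b ? "cached goal reused\n" : "new goal created\n");
    return 0;
}
```

Passing the goal constructor as a callback (`mkGoal`) lets one caching routine serve both the plain and the "basic derivation" goal factories, which is the point of the `makeDerivationGoalCommon` helper introduced above.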


@ -4,11 +4,13 @@
namespace nix { namespace nix {
std::string FixedOutputHash::printMethodAlgo() const { std::string FixedOutputHash::printMethodAlgo() const
{
return makeFileIngestionPrefix(method) + printHashType(hash.type); return makeFileIngestionPrefix(method) + printHashType(hash.type);
} }
std::string makeFileIngestionPrefix(const FileIngestionMethod m) { std::string makeFileIngestionPrefix(const FileIngestionMethod m)
{
switch (m) { switch (m) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
return ""; return "";
@ -26,7 +28,8 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+ hash.to_string(Base32, true); + hash.to_string(Base32, true);
} }
std::string renderContentAddress(ContentAddress ca) { std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded { return std::visit(overloaded {
[](TextHash th) { [](TextHash th) {
return "text:" + th.hash.to_string(Base32, true); return "text:" + th.hash.to_string(Base32, true);
@ -37,54 +40,97 @@ std::string renderContentAddress(ContentAddress ca) {
}, ca); }, ca);
} }
ContentAddress parseContentAddress(std::string_view rawCa) { std::string renderContentAddressMethod(ContentAddressMethod cam)
auto rest = rawCa; {
return std::visit(overloaded {
[](TextHashMethod &th) {
return std::string{"text:"} + printHashType(htSHA256);
},
[](FixedOutputHashMethod &fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
}
/*
Parses content address strings up to the hash.
*/
static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & rest)
{
std::string_view wholeInput { rest };
std::string_view prefix; std::string_view prefix;
{ {
auto optPrefix = splitPrefixTo(rest, ':'); auto optPrefix = splitPrefixTo(rest, ':');
if (!optPrefix) if (!optPrefix)
throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", rawCa); throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", wholeInput);
prefix = *optPrefix; prefix = *optPrefix;
} }
auto parseHashType_ = [&](){ auto parseHashType_ = [&](){
auto hashTypeRaw = splitPrefixTo(rest, ':'); auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw) if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", rawCa); throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
HashType hashType = parseHashType(*hashTypeRaw); HashType hashType = parseHashType(*hashTypeRaw);
return std::move(hashType); return std::move(hashType);
}; };
// Switch on prefix // Switch on prefix
if (prefix == "text") { if (prefix == "text") {
// No parsing of the method, "text" only support flat. // No parsing of the ingestion method, "text" only supports flat.
HashType hashType = parseHashType_(); HashType hashType = parseHashType_();
if (hashType != htSHA256) if (hashType != htSHA256)
throw Error("text content address hash should use %s, but instead uses %s", throw Error("text content address hash should use %s, but instead uses %s",
printHashType(htSHA256), printHashType(hashType)); printHashType(htSHA256), printHashType(hashType));
return TextHash { return TextHashMethod {};
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)),
};
} else if (prefix == "fixed") { } else if (prefix == "fixed") {
// Parse method // Parse method
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:")) if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive; method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_(); HashType hashType = parseHashType_();
return FixedOutputHash { return FixedOutputHashMethod {
.method = method, .fileIngestionMethod = method,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)), .hashType = std::move(hashType),
}; };
} else } else
throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix); throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto rest = rawCa;
ContentAddressMethod caMethod = parseContentAddressMethodPrefix(rest);
return std::visit(
overloaded {
[&](TextHashMethod thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
[&](FixedOutputHashMethod fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
});
},
}, caMethod);
}
ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
{
std::string asPrefix = std::string { caMethod } + ":";
std::string_view asPrefixView { asPrefix };
return parseContentAddressMethodPrefix(asPrefixView);
}
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
{
return rawCaOpt == "" ? std::optional<ContentAddress>() : parseContentAddress(rawCaOpt);
}; };
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) { std::string renderContentAddress(std::optional<ContentAddress> ca)
return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt); {
};
std::string renderContentAddress(std::optional<ContentAddress> ca) {
return ca ? renderContentAddress(*ca) : ""; return ca ? renderContentAddress(*ca) : "";
} }

View file

@ -55,4 +55,23 @@ std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
Hash getContentAddressHash(const ContentAddress & ca); Hash getContentAddressHash(const ContentAddress & ca);
/*
We only have one way to hash text with references, so this single-value
type is only useful in std::variant.
*/
struct TextHashMethod { };
struct FixedOutputHashMethod {
FileIngestionMethod fileIngestionMethod;
HashType hashType;
};
typedef std::variant<
TextHashMethod,
FixedOutputHashMethod
> ContentAddressMethod;
ContentAddressMethod parseContentAddressMethod(std::string_view rawCaMethod);
std::string renderContentAddressMethod(ContentAddressMethod caMethod);
} }
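The new `ContentAddressMethod` helpers round-trip between the textual prefixes used on the wire and the structured representation above. A minimal standalone sketch (not part of this commit; the `main` function and the hashed string are illustrative only):

```cpp
#include "content-address.hh"
#include "hash.hh"

#include <cassert>
#include <iostream>

using namespace nix;

int main()
{
    // "fixed:r:sha256" selects recursive (NAR) ingestion with SHA-256;
    // parsing and re-rendering the method should round-trip.
    ContentAddressMethod m = parseContentAddressMethod("fixed:r:sha256");
    assert(renderContentAddressMethod(m) == "fixed:r:sha256");

    // A full content address additionally carries the hash itself.
    FixedOutputHash foh {
        .method = FileIngestionMethod::Recursive,
        .hash = hashString(htSHA256, "hello"),
    };
    std::cout << renderContentAddress(ContentAddress { foh }) << std::endl;
}
```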

View file

@ -2,7 +2,6 @@
#include "monitor-fd.hh" #include "monitor-fd.hh"
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "store-api.hh" #include "store-api.hh"
#include "local-store.hh"
#include "finally.hh" #include "finally.hh"
#include "affinity.hh" #include "affinity.hh"
#include "archive.hh" #include "archive.hh"
@ -240,6 +239,23 @@ struct ClientSettings
} }
}; };
static void writeValidPathInfo(
ref<Store> store,
unsigned int clientVersion,
Sink & to,
std::shared_ptr<const ValidPathInfo> info)
{
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
}
static void performOp(TunnelLogger * logger, ref<Store> store, static void performOp(TunnelLogger * logger, ref<Store> store,
TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion, TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion,
Source & from, BufferedSink & to, unsigned int op) Source & from, BufferedSink & to, unsigned int op)
@ -350,47 +366,83 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} }
case wopAddToStore: { case wopAddToStore: {
HashType hashAlgo; if (GET_PROTOCOL_MINOR(clientVersion) >= 25) {
std::string baseName; auto name = readString(from);
FileIngestionMethod method; auto camStr = readString(from);
{ auto refs = readStorePaths<StorePathSet>(*store, from);
bool fixed; bool repairBool;
uint8_t recursive; from >> repairBool;
std::string hashAlgoRaw; auto repair = RepairFlag{repairBool};
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive) logger->startWork();
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive); auto pathInfo = [&]() {
method = FileIngestionMethod { recursive }; // NB: FramedSource must be out of scope before logger->stopWork();
/* Compatibility hack. */ ContentAddressMethod contentAddressMethod = parseContentAddressMethod(camStr);
if (!fixed) { FramedSource source(from);
hashAlgoRaw = "sha256"; // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
method = FileIngestionMethod::Recursive; return std::visit(overloaded {
[&](TextHashMethod &_) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](FixedOutputHashMethod &fohm) {
if (!refs.empty())
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
return store->queryPathInfo(path);
},
}, contentAddressMethod);
}();
logger->stopWork();
to << store->printStorePath(pathInfo->path);
writeValidPathInfo(store, clientVersion, to, pathInfo);
} else {
HashType hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
bool fixed;
uint8_t recursive;
std::string hashAlgoRaw;
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
hashAlgo = parseHashType(hashAlgoRaw);
} }
hashAlgo = parseHashType(hashAlgoRaw);
StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
} }
StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
break; break;
} }
@ -494,6 +546,20 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
are in fact content-addressed if we don't trust them. */ are in fact content-addressed if we don't trust them. */
assert(derivationIsCA(drv.type()) || trusted); assert(derivationIsCA(drv.type()) || trusted);
/* Recompute the derivation path when we cannot trust the original. */
if (!trusted) {
/* Recomputing the derivation path for input-addressed derivations
makes it harder to audit them after the fact, since we need the
original, not-necessarily-resolved derivation to verify the drv
derivation as an adequate claim to the input-addressed output
paths. */
assert(derivationIsCA(drv.type()));
Derivation drv2;
static_cast<BasicDerivation &>(drv2) = drv;
drvPath = writeDerivation(*store, Derivation { drv2 });
}
auto res = store->buildDerivation(drvPath, drv, buildMode); auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork(); logger->stopWork();
to << res.status << res.errorMsg; to << res.status << res.errorMsg;
@ -675,15 +741,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (info) { if (info) {
if (GET_PROTOCOL_MINOR(clientVersion) >= 17) if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1; to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "") writeValidPathInfo(store, clientVersion, to, info);
<< info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
} else { } else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
to << 0; to << 0;
@ -749,59 +807,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.ultimate = false; info.ultimate = false;
if (GET_PROTOCOL_MINOR(clientVersion) >= 23) { if (GET_PROTOCOL_MINOR(clientVersion) >= 23) {
struct FramedSource : Source
{
Source & from;
bool eof = false;
std::vector<unsigned char> pending;
size_t pos = 0;
FramedSource(Source & from) : from(from)
{ }
~FramedSource()
{
if (!eof) {
while (true) {
auto n = readInt(from);
if (!n) break;
std::vector<unsigned char> data(n);
from(data.data(), n);
}
}
}
size_t read(unsigned char * data, size_t len) override
{
if (eof) throw EndOfFile("reached end of FramedSource");
if (pos >= pending.size()) {
size_t len = readInt(from);
if (!len) {
eof = true;
return 0;
}
pending = std::vector<unsigned char>(len);
pos = 0;
from(pending.data(), len);
}
auto n = std::min(len, pending.size() - pos);
memcpy(data, pending.data() + pos, n);
pos += n;
return n;
}
};
logger->startWork(); logger->startWork();
{ {
FramedSource source(from); FramedSource source(from);
store->addToStore(info, source, (RepairFlag) repair, store->addToStore(info, source, (RepairFlag) repair,
dontCheckSigs ? NoCheckSigs : CheckSigs); dontCheckSigs ? NoCheckSigs : CheckSigs);
} }
logger->stopWork(); logger->stopWork();
} }

View file

@ -69,7 +69,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(Store & store, StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair) const Derivation & drv, RepairFlag repair, bool readOnly)
{ {
auto references = drv.inputSrcs; auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs) for (auto & i : drv.inputDrvs)
@ -79,7 +79,7 @@ StorePath writeDerivation(Store & store,
held during a garbage collection). */ held during a garbage collection). */
auto suffix = std::string(drv.name) + drvExtension; auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(store, false); auto contents = drv.unparse(store, false);
return settings.readOnlyMode return readOnly || settings.readOnlyMode
? store.computeStorePathForText(suffix, contents, references) ? store.computeStorePathForText(suffix, contents, references)
: store.addTextToStore(suffix, contents, references, repair); : store.addTextToStore(suffix, contents, references, repair);
} }
@ -644,4 +644,57 @@ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath
return "/" + hashString(htSHA256, clearText).to_string(Base32, false); return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
} }
// N.B. Outputs are left unchanged
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) {
debug("Rewriting the derivation");
for (auto &rewrite: rewrites) {
debug("rewriting %s as %s", rewrite.first, rewrite.second);
}
drv.builder = rewriteStrings(drv.builder, rewrites);
for (auto & arg: drv.args) {
arg = rewriteStrings(arg, rewrites);
}
StringPairs newEnv;
for (auto & envVar: drv.env) {
auto envName = rewriteStrings(envVar.first, rewrites);
auto envValue = rewriteStrings(envVar.second, rewrites);
newEnv.emplace(envName, envValue);
}
drv.env = newEnv;
}
Sync<DrvPathResolutions> drvPathResolutions;
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
BasicDerivation resolved { *this };
// Input paths that we'll want to rewrite in the derivation
StringMap inputRewrites;
for (auto & input : inputDrvs) {
auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first);
StringSet newOutputNames;
for (auto & outputName : input.second) {
auto actualPathOpt = inputDrvOutputs.at(outputName);
if (!actualPathOpt)
return std::nullopt;
auto actualPath = *actualPathOpt;
inputRewrites.emplace(
downstreamPlaceholder(store, input.first, outputName),
store.printStorePath(actualPath));
resolved.inputSrcs.insert(std::move(actualPath));
}
}
rewriteDerivation(store, resolved, inputRewrites);
return resolved;
}
} }

View file

@ -4,6 +4,7 @@
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh" #include "content-address.hh"
#include "sync.hh"
#include <map> #include <map>
#include <variant> #include <variant>
@ -100,7 +101,7 @@ struct BasicDerivation
StringPairs env; StringPairs env;
std::string name; std::string name;
BasicDerivation() { } BasicDerivation() = default;
virtual ~BasicDerivation() { }; virtual ~BasicDerivation() { };
bool isBuiltin() const; bool isBuiltin() const;
@ -127,7 +128,17 @@ struct Derivation : BasicDerivation
std::string unparse(const Store & store, bool maskOutputs, std::string unparse(const Store & store, bool maskOutputs,
std::map<std::string, StringSet> * actualInputs = nullptr) const; std::map<std::string, StringSet> * actualInputs = nullptr) const;
Derivation() { } /* Return the underlying basic derivation but with these changes:
1. Input drvs are emptied, but the outputs of them that were used are
added directly to the input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
Derivation() = default;
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
}; };
@ -137,7 +148,9 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */ /* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(Store & store, StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair = NoRepair); const Derivation & drv,
RepairFlag repair = NoRepair,
bool readOnly = false);
/* Read a derivation from a file. */ /* Read a derivation from a file. */
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name); Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
@ -191,6 +204,16 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
extern DrvHashes drvHashes; // FIXME: global, not thread-safe extern DrvHashes drvHashes; // FIXME: global, not thread-safe
/* Memoisation of `readDerivation(..).tryResolve()`. */
typedef std::map<
StorePath,
std::optional<StorePath>
> DrvPathResolutions;
// FIXME: global, though at least thread-safe.
// FIXME: arguably overlaps with hashDerivationModulo memo table.
extern Sync<DrvPathResolutions> drvPathResolutions;
bool wantOutput(const string & output, const std::set<string> & wanted); bool wantOutput(const string & output, const std::set<string> & wanted);
struct Source; struct Source;
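`tryResolve` and the `drvPathResolutions` memo table are meant to be used together; a rough sketch of that flow (not part of this commit; `resolveAndMemoize` is a hypothetical helper):

```cpp
#include "derivations.hh"
#include "store-api.hh"

#include <optional>

using namespace nix;

/* Hypothetical helper: compute the store path the given derivation resolves
   to, caching the result in the drvPathResolutions memo table declared above.
   Returns std::nullopt if some input derivation output is not realised yet. */
std::optional<StorePath> resolveAndMemoize(Store & store, const StorePath & drvPath)
{
    Derivation drv = store.readDerivation(drvPath);

    auto resolved = drv.tryResolve(store);
    if (!resolved)
        // At least one input placeholder could not be replaced by a real path.
        return std::nullopt;

    /* The resolved derivation has no inputDrvs left, only concrete inputSrcs,
       so writing it read-only is enough to learn its store path. */
    auto resolvedPath = writeDerivation(store, Derivation { *resolved }, NoRepair, true);
    drvPathResolutions.lock()->insert_or_assign(drvPath, resolvedPath);
    return resolvedPath;
}
```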

View file

@ -1,4 +1,5 @@
#include "store-api.hh" #include "store-api.hh"
#include "callback.hh"
namespace nix { namespace nix {

View file

@ -5,6 +5,7 @@
#include "s3.hh" #include "s3.hh"
#include "compression.hh" #include "compression.hh"
#include "finally.hh" #include "finally.hh"
#include "callback.hh"
#ifdef ENABLE_S3 #ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h> #include <aws/core/client/ClientConfiguration.h>

View file

@ -66,9 +66,6 @@ struct FileTransferRequest
FileTransferRequest(const std::string & uri) FileTransferRequest(const std::string & uri)
: uri(uri), parentAct(getCurActivity()) { } : uri(uri), parentAct(getCurActivity()) { }
FileTransferRequest(const std::string & uri, Headers headers)
: uri(uri), headers(headers) { }
std::string verb() std::string verb()
{ {
return data ? "upload" : "download"; return data ? "upload" : "download";

View file

@ -574,9 +574,12 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
/* If keep-derivations is set and this is a derivation, then /* If keep-derivations is set and this is a derivation, then
don't delete the derivation if any of the outputs are alive. */ don't delete the derivation if any of the outputs are alive. */
if (state.gcKeepDerivations && path.isDerivation()) { if (state.gcKeepDerivations && path.isDerivation()) {
for (auto & i : queryDerivationOutputs(path)) for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path) if (maybeOutPath &&
incoming.insert(i); isValidPath(*maybeOutPath) &&
queryPathInfo(*maybeOutPath)->deriver == path
)
incoming.insert(*maybeOutPath);
} }
/* If keep-outputs is set, then don't delete this path if there /* If keep-outputs is set, then don't delete this path if there
@ -660,9 +663,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
if (name == "." || name == "..") continue; if (name == "." || name == "..") continue;
Path path = linksDir + "/" + name; Path path = linksDir + "/" + name;
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st) == -1)
throw SysError("statting '%1%'", path);
if (st.st_nlink != 1) { if (st.st_nlink != 1) {
actualSize += st.st_size; actualSize += st.st_size;

View file

@ -2,6 +2,7 @@
#include "util.hh" #include "util.hh"
#include "archive.hh" #include "archive.hh"
#include "args.hh" #include "args.hh"
#include "abstract-setting-to-json.hh"
#include <algorithm> #include <algorithm>
#include <map> #include <map>
@ -41,6 +42,7 @@ Settings::Settings()
{ {
buildUsersGroup = getuid() == 0 ? "nixbld" : ""; buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1"; lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";
allowSymlinkedStore = getEnv("NIX_IGNORE_SYMLINK_STORE") == "1";
caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or("")); caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or(""));
if (caFile == "") { if (caFile == "") {
@ -146,6 +148,12 @@ bool Settings::isWSL1()
const string nixVersion = PACKAGE_VERSION; const string nixVersion = PACKAGE_VERSION;
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
{SandboxMode::smEnabled, true},
{SandboxMode::smRelaxed, "relaxed"},
{SandboxMode::smDisabled, false},
});
template<> void BaseSetting<SandboxMode>::set(const std::string & str) template<> void BaseSetting<SandboxMode>::set(const std::string & str)
{ {
if (str == "true") value = smEnabled; if (str == "true") value = smEnabled;

View file

@ -2,7 +2,6 @@
#include "types.hh" #include "types.hh"
#include "config.hh" #include "config.hh"
#include "abstractsettingtojson.hh"
#include "util.hh" #include "util.hh"
#include <map> #include <map>
@ -930,6 +929,19 @@ public:
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry", Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."}; "Path or URI of the global flake registry."};
Setting<bool> allowSymlinkedStore{
this, false, "allow-symlinked-store",
R"(
If set to `true`, Nix will stop complaining if the store directory
(typically /nix/store) contains symlink components.
This risks making some builds "impure" because builders sometimes
"canonicalise" paths by resolving all symlink components. Problems
occur if those builds are then deployed to machines where /nix/store
resolves to a different location from that of the build machine. You
can enable this setting if you are sure you're not going to do that.
)"};
}; };

View file

@ -2,6 +2,7 @@
#include "filetransfer.hh" #include "filetransfer.hh"
#include "globals.hh" #include "globals.hh"
#include "nar-info-disk-cache.hh" #include "nar-info-disk-cache.hh"
#include "callback.hh"
namespace nix { namespace nix {

View file

@ -6,6 +6,7 @@
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "ssh.hh" #include "ssh.hh"
#include "derivations.hh" #include "derivations.hh"
#include "callback.hh"
namespace nix { namespace nix {

View file

@ -6,6 +6,7 @@
#include "derivations.hh" #include "derivations.hh"
#include "nar-info.hh" #include "nar-info.hh"
#include "references.hh" #include "references.hh"
#include "callback.hh"
#include <iostream> #include <iostream>
#include <algorithm> #include <algorithm>
@ -109,12 +110,11 @@ LocalStore::LocalStore(const Params & params)
} }
/* Ensure that the store and its parents are not symlinks. */ /* Ensure that the store and its parents are not symlinks. */
if (getEnv("NIX_IGNORE_SYMLINK_STORE") != "1") { if (!settings.allowSymlinkedStore) {
Path path = realStoreDir; Path path = realStoreDir;
struct stat st; struct stat st;
while (path != "/") { while (path != "/") {
if (lstat(path.c_str(), &st)) st = lstat(path);
throw SysError("getting status of '%1%'", path);
if (S_ISLNK(st.st_mode)) if (S_ISLNK(st.st_mode))
throw Error( throw Error(
"the path '%1%' is a symlink; " "the path '%1%' is a symlink; "
@ -418,10 +418,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
void canonicaliseTimestampAndPermissions(const Path & path) void canonicaliseTimestampAndPermissions(const Path & path)
{ {
struct stat st; canonicaliseTimestampAndPermissions(path, lstat(path));
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
canonicaliseTimestampAndPermissions(path, st);
} }
@ -439,9 +436,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
} }
#endif #endif
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
/* Really make sure that the path is of a supported type. */ /* Really make sure that the path is of a supported type. */
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
@ -477,8 +472,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
ensure that we don't fail on hard links within the same build ensure that we don't fail on hard links within the same build
(i.e. "touch $out/foo; ln $out/foo $out/bar"). */ (i.e. "touch $out/foo; ln $out/foo $out/bar"). */
if (fromUid != (uid_t) -1 && st.st_uid != fromUid) { if (fromUid != (uid_t) -1 && st.st_uid != fromUid) {
assert(!S_ISDIR(st.st_mode)); if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino)))
if (inodesSeen.find(Inode(st.st_dev, st.st_ino)) == inodesSeen.end())
throw BuildError("invalid ownership on file '%1%'", path); throw BuildError("invalid ownership on file '%1%'", path);
mode_t mode = st.st_mode & ~S_IFMT; mode_t mode = st.st_mode & ~S_IFMT;
assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
@ -521,9 +515,7 @@ void canonicalisePathMetaData(const Path & path, uid_t fromUid, InodesSeen & ino
/* On platforms that don't have lchown(), the top-level path can't /* On platforms that don't have lchown(), the top-level path can't
be a symlink, since we can't change its ownership. */ be a symlink, since we can't change its ownership. */
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
if (st.st_uid != geteuid()) { if (st.st_uid != geteuid()) {
assert(S_ISLNK(st.st_mode)); assert(S_ISLNK(st.st_mode));
@ -729,7 +721,7 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path)
{ {
auto use(state.stmtQueryPathInfo.use()(printStorePath(path))); auto use(state.stmtQueryPathInfo.use()(printStorePath(path)));
if (!use.next()) if (!use.next())
throw Error("path '%s' is not valid", printStorePath(path)); throw InvalidPath("path '%s' is not valid", printStorePath(path));
return use.getInt(0); return use.getInt(0);
} }
@ -804,18 +796,58 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
} }
std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path) std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
{ {
auto path = path_;
std::map<std::string, std::optional<StorePath>> outputs; std::map<std::string, std::optional<StorePath>> outputs;
BasicDerivation drv = readDerivation(path); Derivation drv = readDerivation(path);
for (auto & [outName, _] : drv.outputs) { for (auto & [outName, _] : drv.outputs) {
outputs.insert_or_assign(outName, std::nullopt); outputs.insert_or_assign(outName, std::nullopt);
} }
bool haveCached = false;
{
auto resolutions = drvPathResolutions.lock();
auto resolvedPathOptIter = resolutions->find(path);
if (resolvedPathOptIter != resolutions->end()) {
auto & [_, resolvedPathOpt] = *resolvedPathOptIter;
if (resolvedPathOpt)
path = *resolvedPathOpt;
haveCached = true;
}
}
/* can't just use else-if instead of `!haveCached` because we need to unlock
`drvPathResolutions` before it is locked in `Derivation::resolve`. */
if (!haveCached && drv.type() == DerivationType::CAFloating) {
/* Try resolve drv and use that path instead. */
auto attempt = drv.tryResolve(*this);
if (!attempt)
/* If we cannot resolve the derivation, we cannot have any path
assigned so we return the map of all std::nullopts. */
return outputs;
/* Just compute store path */
auto pathResolved = writeDerivation(*this, *std::move(attempt), NoRepair, true);
/* Store in memo table. */
/* FIXME: memo logic should not be local-store specific, should have
wrapper-method instead. */
drvPathResolutions.lock()->insert_or_assign(path, pathResolved);
path = std::move(pathResolved);
}
return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() { return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
auto state(_state.lock()); auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() uint64_t drvId;
(queryValidPathId(*state, path))); try {
drvId = queryValidPathId(*state, path);
} catch (InvalidPath &) {
/* FIXME? if the derivation doesn't exist, we cannot have a mapping
for it. */
return outputs;
}
auto useQueryDerivationOutputs {
state->stmtQueryDerivationOutputs.use()
(drvId)
};
while (useQueryDerivationOutputs.next()) while (useQueryDerivationOutputs.next())
outputs.insert_or_assign( outputs.insert_or_assign(
@ -1454,7 +1486,7 @@ static void makeMutable(const Path & path)
{ {
checkInterrupt(); checkInterrupt();
struct stat st = lstat(path); auto st = lstat(path);
if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return; if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return;

View file

@ -5,7 +5,7 @@
#include "store-api.hh" #include "store-api.hh"
#include "thread-pool.hh" #include "thread-pool.hh"
#include "topo-sort.hh" #include "topo-sort.hh"
#include "callback.hh"
namespace nix { namespace nix {

View file

@ -1,10 +1,18 @@
#include "names.hh" #include "names.hh"
#include "util.hh" #include "util.hh"
#include <regex>
namespace nix { namespace nix {
struct Regex
{
std::regex regex;
};
DrvName::DrvName() DrvName::DrvName()
{ {
name = ""; name = "";
@ -30,11 +38,18 @@ DrvName::DrvName(std::string_view s) : hits(0)
} }
DrvName::~DrvName()
{ }
bool DrvName::matches(DrvName & n) bool DrvName::matches(DrvName & n)
{ {
if (name != "*") { if (name != "*") {
if (!regex) regex = std::unique_ptr<std::regex>(new std::regex(name, std::regex::extended)); if (!regex) {
if (!std::regex_match(n.name, *regex)) return false; regex = std::make_unique<Regex>();
regex->regex = std::regex(name, std::regex::extended);
}
if (!std::regex_match(n.name, regex->regex)) return false;
} }
if (version != "" && version != n.version) return false; if (version != "" && version != n.version) return false;
return true; return true;
@ -99,7 +114,7 @@ DrvNames drvNamesFromArgs(const Strings & opArgs)
{ {
DrvNames result; DrvNames result;
for (auto & i : opArgs) for (auto & i : opArgs)
result.push_back(DrvName(i)); result.emplace_back(i);
return result; return result;
} }

View file

@ -3,10 +3,11 @@
#include <memory> #include <memory>
#include "types.hh" #include "types.hh"
#include <regex>
namespace nix { namespace nix {
struct Regex;
struct DrvName struct DrvName
{ {
string fullName; string fullName;
@ -16,10 +17,12 @@ struct DrvName
DrvName(); DrvName();
DrvName(std::string_view s); DrvName(std::string_view s);
~DrvName();
bool matches(DrvName & n); bool matches(DrvName & n);
private: private:
std::unique_ptr<std::regex> regex; std::unique_ptr<Regex> regex;
}; };
typedef list<DrvName> DrvNames; typedef list<DrvName> DrvNames;

View file

@ -17,9 +17,7 @@ namespace nix {
static void makeWritable(const Path & path) static void makeWritable(const Path & path)
{ {
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1) if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError("changing writability of '%1%'", path); throw SysError("changing writability of '%1%'", path);
} }
@ -94,9 +92,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
{ {
checkInterrupt(); checkInterrupt();
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
#if __APPLE__ #if __APPLE__
/* HFS/macOS has some undocumented security feature disabling hardlinking for /* HFS/macOS has some undocumented security feature disabling hardlinking for
@ -187,9 +183,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
/* Yes! We've seen a file with the same contents. Replace the /* Yes! We've seen a file with the same contents. Replace the
current file with a hard link to that file. */ current file with a hard link to that file. */
struct stat stLink; auto stLink = lstat(linkPath);
if (lstat(linkPath.c_str(), &stLink))
throw SysError("getting attributes of path '%1%'", linkPath);
if (st.st_ino == stLink.st_ino) { if (st.st_ino == stLink.st_ino) {
debug(format("'%1%' is already linked to '%2%'") % path % linkPath); debug(format("'%1%' is already linked to '%2%'") % path % linkPath);

View file

@ -39,13 +39,10 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
for (auto & i : readDirectory(profileDir)) { for (auto & i : readDirectory(profileDir)) {
if (auto n = parseName(profileName, i.name)) { if (auto n = parseName(profileName, i.name)) {
auto path = profileDir + "/" + i.name; auto path = profileDir + "/" + i.name;
struct stat st;
if (lstat(path.c_str(), &st) != 0)
throw SysError("statting '%1%'", path);
gens.push_back({ gens.push_back({
.number = *n, .number = *n,
.path = path, .path = path,
.creationTime = st.st_mtime .creationTime = lstat(path).st_mtime
}); });
} }
} }

View file

@ -10,6 +10,7 @@
#include "pool.hh" #include "pool.hh"
#include "finally.hh" #include "finally.hh"
#include "logging.hh" #include "logging.hh"
#include "callback.hh"
#include <sys/types.h> #include <sys/types.h>
#include <sys/stat.h> #include <sys/stat.h>
@ -97,7 +98,16 @@ RemoteStore::RemoteStore(const Params & params)
, RemoteStoreConfig(params) , RemoteStoreConfig(params)
, connections(make_ref<Pool<Connection>>( , connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections), std::max(1, (int) maxConnections),
[this]() { return openConnectionWrapper(); }, [this]() {
auto conn = openConnectionWrapper();
try {
initConnection(*conn);
} catch (...) {
failed = true;
throw;
}
return conn;
},
[this](const ref<Connection> & r) { [this](const ref<Connection> & r) {
return return
r->to.good() r->to.good()
@ -182,8 +192,6 @@ ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
conn->startTime = std::chrono::steady_clock::now(); conn->startTime = std::chrono::steady_clock::now();
initConnection(*conn);
return conn; return conn;
} }
@ -299,6 +307,8 @@ struct ConnectionHandle
std::rethrow_exception(ex); std::rethrow_exception(ex);
} }
} }
void withFramedSink(std::function<void(Sink & sink)> fun);
}; };
@ -412,11 +422,28 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
} }
ref<const ValidPathInfo> RemoteStore::readValidPathInfo(ConnectionHandle & conn, const StorePath & path)
{
auto deriver = readString(conn->from);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
auto info = make_ref<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->references = readStorePaths<StorePathSet>(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
info->ca = parseContentAddressOpt(readString(conn->from));
}
return info;
}
void RemoteStore::queryPathInfoUncached(const StorePath & path, void RemoteStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{ {
try { try {
std::shared_ptr<ValidPathInfo> info; std::shared_ptr<const ValidPathInfo> info;
{ {
auto conn(getConnection()); auto conn(getConnection());
conn->to << wopQueryPathInfo << printStorePath(path); conn->to << wopQueryPathInfo << printStorePath(path);
@ -432,17 +459,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid; bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path)); if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
} }
auto deriver = readString(conn->from); info = readValidPathInfo(conn, path);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
info = std::make_shared<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->references = readStorePaths<StorePathSet>(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
info->ca = parseContentAddressOpt(readString(conn->from));
}
} }
callback(std::move(info)); callback(std::move(info));
} catch (...) { callback.rethrow(); } } catch (...) { callback.rethrow(); }
@ -517,6 +534,93 @@ std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string &
} }
ref<const ValidPathInfo> RemoteStore::addCAToStore(
Source & dump,
const string & name,
ContentAddressMethod caMethod,
const StorePathSet & references,
RepairFlag repair)
{
std::optional<ConnectionHandle> conn_(getConnection());
auto & conn = *conn_;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 25) {
conn->to
<< wopAddToStore
<< name
<< renderContentAddressMethod(caMethod);
writeStorePaths(*this, conn->to, references);
conn->to << repair;
conn.withFramedSink([&](Sink & sink) {
dump.drainInto(sink);
});
auto path = parseStorePath(readString(conn->from));
return readValidPathInfo(conn, path);
}
else {
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
std::visit(overloaded {
[&](TextHashMethod thm) -> void {
std::string s = dump.drain();
conn->to << wopAddTextToStore << name << s;
writeStorePaths(*this, conn->to, references);
conn.processStderr();
},
[&](FixedOutputHashMethod fohm) -> void {
conn->to
<< wopAddToStore
<< name
<< ((fohm.hashType == htSHA256 && fohm.fileIngestionMethod == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fohm.fileIngestionMethod == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(fohm.hashType);
try {
conn->to.written = 0;
conn->to.warn = true;
connections->incCapacity();
{
Finally cleanup([&]() { connections->decCapacity(); });
if (fohm.fileIngestionMethod == FileIngestionMethod::Recursive) {
dump.drainInto(conn->to);
} else {
std::string contents = dump.drain();
dumpString(contents, conn->to);
}
}
conn->to.warn = false;
conn.processStderr();
} catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM
or I/O error. */
if (e.errNo == EPIPE)
try {
conn.processStderr();
} catch (EndOfFile & e) { }
throw;
}
}
}, caMethod);
auto path = parseStorePath(readString(conn->from));
// Release our connection to prevent a deadlock in queryPathInfo().
conn_.reset();
return queryPathInfo(path);
}
}
StorePath RemoteStore::addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashType, RepairFlag repair)
{
StorePathSet references;
return addCAToStore(dump, name, FixedOutputHashMethod{ .fileIngestionMethod = method, .hashType = hashType }, references, repair)->path;
}
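Both flavours that `addCAToStore` unifies remain reachable through the generic `Store` interface; a short usage sketch (not part of this commit; `addExamples` and the file names are made up):

```cpp
#include "store-api.hh"
#include "serialise.hh"

#include <iostream>

using namespace nix;

/* Hypothetical helper showing the two content-addressing flavours. */
static void addExamples(Store & store)
{
    // Flat (non-NAR) file contents, hashed with SHA-256.
    std::string contents = "hello world\n";
    StringSource flatDump(contents);
    StorePath p1 = store.addToStoreFromDump(flatDump, "hello.txt",
        FileIngestionMethod::Flat, htSHA256, NoRepair);

    // Text with (here, empty) references, as used for .drv files.
    StorePath p2 = store.addTextToStore("example.txt", "hello again\n",
        StorePathSet {}, NoRepair);

    std::cout << store.printStorePath(p1) << "\n"
              << store.printStorePath(p2) << std::endl;
}
```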
void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) RepairFlag repair, CheckSigsFlag checkSigs)
{ {
@ -557,78 +661,9 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< repair << !checkSigs; << repair << !checkSigs;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23) {
conn.withFramedSink([&](Sink & sink) {
conn->to.flush();
std::exception_ptr ex;
struct FramedSink : BufferedSink
{
ConnectionHandle & conn;
std::exception_ptr & ex;
FramedSink(ConnectionHandle & conn, std::exception_ptr & ex) : conn(conn), ex(ex)
{ }
~FramedSink()
{
try {
conn->to << 0;
conn->to.flush();
} catch (...) {
ignoreException();
}
}
void write(const unsigned char * data, size_t len) override
{
/* Don't send more data if the remote has
encountered an error. */
if (ex) {
auto ex2 = ex;
ex = nullptr;
std::rethrow_exception(ex2);
}
conn->to << len;
conn->to(data, len);
};
};
/* Handle log messages / exceptions from the remote on a
separate thread. */
std::thread stderrThread([&]()
{
try {
conn.processStderr(nullptr, nullptr, false);
} catch (...) {
ex = std::current_exception();
}
});
Finally joinStderrThread([&]()
{
if (stderrThread.joinable()) {
stderrThread.join();
if (ex) {
try {
std::rethrow_exception(ex);
} catch (...) {
ignoreException();
}
}
}
});
{
FramedSink sink(conn, ex);
copyNAR(source, sink); copyNAR(source, sink);
sink.flush(); });
}
stderrThread.join();
if (ex)
std::rethrow_exception(ex);
} else if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21) { } else if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21) {
conn.processStderr(0, &source); conn.processStderr(0, &source);
} else { } else {
@ -639,57 +674,11 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
} }
StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
if (repair) throw Error("repairing is not supported when building through the Nix daemon");
auto conn(getConnection());
Path srcPath(absPath(_srcPath));
conn->to
<< wopAddToStore
<< name
<< ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (method == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashAlgo);
try {
conn->to.written = 0;
conn->to.warn = true;
connections->incCapacity();
{
Finally cleanup([&]() { connections->decCapacity(); });
dumpPath(srcPath, conn->to, filter);
}
conn->to.warn = false;
conn.processStderr();
} catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM
or I/O error. */
if (e.errNo == EPIPE)
try {
conn.processStderr();
} catch (EndOfFile & e) { }
throw;
}
return parseStorePath(readString(conn->from));
}
StorePath RemoteStore::addTextToStore(const string & name, const string & s, StorePath RemoteStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) const StorePathSet & references, RepairFlag repair)
{ {
if (repair) throw Error("repairing is not supported when building through the Nix daemon"); StringSource source(s);
return addCAToStore(source, name, TextHashMethod{}, references, repair)->path;
auto conn(getConnection());
conn->to << wopAddTextToStore << name << s;
writeStorePaths(*this, conn->to, references);
conn.processStderr();
return parseStorePath(readString(conn->from));
} }
@ -985,6 +974,49 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
return nullptr; return nullptr;
} }
void ConnectionHandle::withFramedSink(std::function<void(Sink &sink)> fun)
{
(*this)->to.flush();
std::exception_ptr ex;
/* Handle log messages / exceptions from the remote on a
separate thread. */
std::thread stderrThread([&]()
{
try {
processStderr(nullptr, nullptr, false);
} catch (...) {
ex = std::current_exception();
}
});
Finally joinStderrThread([&]()
{
if (stderrThread.joinable()) {
stderrThread.join();
if (ex) {
try {
std::rethrow_exception(ex);
} catch (...) {
ignoreException();
}
}
}
});
{
FramedSink sink((*this)->to, ex);
fun(sink);
sink.flush();
}
stderrThread.join();
if (ex)
std::rethrow_exception(ex);
}
static RegisterStoreImplementation<UDSRemoteStore, UDSRemoteStoreConfig> regStore; static RegisterStoreImplementation<UDSRemoteStore, UDSRemoteStoreConfig> regStore;
} }

View file

@ -63,13 +63,21 @@ public:
void querySubstitutablePathInfos(const StorePathCAMap & paths, void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) override; SubstitutablePathInfos & infos) override;
/* Add a content-addressable store path. `dump` will be drained. */
ref<const ValidPathInfo> addCAToStore(
Source & dump,
const string & name,
ContentAddressMethod caMethod,
const StorePathSet & references,
RepairFlag repair);
/* Add a content-addressable store path. Does not support references. `dump` will be drained. */
StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
void addToStore(const ValidPathInfo & info, Source & nar, void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override; RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s, StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override; const StorePathSet & references, RepairFlag repair) override;
@ -106,8 +114,6 @@ public:
void flushBadConnections(); void flushBadConnections();
protected:
struct Connection struct Connection
{ {
AutoCloseFD fd; AutoCloseFD fd;
@ -123,6 +129,8 @@ protected:
ref<Connection> openConnectionWrapper(); ref<Connection> openConnectionWrapper();
protected:
virtual ref<Connection> openConnection() = 0; virtual ref<Connection> openConnection() = 0;
void initConnection(Connection & conn); void initConnection(Connection & conn);
@ -139,6 +147,8 @@ protected:
virtual void narFromPath(const StorePath & path, Sink & sink) override; virtual void narFromPath(const StorePath & path, Sink & sink) override;
ref<const ValidPathInfo> readValidPathInfo(ConnectionHandle & conn, const StorePath & path);
private: private:
std::atomic_bool failed{false}; std::atomic_bool failed{false};

View file

@ -80,7 +80,6 @@ ref<RemoteStore::Connection> SSHStore::openConnection()
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get()); conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get()); conn->from = FdSource(conn->sshConn->out.get());
initConnection(*conn);
return conn; return conn;
} }

View file

@ -8,9 +8,7 @@
#include "json.hh" #include "json.hh"
#include "url.hh" #include "url.hh"
#include "archive.hh" #include "archive.hh"
#include "callback.hh"
#include <future>
namespace nix { namespace nix {
@ -1040,38 +1038,26 @@ static bool isNonUriPath(const std::string & spec) {
&& spec.find("/") != std::string::npos; && spec.find("/") != std::string::npos;
} }
StoreType getStoreType(const std::string & uri, const std::string & stateDir)
{
if (uri == "daemon") {
return tDaemon;
} else if (uri == "local" || isNonUriPath(uri)) {
return tLocal;
} else if (uri == "" || uri == "auto") {
if (access(stateDir.c_str(), R_OK | W_OK) == 0)
return tLocal;
else if (pathExists(settings.nixDaemonSocketFile))
return tDaemon;
else
return tLocal;
} else {
return tOther;
}
}
std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params) std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params)
{ {
switch (getStoreType(uri, get(params, "state").value_or(settings.nixStateDir))) { if (uri == "" || uri == "auto") {
case tDaemon: auto stateDir = get(params, "state").value_or(settings.nixStateDir);
return std::shared_ptr<Store>(std::make_shared<UDSRemoteStore>(params)); if (access(stateDir.c_str(), R_OK | W_OK) == 0)
case tLocal: { return std::make_shared<LocalStore>(params);
Store::Params params2 = params; else if (pathExists(settings.nixDaemonSocketFile))
if (isNonUriPath(uri)) { return std::make_shared<UDSRemoteStore>(params);
params2["root"] = absPath(uri); else
} return std::make_shared<LocalStore>(params);
return std::shared_ptr<Store>(std::make_shared<LocalStore>(params2)); } else if (uri == "daemon") {
} return std::make_shared<UDSRemoteStore>(params);
default: } else if (uri == "local") {
return nullptr; return std::make_shared<LocalStore>(params);
} else if (isNonUriPath(uri)) {
Store::Params params2 = params;
params2["root"] = absPath(uri);
return std::make_shared<LocalStore>(params2);
} else {
return nullptr;
} }
} }
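The effect of the rewritten dispatch, seen from the caller's side; a sketch only (not part of this commit), and it assumes a working Nix installation since these calls touch the real state directory or daemon socket:

```cpp
#include "store-api.hh"

#include <iostream>

using namespace nix;

int main()
{
    // "auto" prefers a writable local state directory, then the daemon
    // socket, and finally falls back to a local store anyway.
    auto autoStore = openStore("auto");

    // "daemon" and "local" force one backend or the other.
    auto daemonStore = openStore("daemon");

    // A plain absolute path (no URI scheme) becomes a local store rooted there.
    auto rootedStore = openStore("/tmp/nix-test-root");

    std::cout << autoStore->getUri() << std::endl;
}
```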

View file

@ -449,7 +449,8 @@ public:
/* Like addToStore(), but the contents of the path are contained /* Like addToStore(), but the contents of the path are contained
in `dump', which is either a NAR serialisation (if recursive == in `dump', which is either a NAR serialisation (if recursive ==
true) or simply the contents of a regular file (if recursive == true) or simply the contents of a regular file (if recursive ==
false). */ false).
`dump` may be drained. */
// FIXME: remove? // FIXME: remove?
virtual StorePath addToStoreFromDump(Source & dump, const string & name, virtual StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
@ -478,8 +479,38 @@ public:
BuildMode buildMode = bmNormal); BuildMode buildMode = bmNormal);
/* Build a single non-materialized derivation (i.e. not from an /* Build a single non-materialized derivation (i.e. not from an
on-disk .drv file). Note that drvPath is only used for on-disk .drv file).
informational purposes. */
drvPath is used to deduplicate worker goals so it is imperative that
it is correct. That said, it doesn't literally need to be the store path
that would be calculated from writing this derivation to the store: it is
OK if it instead is that of a Derivation which would resolve to this one
(by taking the outputs of its input derivations and adding them as input
sources) such that the paths referenceable at build time are the same.
In the input-addressed case, we usually *do* use an "original"
unresolved derivation's path, as that is what will be used in the
`buildPaths` case. Also, the input-addressed output paths are verified
only by the contents of that specific unresolved derivation, so it is
nice to keep that information around: if the original derivation is
ever obtained later, it can be verified whether the trusted user in fact
used the proper output path.
In the content-addressed case, we want to always use the
resolved drv path calculated from the provided derivation. This serves
two purposes:
- It keeps the operation trustless, by ruling out a maliciously
invalid drv path corresponding to a non-resolution-equivalent
derivation.
- For the floating case in particular, it ensures that the derivation
to output mapping respects the resolution equivalence relation, so
one cannot choose different resolution-equivalent derivations to
subvert dependency coherence (i.e. the property that one doesn't end
up with multiple different versions of dependencies without
explicitly choosing to allow it).
*/
virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode = bmNormal) = 0; BuildMode buildMode = bmNormal) = 0;
@ -516,7 +547,7 @@ public:
- The collector isn't running, or it's just started but hasn't - The collector isn't running, or it's just started but hasn't
acquired the GC lock yet. In that case we get and release acquired the GC lock yet. In that case we get and release
the lock right away, then exit. The collector scans the the lock right away, then exit. The collector scans the
permanent root and sees our's. permanent root and sees ours.
In either case the permanent root is seen by the collector. */ In either case the permanent root is seen by the collector. */
virtual void syncWithGC() { }; virtual void syncWithGC() { };
@ -791,16 +822,6 @@ ref<Store> openStore(const std::string & uri = settings.storeUri.get(),
const Store::Params & extraParams = Store::Params()); const Store::Params & extraParams = Store::Params());
enum StoreType {
tDaemon,
tLocal,
tOther
};
StoreType getStoreType(const std::string & uri = settings.storeUri.get(),
const std::string & stateDir = settings.nixStateDir);
/* Return the default substituter stores, defined by the /* Return the default substituter stores, defined by the
substituters option and various legacy options. */ substituters option and various legacy options. */
std::list<ref<Store>> getDefaultSubstituters(); std::list<ref<Store>> getDefaultSubstituters();

View file

@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f #define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x118 #define PROTOCOL_VERSION 0x119
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
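The bump from 0x118 to 0x119 raises the daemon protocol's minor version from 24 to 25, which is what the new `GET_PROTOCOL_MINOR(clientVersion) >= 25` checks test for. A standalone arithmetic check (mirrors the macros above; not part of this commit):

```cpp
// Standalone mirror of the macros above, just to show the arithmetic.
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

static_assert(GET_PROTOCOL_MAJOR(0x119) == 0x100, "major protocol version unchanged");
static_assert(GET_PROTOCOL_MINOR(0x118) == 24, "previous minor version");
static_assert(GET_PROTOCOL_MINOR(0x119) == 25, "new minor version, hence the >= 25 checks");

int main() { return 0; }
```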
@ -18,7 +18,7 @@ typedef enum {
wopQueryReferences = 5, // obsolete wopQueryReferences = 5, // obsolete
wopQueryReferrers = 6, wopQueryReferrers = 6,
wopAddToStore = 7, wopAddToStore = 7,
wopAddTextToStore = 8, wopAddTextToStore = 8, // obsolete since 1.25, Nix 3.0. Use wopAddToStore
wopBuildPaths = 9, wopBuildPaths = 9,
wopEnsurePath = 10, wopEnsurePath = 10,
wopAddTempRoot = 11, wopAddTempRoot = 11,

View file

@ -27,6 +27,8 @@ struct ArchiveSettings : Config
#endif #endif
"use-case-hack", "use-case-hack",
"Whether to enable a Darwin-specific hack for dealing with file name collisions."}; "Whether to enable a Darwin-specific hack for dealing with file name collisions."};
Setting<bool> preallocateContents{this, true, "preallocate-contents",
"Whether to preallocate files when writing objects with known size."};
}; };
static ArchiveSettings archiveSettings; static ArchiveSettings archiveSettings;
@ -66,9 +68,7 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)
{ {
checkInterrupt(); checkInterrupt();
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
sink << "("; sink << "(";
@ -325,6 +325,9 @@ struct RestoreSink : ParseSink
void preallocateContents(uint64_t len) void preallocateContents(uint64_t len)
{ {
if (!archiveSettings.preallocateContents)
return;
#if HAVE_POSIX_FALLOCATE #if HAVE_POSIX_FALLOCATE
if (len) { if (len) {
errno = posix_fallocate(fd.get(), 0, len); errno = posix_fallocate(fd.get(), 0, len);

View file

@ -192,7 +192,7 @@ public:
{ {
expectArgs({ expectArgs({
.label = label, .label = label,
.optional = true, .optional = optional,
.handler = {dest} .handler = {dest}
}); });
} }

46
src/libutil/callback.hh Normal file
View file

@ -0,0 +1,46 @@
#pragma once
#include <future>
#include <functional>
namespace nix {
/* A callback is a wrapper around a lambda that accepts a value of
type T or an exception. (We abuse std::future<T> to pass the value or
exception.) */
template<typename T>
class Callback
{
std::function<void(std::future<T>)> fun;
std::atomic_flag done = ATOMIC_FLAG_INIT;
public:
Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }
Callback(Callback && callback) : fun(std::move(callback.fun))
{
auto prev = callback.done.test_and_set();
if (prev) done.test_and_set();
}
void operator()(T && t) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_value(std::move(t));
fun(promise.get_future());
}
void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_exception(exc);
fun(promise.get_future());
}
};
}
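A small, self-contained usage sketch of `Callback` (not part of this commit; `lookupGreeting` is a made-up asynchronous-style API):

```cpp
#include "callback.hh"

#include <iostream>
#include <stdexcept>
#include <string>

using namespace nix;

/* Made-up API: reports its result through a Callback, invoking it exactly
   once, either with a value or, via rethrow(), with the current exception. */
static void lookupGreeting(bool fail, Callback<std::string> callback) noexcept
{
    try {
        if (fail) throw std::runtime_error("no greeting available");
        callback(std::string("hello"));
    } catch (...) {
        callback.rethrow();
    }
}

int main()
{
    lookupGreeting(false, {[](std::future<std::string> res) {
        // future::get() re-throws whatever rethrow() captured on the other side.
        std::cout << res.get() << std::endl;
    }});
}
```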

View file

@ -1,6 +1,6 @@
#include "config.hh" #include "config.hh"
#include "args.hh" #include "args.hh"
#include "abstractsettingtojson.hh" #include "abstract-setting-to-json.hh"
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>

View file

@ -1,7 +1,6 @@
#pragma once #pragma once
#include <boost/format.hpp> #include <boost/format.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <string> #include <string>
#include "ansicolor.hh" #include "ansicolor.hh"

View file

@ -93,7 +93,7 @@ void Source::operator () (unsigned char * data, size_t len)
} }
std::string Source::drain() void Source::drainInto(Sink & sink)
{ {
std::string s; std::string s;
std::vector<unsigned char> buf(8192); std::vector<unsigned char> buf(8192);
@ -101,12 +101,19 @@ std::string Source::drain()
size_t n; size_t n;
try { try {
n = read(buf.data(), buf.size()); n = read(buf.data(), buf.size());
s.append((char *) buf.data(), n); sink(buf.data(), n);
} catch (EndOfFile &) { } catch (EndOfFile &) {
break; break;
} }
} }
return s; }
std::string Source::drain()
{
StringSink s;
drainInto(s);
return *s.s;
} }
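With the new `drainInto`, a source can be streamed straight into any sink instead of being collected into a string first. A small sketch, assuming the `Source` and `FdSink` types from `serialise.hh`:

```cpp
#include <unistd.h>
#include "serialise.hh"

// Copy whatever the source produces to stdout in 8 KiB chunks, without
// materialising the whole payload in memory (which drain() would do).
void copyToStdout(nix::Source & in)
{
    nix::FdSink out(STDOUT_FILENO);
    in.drainInto(out);
    out.flush();
}
```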

View file

@ -69,6 +69,8 @@ struct Source
virtual bool good() { return true; } virtual bool good() { return true; }
void drainInto(Sink & sink);
std::string drain(); std::string drain();
}; };
@ -340,7 +342,7 @@ T readNum(Source & source)
((uint64_t) buf[6] << 48) | ((uint64_t) buf[6] << 48) |
((uint64_t) buf[7] << 56); ((uint64_t) buf[7] << 56);
if (n > std::numeric_limits<T>::max()) if (n > (uint64_t)std::numeric_limits<T>::max())
throw SerialisationError("serialised integer %d is too large for type '%s'", n, typeid(T).name()); throw SerialisationError("serialised integer %d is too large for type '%s'", n, typeid(T).name());
return (T) n; return (T) n;
@ -404,4 +406,93 @@ struct StreamToSourceAdapter : Source
}; };
/* A source that reads a distinct format of concatenated chunks back into its
logical form, so that the underlying stream is left in a known state
even in the event of errors.
Use with FramedSink, which also allows the logical stream to be terminated
in the event of an exception.
*/
struct FramedSource : Source
{
Source & from;
bool eof = false;
std::vector<unsigned char> pending;
size_t pos = 0;
FramedSource(Source & from) : from(from)
{ }
~FramedSource()
{
if (!eof) {
while (true) {
auto n = readInt(from);
if (!n) break;
std::vector<unsigned char> data(n);
from(data.data(), n);
}
}
}
size_t read(unsigned char * data, size_t len) override
{
if (eof) throw EndOfFile("reached end of FramedSource");
if (pos >= pending.size()) {
size_t len = readInt(from);
if (!len) {
eof = true;
return 0;
}
pending = std::vector<unsigned char>(len);
pos = 0;
from(pending.data(), len);
}
auto n = std::min(len, pending.size() - pos);
memcpy(data, pending.data() + pos, n);
pos += n;
return n;
}
};
/* Write as chunks in the format expected by FramedSource.
The exception_ptr reference can be used to terminate the stream when you
detect that an error has occurred on the remote end.
*/
struct FramedSink : nix::BufferedSink
{
BufferedSink & to;
std::exception_ptr & ex;
FramedSink(BufferedSink & to, std::exception_ptr & ex) : to(to), ex(ex)
{ }
~FramedSink()
{
try {
to << 0;
to.flush();
} catch (...) {
ignoreException();
}
}
void write(const unsigned char * data, size_t len) override
{
/* Don't send more data if the remote has
encountered an error. */
if (ex) {
auto ex2 = ex;
ex = nullptr;
std::rethrow_exception(ex2);
}
to << len;
to(data, len);
};
};
} }
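The framing format itself is simple: each chunk is a 64-bit length followed by that many bytes, and a zero length terminates the logical stream. A hedged round-trip sketch, assuming `StringSink`/`StringSource` from `serialise.hh` (the encoder side is written by hand here because `FramedSink` requires a `BufferedSink`):

```cpp
#include <cassert>
#include <string>
#include "serialise.hh"

int main()
{
    using namespace nix;

    // Encode "hello" as one frame followed by the zero-length end marker.
    std::string payload = "hello";
    StringSink encoded;
    encoded << payload.size();
    encoded((const unsigned char *) payload.data(), payload.size());
    encoded << 0;

    // Decode it back through FramedSource.
    StringSource in(*encoded.s);
    FramedSource framed(in);
    assert(framed.drain() == payload);
}
```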

View file

@ -9,7 +9,7 @@ namespace nix {
// If `separator` is found, we return the portion of the string before the // If `separator` is found, we return the portion of the string before the
// separator, and modify the string argument to contain only the part after the // separator, and modify the string argument to contain only the part after the
// separator. Otherwise, wer return `std::nullopt`, and we leave the argument // separator. Otherwise, we return `std::nullopt`, and we leave the argument
// string alone. // string alone.
static inline std::optional<std::string_view> splitPrefixTo(std::string_view & string, char separator) { static inline std::optional<std::string_view> splitPrefixTo(std::string_view & string, char separator) {
auto sepInstance = string.find(separator); auto sepInstance = string.find(separator);

44
src/libutil/url-parts.hh Normal file
View file

@ -0,0 +1,44 @@
#pragma once
#include <string>
#include <regex>
namespace nix {
// URI stuff.
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
const static std::string schemeRegex = "(?:[a-z+.-]+)";
const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
// A Git ref (i.e. branch or tag name).
const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
extern std::regex refRegex;
// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
// This is because of the definition of a ref in refs.c in https://github.com/git/git
// See tests/fetchGitRefs.sh for the full definition
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;
// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;
}
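A small sketch of how these regex strings can be used to validate user input; only the `*RegexS` strings are used here (the `extern std::regex` objects are compiled once elsewhere in libutil):

```cpp
#include <cassert>
#include <regex>
#include <string>
#include "url-parts.hh"

int main()
{
    std::regex rev(nix::revRegexS);
    std::regex ref(nix::refRegexS);

    assert(std::regex_match(std::string(40, 'a'), rev));    // a full SHA-1 hash
    assert(!std::regex_match(std::string("HEAD~1"), rev));
    assert(std::regex_match(std::string("release-2.4"), ref));
}
```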

View file

@ -1,4 +1,5 @@
#include "url.hh" #include "url.hh"
#include "url-parts.hh"
#include "util.hh" #include "util.hh"
namespace nix { namespace nix {

View file

@ -2,8 +2,6 @@
#include "error.hh" #include "error.hh"
#include <regex>
namespace nix { namespace nix {
struct ParsedURL struct ParsedURL
@ -29,40 +27,4 @@ std::map<std::string, std::string> decodeQuery(const std::string & query);
ParsedURL parseURL(const std::string & url); ParsedURL parseURL(const std::string & url);
// URI stuff.
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
const static std::string schemeRegex = "(?:[a-z+.-]+)";
const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
// A Git ref (i.e. branch or tag name).
const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
extern std::regex refRegex;
// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
// This is because of the definition of a ref in refs.c in https://github.com/git/git
// See tests/fetchGitRefs.sh for the full definition
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;
// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;
} }

View file

@ -12,13 +12,9 @@
#include <signal.h> #include <signal.h>
#include <functional> #include <functional>
#include <limits>
#include <cstdio>
#include <map> #include <map>
#include <sstream> #include <sstream>
#include <optional> #include <optional>
#include <future>
#include <iterator>
#ifndef HAVE_STRUCT_DIRENT_D_TYPE #ifndef HAVE_STRUCT_DIRENT_D_TYPE
#define DT_UNKNOWN 0 #define DT_UNKNOWN 0
@ -480,43 +476,8 @@ std::optional<typename T::mapped_type> get(const T & map, const typename T::key_
} }
/* A callback is a wrapper around a lambda that accepts a valid of
type T or an exception. (We abuse std::future<T> to pass the value or
exception.) */
template<typename T> template<typename T>
class Callback class Callback;
{
std::function<void(std::future<T>)> fun;
std::atomic_flag done = ATOMIC_FLAG_INIT;
public:
Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }
Callback(Callback && callback) : fun(std::move(callback.fun))
{
auto prev = callback.done.test_and_set();
if (prev) done.test_and_set();
}
void operator()(T && t) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_value(std::move(t));
fun(promise.get_future());
}
void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_exception(exc);
fun(promise.get_future());
}
};
/* Start a thread that handles various signals. Also block those signals /* Start a thread that handles various signals. Also block those signals

View file

@ -76,6 +76,13 @@ static void update(const StringSet & channelNames)
auto store = openStore(); auto store = openStore();
auto [fd, unpackChannelPath] = createTempFile();
writeFull(fd.get(),
#include "unpack-channel.nix.gen.hh"
);
fd = -1;
AutoDelete del(unpackChannelPath, false);
// Download each channel. // Download each channel.
Strings exprs; Strings exprs;
for (const auto & channel : channels) { for (const auto & channel : channels) {
@ -87,7 +94,7 @@ static void update(const StringSet & channelNames)
// We want to download the url to a file to see if it's a tarball while also checking if we // We want to download the url to a file to see if it's a tarball while also checking if we
// got redirected in the process, so that we can grab the various parts of a nix channel // got redirected in the process, so that we can grab the various parts of a nix channel
// definition from a consistent location if the redirect changes mid-download. // definition from a consistent location if the redirect changes mid-download.
auto result = fetchers::downloadFile(store, url, Headers {}, std::string(baseNameOf(url)), false); auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
auto filename = store->toRealPath(result.storePath); auto filename = store->toRealPath(result.storePath);
url = result.effectiveUrl; url = result.effectiveUrl;
@ -104,7 +111,7 @@ static void update(const StringSet & channelNames)
bool unpacked = false; bool unpacked = false;
if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) { if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) {
runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import <nix/unpack-channel.nix> " runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import " + unpackChannelPath +
"{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" }); "{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" });
unpacked = true; unpacked = true;
} }
@ -112,9 +119,9 @@ static void update(const StringSet & channelNames)
if (!unpacked) { if (!unpacked) {
// Download the channel tarball. // Download the channel tarball.
try { try {
filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", Headers {}, "nixexprs.tar.xz", false).storePath); filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
} catch (FileTransferError & e) { } catch (FileTransferError & e) {
filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", Headers {}, "nixexprs.tar.bz2", false).storePath); filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
} }
} }
@ -125,7 +132,7 @@ static void update(const StringSet & channelNames)
// Unpack the channel tarballs into the Nix store and install them // Unpack the channel tarballs into the Nix store and install them
// into the channels profile. // into the channels profile.
std::cerr << "unpacking channels...\n"; std::cerr << "unpacking channels...\n";
Strings envArgs{ "--profile", profile, "--file", "<nix/unpack-channel.nix>", "--install", "--from-expression" }; Strings envArgs{ "--profile", profile, "--file", unpackChannelPath, "--install", "--from-expression" };
for (auto & expr : exprs) for (auto & expr : exprs)
envArgs.push_back(std::move(expr)); envArgs.push_back(std::move(expr));
envArgs.push_back("--quiet"); envArgs.push_back("--quiet");

View file

@ -1,5 +1,6 @@
#include "shared.hh" #include "shared.hh"
#include "local-store.hh" #include "local-store.hh"
#include "remote-store.hh"
#include "util.hh" #include "util.hh"
#include "serialise.hh" #include "serialise.hh"
#include "archive.hh" #include "archive.hh"
@ -285,44 +286,28 @@ static int _main(int argc, char * * argv)
initPlugins(); initPlugins();
if (stdio) { if (stdio) {
if (getStoreType() == tDaemon) { if (auto store = openUncachedStore().dynamic_pointer_cast<RemoteStore>()) {
// Forward on this connection to the real daemon auto conn = store->openConnectionWrapper();
auto socketPath = settings.nixDaemonSocketFile; int from = conn->from.fd;
auto s = socket(PF_UNIX, SOCK_STREAM, 0); int to = conn->to.fd;
if (s == -1)
throw SysError("creating Unix domain socket");
auto socketDir = dirOf(socketPath); auto nfds = std::max(from, STDIN_FILENO) + 1;
if (chdir(socketDir.c_str()) == -1)
throw SysError("changing to socket directory '%1%'", socketDir);
auto socketName = std::string(baseNameOf(socketPath));
auto addr = sockaddr_un{};
addr.sun_family = AF_UNIX;
if (socketName.size() + 1 >= sizeof(addr.sun_path))
throw Error("socket name %1% is too long", socketName);
strcpy(addr.sun_path, socketName.c_str());
if (connect(s, (struct sockaddr *) &addr, sizeof(addr)) == -1)
throw SysError("cannot connect to daemon at %1%", socketPath);
auto nfds = (s > STDIN_FILENO ? s : STDIN_FILENO) + 1;
while (true) { while (true) {
fd_set fds; fd_set fds;
FD_ZERO(&fds); FD_ZERO(&fds);
FD_SET(s, &fds); FD_SET(from, &fds);
FD_SET(STDIN_FILENO, &fds); FD_SET(STDIN_FILENO, &fds);
if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1) if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1)
throw SysError("waiting for data from client or server"); throw SysError("waiting for data from client or server");
if (FD_ISSET(s, &fds)) { if (FD_ISSET(from, &fds)) {
auto res = splice(s, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE); auto res = splice(from, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (res == -1) if (res == -1)
throw SysError("splicing data from daemon socket to stdout"); throw SysError("splicing data from daemon socket to stdout");
else if (res == 0) else if (res == 0)
throw EndOfFile("unexpected EOF from daemon socket"); throw EndOfFile("unexpected EOF from daemon socket");
} }
if (FD_ISSET(STDIN_FILENO, &fds)) { if (FD_ISSET(STDIN_FILENO, &fds)) {
auto res = splice(STDIN_FILENO, nullptr, s, nullptr, SSIZE_MAX, SPLICE_F_MOVE); auto res = splice(STDIN_FILENO, nullptr, to, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (res == -1) if (res == -1)
throw SysError("splicing data from stdin to daemon socket"); throw SysError("splicing data from stdin to daemon socket");
else if (res == 0) else if (res == 0)

View file

@ -230,7 +230,7 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems,
{ {
DrvNames selectors = drvNamesFromArgs(args); DrvNames selectors = drvNamesFromArgs(args);
if (selectors.empty()) if (selectors.empty())
selectors.push_back(DrvName("*")); selectors.emplace_back("*");
DrvInfos elems; DrvInfos elems;
set<unsigned int> done; set<unsigned int> done;

View file

@ -57,6 +57,7 @@ static int _main(int argc, char * * argv)
bool fromExpr = false; bool fromExpr = false;
string attrPath; string attrPath;
bool unpack = false; bool unpack = false;
bool executable = false;
string name; string name;
struct MyArgs : LegacyArgs, MixEvalArgs struct MyArgs : LegacyArgs, MixEvalArgs
@ -81,6 +82,8 @@ static int _main(int argc, char * * argv)
} }
else if (*arg == "--unpack") else if (*arg == "--unpack")
unpack = true; unpack = true;
else if (*arg == "--executable")
executable = true;
else if (*arg == "--name") else if (*arg == "--name")
name = getArg(*arg, arg, end); name = getArg(*arg, arg, end);
else if (*arg != "" && arg->at(0) == '-') else if (*arg != "" && arg->at(0) == '-')
@ -175,7 +178,11 @@ static int _main(int argc, char * * argv)
/* Download the file. */ /* Download the file. */
{ {
AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, 0600); auto mode = 0600;
if (executable)
mode = 0700;
AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode);
if (!fd) throw SysError("creating temporary file '%s'", tmpFile); if (!fd) throw SysError("creating temporary file '%s'", tmpFile);
FdSink sink(fd.get()); FdSink sink(fd.get());
@ -201,7 +208,7 @@ static int _main(int argc, char * * argv)
tmpFile = unpacked; tmpFile = unpacked;
} }
const auto method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; const auto method = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto info = store->addToStoreSlow(name, tmpFile, method, ht, expectedHash); auto info = store->addToStoreSlow(name, tmpFile, method, ht, expectedHash);
storePath = info.path; storePath = info.path;

View file

@ -98,14 +98,14 @@ struct CmdBundle : InstallableCommand
if (!evalState->isDerivation(*vRes)) if (!evalState->isDerivation(*vRes))
throw Error("the bundler '%s' does not produce a derivation", bundler.what()); throw Error("the bundler '%s' does not produce a derivation", bundler.what());
auto attr1 = vRes->attrs->find(evalState->sDrvPath); auto attr1 = vRes->attrs->get(evalState->sDrvPath);
if (!attr1) if (!attr1)
throw Error("the bundler '%s' does not produce a derivation", bundler.what()); throw Error("the bundler '%s' does not produce a derivation", bundler.what());
PathSet context2; PathSet context2;
StorePath drvPath = store->parseStorePath(evalState->coerceToPath(*attr1->pos, *attr1->value, context2)); StorePath drvPath = store->parseStorePath(evalState->coerceToPath(*attr1->pos, *attr1->value, context2));
auto attr2 = vRes->attrs->find(evalState->sOutPath); auto attr2 = vRes->attrs->get(evalState->sOutPath);
if (!attr2) if (!attr2)
throw Error("the bundler '%s' does not produce a derivation", bundler.what()); throw Error("the bundler '%s' does not produce a derivation", bundler.what());

View file

@ -49,9 +49,7 @@ struct CmdDoctor : StoreCommand
{ {
logger->log("Running checks against store uri: " + store->getUri()); logger->log("Running checks against store uri: " + store->getUri());
auto type = getStoreType(); if (store.dynamic_pointer_cast<LocalFSStore>()) {
if (type < tOther) {
success &= checkNixInPath(); success &= checkNixInPath();
success &= checkProfileRoots(store); success &= checkProfileRoots(store);
} }

View file

@ -44,6 +44,7 @@ struct CmdHash : Command
switch (mode) { switch (mode) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
d = "print cryptographic hash of a regular file"; d = "print cryptographic hash of a regular file";
break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::Recursive:
d = "print cryptographic hash of the NAR serialisation of a path"; d = "print cryptographic hash of the NAR serialisation of a path";
}; };

View file

@ -29,3 +29,5 @@ $(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))
src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
src/nix/develop.cc: src/nix/get-env.sh.gen.hh src/nix/develop.cc: src/nix/get-env.sh.gen.hh
src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh

View file

@ -111,11 +111,7 @@ std::set<std::string> runResolver(const Path & filename)
bool isSymlink(const Path & path) bool isSymlink(const Path & path)
{ {
struct stat st; return S_ISLNK(lstat(path).st_mode);
if (lstat(path.c_str(), &st) == -1)
throw SysError("getting attributes of path '%1%'", path);
return S_ISLNK(st.st_mode);
} }
Path resolveSymlink(const Path & path) Path resolveSymlink(const Path & path)

View file

@ -29,4 +29,26 @@ rec {
outputHashMode = "recursive"; outputHashMode = "recursive";
outputHashAlgo = "sha256"; outputHashAlgo = "sha256";
}; };
dependentCA = mkDerivation {
name = "dependent";
buildCommand = ''
echo "building a dependent derivation"
mkdir -p $out
echo ${rootCA}/hello > $out/dep
'';
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
};
transitivelyDependentCA = mkDerivation {
name = "transitively-dependent";
buildCommand = ''
echo "building transitively-dependent"
cat ${dependentCA}/dep
echo ${dependentCA} > $out
'';
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
};
} }

View file

@ -2,16 +2,26 @@
source common.sh source common.sh
clearStore
clearCache
export REMOTE_STORE=file://$cacheDir
drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 1) drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 1)
nix --experimental-features 'nix-command ca-derivations' show-derivation --derivation "$drv" --arg seed 1 nix --experimental-features 'nix-command ca-derivations' show-derivation --derivation "$drv" --arg seed 1
commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "rootCA" "--no-out-link") testDerivation () {
out1=$(nix-build "${commonArgs[@]}" ./content-addressed.nix --arg seed 1) local derivationPath=$1
out2=$(nix-build "${commonArgs[@]}" ./content-addressed.nix --arg seed 2) local commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" "--no-out-link")
local out1 out2
out1=$(nix-build "${commonArgs[@]}" --arg seed 1)
out2=$(nix-build "${commonArgs[@]}" --arg seed 2 "${secondSeedArgs[@]}")
test "$out1" == "$out2"
}
test $out1 == $out2 testDerivation rootCA
# The seed only changes the root derivation, and not its output, so the
# dependent derivations should only need to be built once.
secondSeedArgs=(-j0)
# Don't directly build dependentCA; that way we make sure we don't rely on
# dependent derivations already having been built.
#testDerivation dependentCA
testDerivation transitivelyDependentCA
nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 5
nix-collect-garbage --experimental-features ca-derivations --option keep-derivations true

View file

@ -15,6 +15,7 @@ nix_tests = \
linux-sandbox.sh \ linux-sandbox.sh \
build-dry.sh \ build-dry.sh \
build-remote-input-addressed.sh \ build-remote-input-addressed.sh \
ssh-relay.sh \
nar-access.sh \ nar-access.sh \
structured-attrs.sh \ structured-attrs.sh \
fetchGit.sh \ fetchGit.sh \

View file

@ -9,9 +9,8 @@ rm -f $TEST_ROOT/result
export unreachable=$(nix add-to-store ./recursive.sh) export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L --impure --expr ' NIX_BIN_DIR=$(dirname $(type -p nix)) nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L --impure --expr '
with import ./config.nix; with import ./config.nix;
with import <nix/config.nix>;
mkDerivation { mkDerivation {
name = "recursive"; name = "recursive";
dummy = builtins.toFile "dummy" "bla bla"; dummy = builtins.toFile "dummy" "bla bla";
@ -24,9 +23,10 @@ nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/resu
buildCommand = '\'\'' buildCommand = '\'\''
mkdir $out mkdir $out
PATH=${nixBinDir}:$PATH
opts="--experimental-features nix-command" opts="--experimental-features nix-command"
PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH
# Check that we can query/build paths in our input closure. # Check that we can query/build paths in our input closure.
nix $opts path-info $dummy nix $opts path-info $dummy
nix $opts build $dummy nix $opts build $dummy

View file

@ -13,14 +13,14 @@ hash=$(nix-hash $path2)
chmod u+w $path2 chmod u+w $path2
touch $path2/bad touch $path2/bad
if nix-store --verify --check-contents -v; then (! nix-store --verify --check-contents -v)
echo "nix-store --verify succeeded unexpectedly" >&2
exit 1
fi
# The path can be repaired by rebuilding the derivation. # The path can be repaired by rebuilding the derivation.
nix-store --verify --check-contents --repair nix-store --verify --check-contents --repair
(! [ -e $path2/bad ])
(! [ -w $path2 ])
nix-store --verify-path $path2 nix-store --verify-path $path2
# Re-corrupt and delete the deriver. Now --verify --repair should # Re-corrupt and delete the deriver. Now --verify --repair should
@ -30,10 +30,7 @@ touch $path2/bad
nix-store --delete $(nix-store -qd $path2) nix-store --delete $(nix-store -qd $path2)
if nix-store --verify --check-contents --repair; then (! nix-store --verify --check-contents --repair)
echo "nix-store --verify --repair succeeded unexpectedly" >&2
exit 1
fi
nix-build dependencies.nix -o $TEST_ROOT/result --repair nix-build dependencies.nix -o $TEST_ROOT/result --repair

View file

@ -10,13 +10,15 @@ outPath=$(nix-store -rvv "$drvPath")
echo "output path is $outPath" echo "output path is $outPath"
(! [ -w $outPath ])
text=$(cat "$outPath"/hello) text=$(cat "$outPath"/hello)
if test "$text" != "Hello World!"; then exit 1; fi if test "$text" != "Hello World!"; then exit 1; fi
# Directed delete: $outPath is not reachable from a root, so it should # Directed delete: $outPath is not reachable from a root, so it should
# be deleteable. # be deleteable.
nix-store --delete $outPath nix-store --delete $outPath
if test -e $outPath/hello; then false; fi (! [ -e $outPath/hello ])
outPath="$(NIX_REMOTE=local?store=/foo\&real=$TEST_ROOT/real-store nix-instantiate --readonly-mode hash-check.nix)" outPath="$(NIX_REMOTE=local?store=/foo\&real=$TEST_ROOT/real-store nix-instantiate --readonly-mode hash-check.nix)"
if test "$outPath" != "/foo/lfy1s6ca46rm5r6w4gg9hc0axiakjcnm-dependencies.drv"; then if test "$outPath" != "/foo/lfy1s6ca46rm5r6w4gg9hc0axiakjcnm-dependencies.drv"; then

16
tests/ssh-relay.sh Normal file
View file

@ -0,0 +1,16 @@
source common.sh
echo foo > $TEST_ROOT/hello.sh
ssh_localhost=ssh://localhost
remote_store=?remote-store=$ssh_localhost
store=$ssh_localhost
store+=$remote_store
store+=$remote_store
store+=$remote_store
out=$(nix add-to-store --store "$store" $TEST_ROOT/hello.sh)
[ foo = $(< $out) ]

View file

@ -17,7 +17,7 @@ test_tarball() {
local compressor="$2" local compressor="$2"
tarball=$TEST_ROOT/tarball.tar$ext tarball=$TEST_ROOT/tarball.tar$ext
(cd $TEST_ROOT && tar c tarball) | $compressor > $tarball (cd $TEST_ROOT && tar cf - tarball) | $compressor > $tarball
nix-env -f file://$tarball -qa --out-path | grep -q dependencies nix-env -f file://$tarball -qa --out-path | grep -q dependencies