From e90569905ecdfe3225314f0e5f122263aa68efb2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Sep 2013 15:18:15 +0200 Subject: [PATCH 0001/2196] Initial version of nix-repl This program interactively reads a Nix expression from the user, evaluates it, and prints the result. --- default.nix | 10 ++++++++++ nix-repl.cc | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 default.nix create mode 100644 nix-repl.cc diff --git a/default.nix b/default.nix new file mode 100644 index 00000000000..55c9ed9e3af --- /dev/null +++ b/default.nix @@ -0,0 +1,10 @@ +with import <nixpkgs> { }; + +runCommand "nix-repl" + { buildInputs = [ readline nixUnstable boehmgc ]; } + '' + mkdir -p $out/bin + g++ -O3 -Wall -o $out/bin/nix-repl ${./nix-repl.cc} \ + -I${nixUnstable}/include/nix -L${nixUnstable}/lib/nix \ + -lexpr -lmain -lreadline + '' diff --git a/nix-repl.cc b/nix-repl.cc new file mode 100644 index 00000000000..ea248b5ca15 --- /dev/null +++ b/nix-repl.cc @@ -0,0 +1,57 @@ +#include <iostream> +#include <cstdlib> + +#include <readline/readline.h> +#include <readline/history.h> + +#include "shared.hh" +#include "eval.hh" + +using namespace std; +using namespace nix; + + +string programId = "nix-repl"; + + +void printHelp() +{ + std::cout << "Usage: nix-repl\n"; +} + + +bool getLine(string & line) +{ + char * s = readline ("nix-repl> "); + if (!s) return false; + line = chomp(string(s)); + free(s); + if (line != "") add_history(line.c_str()); + return true; +} + + +void run(nix::Strings args) +{ + EvalState state; + Path curDir = absPath("."); + + while (true) { + string line; + if (!getLine(line)) break; + + try { + Expr * e = state.parseExprFromString(line, curDir); + Value v; + state.eval(e, v); + state.strictForceValue(v); + std::cout << v << std::endl; + } catch (Error & e) { + printMsg(lvlError, e.msg()); + } + + std::cout << std::endl; + } + + std::cout << std::endl; +} From 287c88ca59c5eae2b33874acc6271ca30b7b7e52 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Sep 2013 17:53:58 +0200 Subject: [PATCH 0002/2196] Support adding variables to the scope The command ":a <expr>" evaluates <expr> and adds the attributes in the resulting attribute set to the interpreter scope.
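Concretely, "adding to the interpreter scope" means extending two structures in lock-step: a static environment that maps names to slot numbers (consulted when a later line is parsed) and a runtime environment that holds the values in those slots. Below is a minimal standalone model of that idea; the Scope/addVar names are made up for illustration and are not Nix's real StaticEnv/Env/displ machinery, which the patch that follows uses.

    #include <cstddef>
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    // Toy model of the repl scope: names resolve to slots when a line is
    // parsed, and slots resolve to values when it is evaluated.
    struct Scope {
        std::map<std::string, std::size_t> staticEnv;  // name -> slot
        std::vector<int> values;                       // slot -> value

        void addVar(const std::string & name, int v) {
            staticEnv[name] = values.size();           // bind the name to the next free slot
            values.push_back(v);                       // and store the value there
        }
        int lookup(const std::string & name) const {
            return values.at(staticEnv.at(name));
        }
    };

    int main() {
        Scope scope;
        scope.addVar("answer", 42);                    // roughly what ':a { answer = 42; }' does
        std::cout << scope.lookup("answer") << std::endl;
    }

The real implementation below additionally preallocates the Env and tracks the next free slot in a displacement counter.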
For instance: nix-repl> :a import {} nix-repl> lib.range 0 10 [ 0 1 2 3 4 5 6 7 8 9 10 ] --- nix-repl.cc | 95 ++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 86 insertions(+), 9 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index ea248b5ca15..10e91ec5328 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -6,6 +6,8 @@ #include "shared.hh" #include "eval.hh" +#include "eval-inline.hh" +#include "store-api.hh" using namespace std; using namespace nix; @@ -14,6 +16,23 @@ using namespace nix; string programId = "nix-repl"; +struct NixRepl +{ + string curDir; + EvalState state; + + StaticEnv staticEnv; + Env * env; + int displ; + + NixRepl(); + void mainLoop(); + void processLine(string line); + void addVar(const Symbol & name, Value * v); + Expr * parseString(string s); +}; + + void printHelp() { std::cout << "Usage: nix-repl\n"; @@ -22,7 +41,7 @@ void printHelp() bool getLine(string & line) { - char * s = readline ("nix-repl> "); + char * s = readline("nix-repl> "); if (!s) return false; line = chomp(string(s)); free(s); @@ -31,21 +50,33 @@ bool getLine(string & line) } -void run(nix::Strings args) +NixRepl::NixRepl() + : staticEnv(false, &state.staticBaseEnv) { - EvalState state; - Path curDir = absPath("."); + curDir = absPath("."); + + env = &state.allocEnv(32768); + env->up = &state.baseEnv; + displ = 0; + + store = openStore(); +} + + +void NixRepl::mainLoop() +{ + std::cerr << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." << std::endl << std::endl; while (true) { string line; if (!getLine(line)) break; + /* Remove preceeding whitespace. */ + size_t n = line.find_first_not_of(" \n\r\t"); + if (n != string::npos) line = string(line, n); + try { - Expr * e = state.parseExprFromString(line, curDir); - Value v; - state.eval(e, v); - state.strictForceValue(v); - std::cout << v << std::endl; + processLine(line); } catch (Error & e) { printMsg(lvlError, e.msg()); } @@ -55,3 +86,49 @@ void run(nix::Strings args) std::cout << std::endl; } + + +void NixRepl::processLine(string line) +{ + if (string(line, 0, 2) == ":a") { + Expr * e = parseString(string(line, 2)); + Value v; + e->eval(state, *env, v); + state.forceAttrs(v); + foreach (Bindings::iterator, i, *v.attrs) + addVar(i->name, i->value); + } + + else if (string(line, 0, 1) == ":") { + throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); + } + + else { + Expr * e = parseString(line); + Value v; + e->eval(state, *env, v); + state.strictForceValue(v); + std::cout << v << std::endl; + } +} + + +void NixRepl::addVar(const Symbol & name, Value * v) +{ + staticEnv.vars[name] = displ; + env->values[displ++] = v; +} + + +Expr * NixRepl::parseString(string s) +{ + Expr * e = state.parseExprFromString(s, curDir, staticEnv); + return e; +} + + +void run(nix::Strings args) +{ + NixRepl repl; + repl.mainLoop(); +} From 3202206d1d906ea6279dadfe608ea92ea0aaf927 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Sep 2013 16:00:48 +0000 Subject: [PATCH 0003/2196] Add a command :t for showing the type of an expression --- nix-repl.cc | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 10e91ec5328..9858a034d72 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -30,6 +30,7 @@ struct NixRepl void processLine(string line); void addVar(const Symbol & name, Value * v); Expr * parseString(string s); + void evalString(string s, Value & v); }; @@ -91,22 +92,26 @@ void NixRepl::mainLoop() void NixRepl::processLine(string line) { if 
(string(line, 0, 2) == ":a") { - Expr * e = parseString(string(line, 2)); Value v; - e->eval(state, *env, v); + evalString(string(line, 2), v); state.forceAttrs(v); foreach (Bindings::iterator, i, *v.attrs) addVar(i->name, i->value); } + else if (string(line, 0, 2) == ":t") { + Value v; + evalString(string(line, 2), v); + std::cout << showType(v) << std::endl; + } + else if (string(line, 0, 1) == ":") { throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); } else { - Expr * e = parseString(line); Value v; - e->eval(state, *env, v); + evalString(line, v); state.strictForceValue(v); std::cout << v << std::endl; } @@ -127,6 +132,14 @@ Expr * NixRepl::parseString(string s) } +void NixRepl::evalString(string s, Value & v) +{ + Expr * e = parseString(s); + e->eval(state, *env, v); + state.forceValue(v); +} + + void run(nix::Strings args) { NixRepl repl; From 0f6279d87421f19cd2c1e286163d7567f13dc77f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Sep 2013 18:18:27 +0200 Subject: [PATCH 0004/2196] Add a command :l for loading a file into scope Example: nix-repl> :l nix-repl> lib.range 0 10 [ 0 1 2 3 4 5 6 7 8 9 10 ] nix-repl> :l nix-repl> config.boot.kernelModules [ "vboxdrv" "vboxnetadp" ... ] --- nix-repl.cc | 43 +++++++++++++++++++++++++++++++++---------- 1 file changed, 33 insertions(+), 10 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 9858a034d72..0b84834491e 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -8,6 +8,7 @@ #include "eval.hh" #include "eval-inline.hh" #include "store-api.hh" +#include "common-opts.hh" using namespace std; using namespace nix; @@ -28,7 +29,8 @@ struct NixRepl NixRepl(); void mainLoop(); void processLine(string line); - void addVar(const Symbol & name, Value * v); + void addAttrsToScope(Value & attrs); + void addVarToScope(const Symbol & name, Value * v); Expr * parseString(string s); void evalString(string s, Value & v); }; @@ -51,6 +53,15 @@ bool getLine(string & line) } +string removeWhitespace(string s) +{ + s = chomp(s); + size_t n = s.find_first_not_of(" \n\r\t"); + if (n != string::npos) s = string(s, n); + return s; +} + + NixRepl::NixRepl() : staticEnv(false, &state.staticBaseEnv) { @@ -72,12 +83,8 @@ void NixRepl::mainLoop() string line; if (!getLine(line)) break; - /* Remove preceeding whitespace. 
*/ - size_t n = line.find_first_not_of(" \n\r\t"); - if (n != string::npos) line = string(line, n); - try { - processLine(line); + processLine(removeWhitespace(line)); } catch (Error & e) { printMsg(lvlError, e.msg()); } @@ -94,9 +101,17 @@ void NixRepl::processLine(string line) if (string(line, 0, 2) == ":a") { Value v; evalString(string(line, 2), v); - state.forceAttrs(v); - foreach (Bindings::iterator, i, *v.attrs) - addVar(i->name, i->value); + addAttrsToScope(v); + } + + else if (string(line, 0, 2) == ":l") { + state.resetFileCache(); + Path path = lookupFileArg(state, removeWhitespace(string(line, 2))); + Value v, v2; + state.evalFile(path, v); + Bindings bindings; + state.autoCallFunction(bindings, v, v2); + addAttrsToScope(v2); } else if (string(line, 0, 2) == ":t") { @@ -118,7 +133,15 @@ void NixRepl::processLine(string line) } -void NixRepl::addVar(const Symbol & name, Value * v) +void NixRepl::addAttrsToScope(Value & attrs) +{ + state.forceAttrs(attrs); + foreach (Bindings::iterator, i, *attrs.attrs) + addVarToScope(i->name, i->value); +} + + +void NixRepl::addVarToScope(const Symbol & name, Value * v) { staticEnv.vars[name] = displ; env->values[displ++] = v; From 504563ea4431f765028dc0ccacd5ee834d0d8a91 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 11:54:49 +0200 Subject: [PATCH 0005/2196] Fix build --- default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/default.nix b/default.nix index 55c9ed9e3af..635ead71ea5 100644 --- a/default.nix +++ b/default.nix @@ -6,5 +6,5 @@ runCommand "nix-repl" mkdir -p $out/bin g++ -O3 -Wall -o $out/bin/nix-repl ${./nix-repl.cc} \ -I${nixUnstable}/include/nix -L${nixUnstable}/lib/nix \ - -lexpr -lmain -lreadline + -lexpr -lmain -lreadline -lgc '' From 4fb82d3d800be9a53631c3c8a3321a359306d835 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 13:01:02 +0200 Subject: [PATCH 0006/2196] Handle SIGINT to cancel the current line --- nix-repl.cc | 42 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 0b84834491e..bb205211793 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -1,6 +1,8 @@ #include #include +#include + #include #include @@ -42,13 +44,41 @@ void printHelp() } +/* Apparently, the only way to get readline() to return on Ctrl-C + (SIGINT) is to use siglongjmp(). That's fucked up... 
*/ +static sigjmp_buf sigintJmpBuf; + + +static void sigintHandler(int signo) +{ + siglongjmp(sigintJmpBuf, 1); +} + + bool getLine(string & line) { - char * s = readline("nix-repl> "); - if (!s) return false; - line = chomp(string(s)); - free(s); - if (line != "") add_history(line.c_str()); + struct sigaction act, old; + act.sa_handler = sigintHandler; + sigfillset(&act.sa_mask); + act.sa_flags = 0; + if (sigaction(SIGINT, &act, &old)) + throw SysError("installing handler for SIGINT"); + + if (sigsetjmp(sigintJmpBuf, 1)) + line = ""; + else { + char * s = readline("nix-repl> "); + if (!s) return false; + line = chomp(string(s)); + free(s); + if (line != "") add_history(line.c_str()); + } + + _isInterrupted = 0; + + if (sigaction(SIGINT, &old, 0)) + throw SysError("restoring handler for SIGINT"); + return true; } @@ -98,6 +128,8 @@ void NixRepl::mainLoop() void NixRepl::processLine(string line) { + if (line == "") return; + if (string(line, 0, 2) == ":a") { Value v; evalString(string(line, 2), v); From 0b419c048b206e2fe68758ea1bd5fa7b1c29c521 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 13:14:28 +0200 Subject: [PATCH 0007/2196] Use readline history file --- nix-repl.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index bb205211793..bb18359c155 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -71,7 +71,10 @@ bool getLine(string & line) if (!s) return false; line = chomp(string(s)); free(s); - if (line != "") add_history(line.c_str()); + if (line != "") { + add_history(line.c_str()); + append_history(1, 0); + } } _isInterrupted = 0; @@ -109,6 +112,9 @@ void NixRepl::mainLoop() { std::cerr << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." << std::endl << std::endl; + using_history(); + read_history(0); + while (true) { string line; if (!getLine(line)) break; From ad0dd359b4434db84bf6458715440cc15f896ddc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 13:20:35 +0200 Subject: [PATCH 0008/2196] Don't exit on SIGINT during evaluation However, this may leave thunks in black-holed state, so it's not really safe. 
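The Ctrl-C behaviour this patch refines was introduced two patches earlier: a SIGINT handler siglongjmp()s out of the blocked readline() call so that only the current line is cancelled instead of the whole process. A self-contained sketch of that pattern is below; it uses plain readline and none of Nix's helpers, so treat it as an illustration of the technique rather than the exact nix-repl code.

    #include <setjmp.h>
    #include <signal.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string>
    #include <readline/readline.h>

    static sigjmp_buf sigintJmpBuf;

    static void sigintHandler(int) { siglongjmp(sigintJmpBuf, 1); }

    // Read one line; Ctrl-C yields an empty line, Ctrl-D (EOF) returns false.
    static bool getLine(std::string & line)
    {
        struct sigaction act{}, old;
        act.sa_handler = sigintHandler;
        sigemptyset(&act.sa_mask);
        if (sigaction(SIGINT, &act, &old)) return false;   // install temporary handler

        bool eof = false;
        if (sigsetjmp(sigintJmpBuf, 1))
            line = "";                                     // re-entered here after Ctrl-C
        else {
            char * s = readline("sketch> ");
            if (!s) eof = true;
            else { line = s; free(s); }
        }

        sigaction(SIGINT, &old, 0);                        // restore the previous handler
        return !eof;
    }

    int main()
    {
        std::string line;
        while (getLine(line))
            printf("read: %s\n", line.c_str());
    }

Only readline is needed at link time (-lreadline), as in the default.nix above.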
--- nix-repl.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nix-repl.cc b/nix-repl.cc index bb18359c155..45d7433396e 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -123,6 +123,8 @@ void NixRepl::mainLoop() processLine(removeWhitespace(line)); } catch (Error & e) { printMsg(lvlError, e.msg()); + } catch (Interrupted & e) { + printMsg(lvlError, e.msg()); } std::cout << std::endl; @@ -176,6 +178,7 @@ void NixRepl::addAttrsToScope(Value & attrs) state.forceAttrs(attrs); foreach (Bindings::iterator, i, *attrs.attrs) addVarToScope(i->name, i->value); + printMsg(lvlError, format("added %1% variables") % attrs.attrs->size()); } From c6712a007fc55398893995a3466d35ae0697db05 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 14:58:53 +0200 Subject: [PATCH 0009/2196] Add a command :b to build a derivation --- nix-repl.cc | 31 ++++++++++++++++++++++++++----- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 45d7433396e..f41cf4d292d 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -11,6 +11,8 @@ #include "eval-inline.hh" #include "store-api.hh" #include "common-opts.hh" +#include "get-drvs.hh" +#include "derivations.hh" using namespace std; using namespace nix; @@ -122,9 +124,9 @@ void NixRepl::mainLoop() try { processLine(removeWhitespace(line)); } catch (Error & e) { - printMsg(lvlError, e.msg()); + printMsg(lvlError, "error: " + e.msg()); } catch (Interrupted & e) { - printMsg(lvlError, e.msg()); + printMsg(lvlError, "error: " + e.msg()); } std::cout << std::endl; @@ -160,10 +162,29 @@ void NixRepl::processLine(string line) std::cout << showType(v) << std::endl; } - else if (string(line, 0, 1) == ":") { - throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); + else if (string(line, 0, 2) == ":b") { + Value v; + evalString(string(line, 2), v); + DrvInfo drvInfo; + if (!getDerivation(state, v, drvInfo, false)) + throw Error("expression does not evaluation to a derivation, so I can't build it"); + Path drvPath = drvInfo.queryDrvPath(state); + if (drvPath == "" || !store->isValidPath(drvPath)) + throw Error("expression did not evaluate to a valid derivation"); + /* We could do the build in this process using buildPaths(), + but doing it in a child makes it easier to recover from + problems / SIGINT. 
*/ + if (system(("nix-store -r " + drvPath + " > /dev/null").c_str()) == -1) + throw SysError("starting nix-store"); + Derivation drv = parseDerivation(readFile(drvPath)); + std::cout << "this derivation produced the following outputs:" << std::endl; + foreach (DerivationOutputs::iterator, i, drv.outputs) + std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; } + else if (string(line, 0, 1) == ":") + throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); + else { Value v; evalString(line, v); @@ -178,7 +199,7 @@ void NixRepl::addAttrsToScope(Value & attrs) state.forceAttrs(attrs); foreach (Bindings::iterator, i, *attrs.attrs) addVarToScope(i->name, i->value); - printMsg(lvlError, format("added %1% variables") % attrs.attrs->size()); + std::cout << format("added %1% variables") % attrs.attrs->size() << std::endl; } From b5944ac4ffffde7f52d0bbe3f4a2d53bba70bb66 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 15:05:18 +0200 Subject: [PATCH 0010/2196] Add a command :s to start a nix-shell for a derivation --- nix-repl.cc | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index f41cf4d292d..838928b35dd 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -140,13 +140,15 @@ void NixRepl::processLine(string line) { if (line == "") return; - if (string(line, 0, 2) == ":a") { + string command = string(line, 0, 2); + + if (command == ":a") { Value v; evalString(string(line, 2), v); addAttrsToScope(v); } - else if (string(line, 0, 2) == ":l") { + else if (command == ":l") { state.resetFileCache(); Path path = lookupFileArg(state, removeWhitespace(string(line, 2))); Value v, v2; @@ -156,13 +158,13 @@ void NixRepl::processLine(string line) addAttrsToScope(v2); } - else if (string(line, 0, 2) == ":t") { + else if (command == ":t") { Value v; evalString(string(line, 2), v); std::cout << showType(v) << std::endl; } - else if (string(line, 0, 2) == ":b") { + else if (command == ":b" || command == ":s") { Value v; evalString(string(line, 2), v); DrvInfo drvInfo; @@ -171,15 +173,21 @@ void NixRepl::processLine(string line) Path drvPath = drvInfo.queryDrvPath(state); if (drvPath == "" || !store->isValidPath(drvPath)) throw Error("expression did not evaluate to a valid derivation"); - /* We could do the build in this process using buildPaths(), - but doing it in a child makes it easier to recover from - problems / SIGINT. */ - if (system(("nix-store -r " + drvPath + " > /dev/null").c_str()) == -1) - throw SysError("starting nix-store"); - Derivation drv = parseDerivation(readFile(drvPath)); - std::cout << "this derivation produced the following outputs:" << std::endl; - foreach (DerivationOutputs::iterator, i, drv.outputs) - std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; + + if (command == ":b") { + /* We could do the build in this process using buildPaths(), + but doing it in a child makes it easier to recover from + problems / SIGINT. 
*/ + if (system(("nix-store -r " + drvPath + " > /dev/null").c_str()) == -1) + throw SysError("starting nix-store"); + Derivation drv = parseDerivation(readFile(drvPath)); + std::cout << "this derivation produced the following outputs:" << std::endl; + foreach (DerivationOutputs::iterator, i, drv.outputs) + std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; + } else { + if (system(("nix-shell " + drvPath).c_str()) == -1) + throw SysError("starting nix-shell"); + } } else if (string(line, 0, 1) == ":") From cf4c29d90a1b0ec7f8cd7f0c5258be63a2e02058 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 15:20:06 +0200 Subject: [PATCH 0011/2196] Load files specified on the command line MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit For example: $ nix-repl '' '' Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. Loading ‘’... Added 3337 variables. Loading ‘’... Added 7 variables. nix-repl> --- nix-repl.cc | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 838928b35dd..2ef23faa585 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -31,8 +31,9 @@ struct NixRepl int displ; NixRepl(); - void mainLoop(); + void mainLoop(const Strings & args); void processLine(string line); + void loadFile(const Path & path); void addAttrsToScope(Value & attrs); void addVarToScope(const Symbol & name, Value * v); Expr * parseString(string s); @@ -110,9 +111,15 @@ NixRepl::NixRepl() } -void NixRepl::mainLoop() +void NixRepl::mainLoop(const Strings & args) { - std::cerr << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." << std::endl << std::endl; + std::cout << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." 
<< std::endl << std::endl; + + foreach (Strings::const_iterator, i, args) { + std::cout << format("Loading ‘%1%’...") % *i << std::endl; + loadFile(*i); + std::cout << std::endl; + } using_history(); read_history(0); @@ -150,12 +157,7 @@ void NixRepl::processLine(string line) else if (command == ":l") { state.resetFileCache(); - Path path = lookupFileArg(state, removeWhitespace(string(line, 2))); - Value v, v2; - state.evalFile(path, v); - Bindings bindings; - state.autoCallFunction(bindings, v, v2); - addAttrsToScope(v2); + loadFile(removeWhitespace(string(line, 2))); } else if (command == ":t") { @@ -202,12 +204,22 @@ void NixRepl::processLine(string line) } +void NixRepl::loadFile(const Path & path) +{ + Value v, v2; + state.evalFile(lookupFileArg(state, path), v); + Bindings bindings; + state.autoCallFunction(bindings, v, v2); + addAttrsToScope(v2); +} + + void NixRepl::addAttrsToScope(Value & attrs) { state.forceAttrs(attrs); foreach (Bindings::iterator, i, *attrs.attrs) addVarToScope(i->name, i->value); - std::cout << format("added %1% variables") % attrs.attrs->size() << std::endl; + std::cout << format("Added %1% variables.") % attrs.attrs->size() << std::endl; } @@ -233,8 +245,8 @@ void NixRepl::evalString(string s, Value & v) } -void run(nix::Strings args) +void run(Strings args) { NixRepl repl; - repl.mainLoop(); + repl.mainLoop(args); } From 0abdf4beaaa64c01ed3cfb4cc5fd78997116fac8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 19:51:59 +0200 Subject: [PATCH 0012/2196] Add basic variable name completion --- nix-repl.cc | 129 +++++++++++++++++++++++++++++++++++----------------- 1 file changed, 87 insertions(+), 42 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 2ef23faa585..f85eac1246b 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -29,9 +29,15 @@ struct NixRepl StaticEnv staticEnv; Env * env; int displ; + StringSet varNames; + + StringSet completions; + StringSet::iterator curCompletion; NixRepl(); void mainLoop(const Strings & args); + void completePrefix(string prefix); + bool getLine(string & line); void processLine(string line); void loadFile(const Path & path); void addAttrsToScope(Value & attrs); @@ -47,48 +53,6 @@ void printHelp() } -/* Apparently, the only way to get readline() to return on Ctrl-C - (SIGINT) is to use siglongjmp(). That's fucked up... */ -static sigjmp_buf sigintJmpBuf; - - -static void sigintHandler(int signo) -{ - siglongjmp(sigintJmpBuf, 1); -} - - -bool getLine(string & line) -{ - struct sigaction act, old; - act.sa_handler = sigintHandler; - sigfillset(&act.sa_mask); - act.sa_flags = 0; - if (sigaction(SIGINT, &act, &old)) - throw SysError("installing handler for SIGINT"); - - if (sigsetjmp(sigintJmpBuf, 1)) - line = ""; - else { - char * s = readline("nix-repl> "); - if (!s) return false; - line = chomp(string(s)); - free(s); - if (line != "") { - add_history(line.c_str()); - append_history(1, 0); - } - } - - _isInterrupted = 0; - - if (sigaction(SIGINT, &old, 0)) - throw SysError("restoring handler for SIGINT"); - - return true; -} - - string removeWhitespace(string s) { s = chomp(s); @@ -143,6 +107,86 @@ void NixRepl::mainLoop(const Strings & args) } +/* Apparently, the only way to get readline() to return on Ctrl-C + (SIGINT) is to use siglongjmp(). That's fucked up... */ +static sigjmp_buf sigintJmpBuf; + + +static void sigintHandler(int signo) +{ + siglongjmp(sigintJmpBuf, 1); +} + + +/* Oh, if only g++ had nested functions... 
*/ +NixRepl * curRepl; + +char * completerThunk(const char * s, int state) +{ + string prefix(s); + + /* If the prefix has a slash in it, use readline's builtin filename + completer. */ + if (prefix.find('/') != string::npos) + return rl_filename_completion_function(s, state); + + /* Otherwise, return all symbols that start with the prefix. */ + if (state == 0) { + curRepl->completePrefix(s); + curRepl->curCompletion = curRepl->completions.begin(); + } + if (curRepl->curCompletion == curRepl->completions.end()) return 0; + return strdup((curRepl->curCompletion++)->c_str()); +} + + +bool NixRepl::getLine(string & line) +{ + struct sigaction act, old; + act.sa_handler = sigintHandler; + sigfillset(&act.sa_mask); + act.sa_flags = 0; + if (sigaction(SIGINT, &act, &old)) + throw SysError("installing handler for SIGINT"); + + if (sigsetjmp(sigintJmpBuf, 1)) + line = ""; + else { + curRepl = this; + rl_completion_entry_function = completerThunk; + + char * s = readline("nix-repl> "); + if (!s) return false; + line = chomp(string(s)); + free(s); + if (line != "") { + add_history(line.c_str()); + append_history(1, 0); + } + } + + _isInterrupted = 0; + + if (sigaction(SIGINT, &old, 0)) + throw SysError("restoring handler for SIGINT"); + + return true; +} + + +void NixRepl::completePrefix(string prefix) +{ + completions.clear(); + + StringSet::iterator i = varNames.lower_bound(prefix); + while (i != varNames.end()) { + if (string(*i, 0, prefix.size()) != prefix) break; + completions.insert(*i); + i++; + } +} + + void NixRepl::processLine(string line) { if (line == "") return; @@ -227,6 +271,7 @@ void NixRepl::addVarToScope(const Symbol & name, Value * v) { staticEnv.vars[name] = displ; env->values[displ++] = v; + varNames.insert((string) name); } From c6f2b89c0e3091e5020983c9a02f36b9c33c3f81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 6 Sep 2013 21:00:36 +0200 Subject: [PATCH 0013/2196] Restore affinity --- default.nix | 3 ++- nix-repl.cc | 33 ++++++++++++++++++++++++++------- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/default.nix b/default.nix index 635ead71ea5..eae1d56d0a7 100644 --- a/default.nix +++ b/default.nix @@ -4,7 +4,8 @@ runCommand "nix-repl" { buildInputs = [ readline nixUnstable boehmgc ]; } '' mkdir -p $out/bin - g++ -O3 -Wall -o $out/bin/nix-repl ${./nix-repl.cc} \ + g++ -O3 -Wall -std=c++0x \ + -o $out/bin/nix-repl ${./nix-repl.cc} \ -I${nixUnstable}/include/nix -L${nixUnstable}/lib/nix \ -lexpr -lmain -lreadline -lgc '' diff --git a/nix-repl.cc b/nix-repl.cc index f85eac1246b..15ac42dcc77 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -13,6 +13,7 @@ #include "common-opts.hh" #include "get-drvs.hh" #include "derivations.hh" +#include "affinity.hh" using namespace std; using namespace nix; @@ -187,6 +188,27 @@ void NixRepl::completePrefix(string prefix) } +static int runProgram(const string & program, const Strings & args) +{ + std::vector cargs; /* careful with c_str()! 
*/ + cargs.push_back(program.c_str()); + for (Strings::const_iterator i = args.begin(); i != args.end(); ++i) + cargs.push_back(i->c_str()); + cargs.push_back(0); + + Pid pid; + pid = fork(); + if (pid == -1) throw SysError("forking"); + if (pid == 0) { + restoreAffinity(); + execvp(program.c_str(), (char * *) &cargs[0]); + _exit(1); + } + + return pid.wait(true); +} + + void NixRepl::processLine(string line) { if (line == "") return; @@ -224,16 +246,13 @@ void NixRepl::processLine(string line) /* We could do the build in this process using buildPaths(), but doing it in a child makes it easier to recover from problems / SIGINT. */ - if (system(("nix-store -r " + drvPath + " > /dev/null").c_str()) == -1) - throw SysError("starting nix-store"); + if (runProgram("nix-store", Strings{"-r", drvPath}) != 0) return; Derivation drv = parseDerivation(readFile(drvPath)); - std::cout << "this derivation produced the following outputs:" << std::endl; + std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; foreach (DerivationOutputs::iterator, i, drv.outputs) std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; - } else { - if (system(("nix-shell " + drvPath).c_str()) == -1) - throw SysError("starting nix-shell"); - } + } else + runProgram("nix-shell", Strings{drvPath}); } else if (string(line, 0, 1) == ":") From 7e3625f924825db2ffa5e58d4b414d93d9af2465 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 7 Sep 2013 00:35:54 +0200 Subject: [PATCH 0014/2196] Improved value display By default, we don't recurse into attribute sets or lists when printing a value. However, the new :p command does recurse. --- nix-repl.cc | 118 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 116 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 15ac42dcc77..d2179d77a8e 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -45,6 +45,7 @@ struct NixRepl void addVarToScope(const Symbol & name, Value * v); Expr * parseString(string s); void evalString(string s, Value & v); + std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); }; @@ -255,14 +256,19 @@ void NixRepl::processLine(string line) runProgram("nix-shell", Strings{drvPath}); } + else if (command == ":p") { + Value v; + evalString(string(line, 2), v); + printValue(std::cout, v, 1000000000) << std::endl; + } + else if (string(line, 0, 1) == ":") throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); else { Value v; evalString(line, v); - state.strictForceValue(v); - std::cout << v << std::endl; + printValue(std::cout, v, 1) << std::endl; } } @@ -309,6 +315,114 @@ void NixRepl::evalString(string s, Value & v) } +// FIXME: lot of cut&paste from Nix's eval.cc. +std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth) +{ + str.flush(); + checkInterrupt(); + + state.forceValue(v); + + switch (v.type) { + + case tInt: + str << v.integer; + break; + + case tBool: + str << (v.boolean ? "true" : "false"); + break; + + case tString: + str << "\""; + for (const char * i = v.string.s; *i; i++) + if (*i == '\"' || *i == '\\') str << "\\" << *i; + else if (*i == '\n') str << "\\n"; + else if (*i == '\r') str << "\\r"; + else if (*i == '\t') str << "\\t"; + else str << *i; + str << "\""; + break; + + case tPath: + str << v.path; // !!! escaping? 
+ break; + + case tNull: + str << "null"; + break; + + case tAttrs: { + bool isDrv = state.isDerivation(v); + if (isDrv) str << "(derivation "; + str << "{ "; + + if (maxDepth > 0) { + typedef std::map Sorted; + Sorted sorted; + foreach (Bindings::iterator, i, *v.attrs) + sorted[i->name] = i->value; + + /* If this is a derivation, then don't show the + self-references ("all", "out", etc.). */ + StringSet hidden; + if (isDrv) { + hidden.insert("all"); + Bindings::iterator i = v.attrs->find(state.sOutputs); + if (i == v.attrs->end()) + hidden.insert("out"); + else { + state.forceList(*i->value); + for (unsigned int j = 0; j < i->value->list.length; ++j) + hidden.insert(state.forceStringNoCtx(*i->value->list.elems[j])); + } + } + + foreach (Sorted::iterator, i, sorted) + if (hidden.find(i->first) == hidden.end()) + printValue(str << i->first << " = ", *i->second, maxDepth - 1) << "; "; + else + str << i->first << " = ...; "; + + } else + str << "... "; + + str << "}"; + if (isDrv) str << ")"; + break; + } + + case tList: + str << "[ "; + if (maxDepth > 0) + for (unsigned int n = 0; n < v.list.length; ++n) + printValue(str, *v.list.elems[n], maxDepth - 1) << " "; + else + str << "... "; + str << "]"; + break; + + case tLambda: + str << "«lambda»"; + break; + + case tPrimOp: + str << "«primop»"; + break; + + case tPrimOpApp: + str << "«primop-app»"; + break; + + default: + str << "«unknown»"; + break; + } + + return str; +} + + void run(Strings args) { NixRepl repl; From e587aec1235d7834d04d6d9f7997bc010ef99925 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 11:14:43 +0200 Subject: [PATCH 0015/2196] printValue: Don't show lists/attribute sets twice --- nix-repl.cc | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index d2179d77a8e..a6e5f78ec9c 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -45,7 +45,10 @@ struct NixRepl void addVarToScope(const Symbol & name, Value * v); Expr * parseString(string s); void evalString(string s, Value & v); + + typedef set ValuesSeen; std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); + std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen); }; @@ -315,8 +318,15 @@ void NixRepl::evalString(string s, Value & v) } +std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth) +{ + ValuesSeen seen; + return printValue(str, v, maxDepth, seen); +} + + // FIXME: lot of cut&paste from Nix's eval.cc. 
-std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth) +std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen) { str.flush(); checkInterrupt(); @@ -353,6 +363,8 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int break; case tAttrs: { + seen.insert(&v); + bool isDrv = state.isDerivation(v); if (isDrv) str << "(derivation "; str << "{ "; @@ -379,10 +391,12 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int } foreach (Sorted::iterator, i, sorted) - if (hidden.find(i->first) == hidden.end()) - printValue(str << i->first << " = ", *i->second, maxDepth - 1) << "; "; + if (hidden.find(i->first) != hidden.end()) + str << i->first << " = «...»; "; + else if (seen.find(i->second) != seen.end()) + str << i->first << " = «repeated»; "; else - str << i->first << " = ...; "; + printValue(str << i->first << " = ", *i->second, maxDepth - 1, seen) << "; "; } else str << "... "; @@ -393,10 +407,16 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int } case tList: + seen.insert(&v); + str << "[ "; if (maxDepth > 0) - for (unsigned int n = 0; n < v.list.length; ++n) - printValue(str, *v.list.elems[n], maxDepth - 1) << " "; + for (unsigned int n = 0; n < v.list.length; ++n) { + if (seen.find(v.list.elems[n]) != seen.end()) + str << "«repeated» "; + else + printValue(str, *v.list.elems[n], maxDepth - 1, seen) << " "; + } else str << "... "; str << "]"; From 8e765b8876ff67879a6bd1a067bad526b14a4045 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 11:34:54 +0200 Subject: [PATCH 0016/2196] printValue: Show assertion errors inline --- nix-repl.cc | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index a6e5f78ec9c..f7eb6ab5c0d 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -390,13 +390,20 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m } } - foreach (Sorted::iterator, i, sorted) + foreach (Sorted::iterator, i, sorted) { + str << i->first << " = "; if (hidden.find(i->first) != hidden.end()) - str << i->first << " = «...»; "; + str << "«...»"; else if (seen.find(i->second) != seen.end()) - str << i->first << " = «repeated»; "; + str << "«repeated»"; else - printValue(str << i->first << " = ", *i->second, maxDepth - 1, seen) << "; "; + try { + printValue(str, *i->second, maxDepth - 1, seen); + } catch (AssertionError & e) { + str << "«error: " << e.msg() << "»"; + } + str << "; "; + } } else str << "... "; @@ -413,9 +420,14 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m if (maxDepth > 0) for (unsigned int n = 0; n < v.list.length; ++n) { if (seen.find(v.list.elems[n]) != seen.end()) - str << "«repeated» "; + str << "«repeated»"; else - printValue(str, *v.list.elems[n], maxDepth - 1, seen) << " "; + try { + printValue(str, *v.list.elems[n], maxDepth - 1, seen); + } catch (AssertionError & e) { + str << "«error: " << e.msg() << "»"; + } + str << " "; } else str << "... 
"; From e133e91410d9486e022a1bc0372b822152b6654e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 12:00:33 +0200 Subject: [PATCH 0017/2196] Support tab-completion on attribute sets Example: $ nix-repl '' > config.services.xserver.desktop comletes to > config.services.xserver.desktopManager You also get suggestions if there are multiple matches: > config.services.xserver.desktopManager.kde4 config.services.xserver.desktopManager.kde4.enable config.services.xserver.desktopManager.kde4.phononBackends --- nix-repl.cc | 39 ++++++++++++++++++++++++++++++++++----- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index f7eb6ab5c0d..9c22abbfacf 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -183,11 +183,40 @@ void NixRepl::completePrefix(string prefix) { completions.clear(); - StringSet::iterator i = varNames.lower_bound(prefix); - while (i != varNames.end()) { - if (string(*i, 0, prefix.size()) != prefix) break; - completions.insert(*i); - i++; + size_t dot = prefix.rfind('.'); + + if (dot == string::npos) { + /* This is a variable name; look it up in the current scope. */ + StringSet::iterator i = varNames.lower_bound(prefix); + while (i != varNames.end()) { + if (string(*i, 0, prefix.size()) != prefix) break; + completions.insert(*i); + i++; + } + } else { + try { + /* This is an expression that should evaluate to an + attribute set. Evaluate it to get the names of the + attributes. */ + string expr(prefix, 0, dot); + string prefix2 = string(prefix, dot + 1); + + Expr * e = parseString(expr); + Value v; + e->eval(state, *env, v); + state.forceAttrs(v); + + foreach (Bindings::iterator, i, *v.attrs) { + string name = i->name; + if (string(name, 0, prefix2.size()) != prefix2) continue; + completions.insert(expr + "." + name); + } + + } catch (ParseError & e) { + // Quietly ignore parse errors. + }catch (EvalError & e) { + // Quietly ignore evaluation errors. + } } } From 4b33c2dd4cac95151ca5d99ceb3161fdb460ec0a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 13:22:33 +0200 Subject: [PATCH 0018/2196] Add help (:?) --- nix-repl.cc | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 9c22abbfacf..72099f46e62 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -248,7 +248,19 @@ void NixRepl::processLine(string line) string command = string(line, 0, 2); - if (command == ":a") { + if (command == ":?") { + cout << "The following commands are available:\n" + << "\n" + << " Evaluate and print expression\n" + << " :a Add attributes from resulting set to scope\n" + << " :b Build derivation\n" + << " :l Load Nix expression and add it to scope\n" + << " :p Evaluate and print expression recursively\n" + << " :s Build dependencies of derivation, then start nix-shell\n" + << " :t Describe result of evaluation\n"; + } + + else if (command == ":a") { Value v; evalString(string(line, 2), v); addAttrsToScope(v); From 3c67df928f8d311d7dfd565e8c3a16db9d6a0278 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 13:56:53 +0200 Subject: [PATCH 0019/2196] Add sugar for defining a variable MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ‘x = ’ is short for ‘:a { x = ; }’. Note that the right-hand side refers to the original scope, so you get: nix-repl> x = 1 nix-repl> x = x + 1 nix-repl> x 2 rather than an infinite recursion. 
--- nix-repl.cc | 55 +++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 41 insertions(+), 14 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 72099f46e62..bcd80ffc756 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -42,7 +42,7 @@ struct NixRepl void processLine(string line); void loadFile(const Path & path); void addAttrsToScope(Value & attrs); - void addVarToScope(const Symbol & name, Value * v); + void addVarToScope(const Symbol & name, Value & v); Expr * parseString(string s); void evalString(string s, Value & v); @@ -242,6 +242,19 @@ static int runProgram(const string & program, const Strings & args) } +bool isVarName(const string & s) +{ + // FIXME: not quite correct. + foreach (string::const_iterator, i, s) + if (!((*i >= 'a' && *i <= 'z') || + (*i >= 'A' && *i <= 'Z') || + (*i >= '0' && *i <= '9') || + *i == '_' || *i == '\'')) + return false; + return true; +} + + void NixRepl::processLine(string line) { if (line == "") return; @@ -251,13 +264,14 @@ void NixRepl::processLine(string line) if (command == ":?") { cout << "The following commands are available:\n" << "\n" - << " Evaluate and print expression\n" - << " :a Add attributes from resulting set to scope\n" - << " :b Build derivation\n" - << " :l Load Nix expression and add it to scope\n" - << " :p Evaluate and print expression recursively\n" - << " :s Build dependencies of derivation, then start nix-shell\n" - << " :t Describe result of evaluation\n"; + << " Evaluate and print expression\n" + << " = Bind expression to variable\n" + << " :a Add attributes from resulting set to scope\n" + << " :b Build derivation\n" + << " :l Load Nix expression and add it to scope\n" + << " :p Evaluate and print expression recursively\n" + << " :s Build dependencies of derivation, then start nix-shell\n" + << " :t Describe result of evaluation\n"; } else if (command == ":a") { @@ -310,9 +324,22 @@ void NixRepl::processLine(string line) throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); else { - Value v; - evalString(line, v); - printValue(std::cout, v, 1) << std::endl; + size_t p = line.find('='); + string name; + if (p != string::npos && + isVarName(name = removeWhitespace(string(line, 0, p)))) + { + Expr * e = parseString(string(line, p + 1)); + Value & v(*state.allocValue()); + v.type = tThunk; + v.thunk.env = env; + v.thunk.expr = e; + addVarToScope(state.symbols.create(name), v); + } else { + Value v; + evalString(line, v); + printValue(std::cout, v, 1) << std::endl; + } } } @@ -331,15 +358,15 @@ void NixRepl::addAttrsToScope(Value & attrs) { state.forceAttrs(attrs); foreach (Bindings::iterator, i, *attrs.attrs) - addVarToScope(i->name, i->value); + addVarToScope(i->name, *i->value); std::cout << format("Added %1% variables.") % attrs.attrs->size() << std::endl; } -void NixRepl::addVarToScope(const Symbol & name, Value * v) +void NixRepl::addVarToScope(const Symbol & name, Value & v) { staticEnv.vars[name] = displ; - env->values[displ++] = v; + env->values[displ++] = &v; varNames.insert((string) name); } From a5dffb3d3dcfff3b1d6c166451268ebb1cbde991 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 14:03:28 +0200 Subject: [PATCH 0020/2196] Temporary hack to parameterize nixpkgs --- default.nix | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/default.nix b/default.nix index eae1d56d0a7..97162c6e8b8 100644 --- a/default.nix +++ b/default.nix @@ -1,4 +1,6 @@ -with import { }; +{ nixpkgs ? 
}: + +with import nixpkgs { }; runCommand "nix-repl" { buildInputs = [ readline nixUnstable boehmgc ]; } From 3567bdb514e6ba4e460b76dc1d1b1dd466214286 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 15:02:56 +0200 Subject: [PATCH 0021/2196] Add :quit command --- nix-repl.cc | 55 +++++++++++++++++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index bcd80ffc756..a4d74055d5f 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -39,7 +39,7 @@ struct NixRepl void mainLoop(const Strings & args); void completePrefix(string prefix); bool getLine(string & line); - void processLine(string line); + bool processLine(string line); void loadFile(const Path & path); void addAttrsToScope(Value & attrs); void addVarToScope(const Symbol & name, Value & v); @@ -95,10 +95,13 @@ void NixRepl::mainLoop(const Strings & args) while (true) { string line; - if (!getLine(line)) break; + if (!getLine(line)) { + std::cout << std::endl; + break; + } try { - processLine(removeWhitespace(line)); + if (!processLine(removeWhitespace(line))) return; } catch (Error & e) { printMsg(lvlError, "error: " + e.msg()); } catch (Interrupted & e) { @@ -108,7 +111,6 @@ void NixRepl::mainLoop(const Strings & args) std::cout << std::endl; } - std::cout << std::endl; } @@ -255,11 +257,19 @@ bool isVarName(const string & s) } -void NixRepl::processLine(string line) +bool NixRepl::processLine(string line) { - if (line == "") return; + if (line == "") return true; + + string command, arg; - string command = string(line, 0, 2); + if (line[0] == ':') { + size_t p = line.find(' '); + command = string(line, 0, p); + if (p != string::npos) arg = removeWhitespace(string(line, p)); + } else { + arg = line; + } if (command == ":?") { cout << "The following commands are available:\n" @@ -270,30 +280,31 @@ void NixRepl::processLine(string line) << " :b Build derivation\n" << " :l Load Nix expression and add it to scope\n" << " :p Evaluate and print expression recursively\n" + << " :q Exit nix-repl\n" << " :s Build dependencies of derivation, then start nix-shell\n" << " :t Describe result of evaluation\n"; } else if (command == ":a") { Value v; - evalString(string(line, 2), v); + evalString(arg, v); addAttrsToScope(v); } else if (command == ":l") { state.resetFileCache(); - loadFile(removeWhitespace(string(line, 2))); + loadFile(arg); } else if (command == ":t") { Value v; - evalString(string(line, 2), v); + evalString(arg, v); std::cout << showType(v) << std::endl; } else if (command == ":b" || command == ":s") { Value v; - evalString(string(line, 2), v); + evalString(arg, v); DrvInfo drvInfo; if (!getDerivation(state, v, drvInfo, false)) throw Error("expression does not evaluation to a derivation, so I can't build it"); @@ -305,23 +316,27 @@ void NixRepl::processLine(string line) /* We could do the build in this process using buildPaths(), but doing it in a child makes it easier to recover from problems / SIGINT. 
*/ - if (runProgram("nix-store", Strings{"-r", drvPath}) != 0) return; - Derivation drv = parseDerivation(readFile(drvPath)); - std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; - foreach (DerivationOutputs::iterator, i, drv.outputs) - std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; + if (runProgram("nix-store", Strings{"-r", drvPath}) == 0) { + Derivation drv = parseDerivation(readFile(drvPath)); + std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; + foreach (DerivationOutputs::iterator, i, drv.outputs) + std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; + } } else runProgram("nix-shell", Strings{drvPath}); } else if (command == ":p") { Value v; - evalString(string(line, 2), v); + evalString(arg, v); printValue(std::cout, v, 1000000000) << std::endl; } - else if (string(line, 0, 1) == ":") - throw Error(format("unknown command ‘%1%’") % string(line, 0, 2)); + else if (command == ":q" || command == ":quit") + return false; + + else if (command != "") + throw Error(format("unknown command ‘%1%’") % command); else { size_t p = line.find('='); @@ -341,6 +356,8 @@ void NixRepl::processLine(string line) printValue(std::cout, v, 1) << std::endl; } } + + return true; } From 03ef6b69be4711309fa04fa32681e03fde3d234f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 15:17:08 +0200 Subject: [PATCH 0022/2196] Add system argument --- default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/default.nix b/default.nix index 97162c6e8b8..4c9cbcc0cb4 100644 --- a/default.nix +++ b/default.nix @@ -1,6 +1,6 @@ -{ nixpkgs ? }: +{ nixpkgs ? , system ? builtins.currentSystem }: -with import nixpkgs { }; +with import nixpkgs { inherit system; }; runCommand "nix-repl" { buildInputs = [ readline nixUnstable boehmgc ]; } From ddd22c37c58aa9d39a632c746a0617f6602d1815 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 16:02:35 +0200 Subject: [PATCH 0023/2196] Build on Mac OS X --- default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/default.nix b/default.nix index 4c9cbcc0cb4..c4a967bf528 100644 --- a/default.nix +++ b/default.nix @@ -9,5 +9,5 @@ runCommand "nix-repl" g++ -O3 -Wall -std=c++0x \ -o $out/bin/nix-repl ${./nix-repl.cc} \ -I${nixUnstable}/include/nix -L${nixUnstable}/lib/nix \ - -lexpr -lmain -lreadline -lgc + -lformat -lutil -lstore -lexpr -lmain -lreadline -lgc '' From adde4f0c8d720b9c9c43f6d0a2e789d7c68798bd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 16:02:46 +0200 Subject: [PATCH 0024/2196] Add :reload command --- nix-repl.cc | 41 +++++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index a4d74055d5f..c0ba3a58831 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -27,6 +27,8 @@ struct NixRepl string curDir; EvalState state; + Strings loadedFiles; + StaticEnv staticEnv; Env * env; int displ; @@ -41,6 +43,7 @@ struct NixRepl bool getLine(string & line); bool processLine(string line); void loadFile(const Path & path); + void reloadFiles(); void addAttrsToScope(Value & attrs); void addVarToScope(const Symbol & name, Value & v); Expr * parseString(string s); @@ -84,9 +87,11 @@ void NixRepl::mainLoop(const Strings & args) { std::cout << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." 
<< std::endl << std::endl; - foreach (Strings::const_iterator, i, args) { - std::cout << format("Loading ‘%1%’...") % *i << std::endl; - loadFile(*i); + foreach (Strings::const_iterator, i, args) + loadedFiles.push_back(*i); + + if (!loadedFiles.empty()) { + reloadFiles(); std::cout << std::endl; } @@ -110,7 +115,6 @@ void NixRepl::mainLoop(const Strings & args) std::cout << std::endl; } - } @@ -271,7 +275,7 @@ bool NixRepl::processLine(string line) arg = line; } - if (command == ":?") { + if (command == ":?" || command == ":help") { cout << "The following commands are available:\n" << "\n" << " Evaluate and print expression\n" @@ -281,21 +285,27 @@ bool NixRepl::processLine(string line) << " :l Load Nix expression and add it to scope\n" << " :p Evaluate and print expression recursively\n" << " :q Exit nix-repl\n" + << " :r Reload all files\n" << " :s Build dependencies of derivation, then start nix-shell\n" << " :t Describe result of evaluation\n"; } - else if (command == ":a") { + else if (command == ":a" || command == ":add") { Value v; evalString(arg, v); addAttrsToScope(v); } - else if (command == ":l") { + else if (command == ":l" || command == ":load") { state.resetFileCache(); loadFile(arg); } + else if (command == ":r" || command == ":reload") { + state.resetFileCache(); + reloadFiles(); + } + else if (command == ":t") { Value v; evalString(arg, v); @@ -326,7 +336,7 @@ bool NixRepl::processLine(string line) runProgram("nix-shell", Strings{drvPath}); } - else if (command == ":p") { + else if (command == ":p" || command == ":print") { Value v; evalString(arg, v); printValue(std::cout, v, 1000000000) << std::endl; @@ -363,6 +373,8 @@ bool NixRepl::processLine(string line) void NixRepl::loadFile(const Path & path) { + loadedFiles.remove(path); + loadedFiles.push_back(path); Value v, v2; state.evalFile(lookupFileArg(state, path), v); Bindings bindings; @@ -371,6 +383,19 @@ void NixRepl::loadFile(const Path & path) } +void NixRepl::reloadFiles() +{ + Strings old = loadedFiles; + loadedFiles.clear(); + + foreach (Strings::iterator, i, old) { + if (i != old.begin()) std::cout << std::endl; + std::cout << format("Loading ‘%1%’...") % *i << std::endl; + loadFile(*i); + } +} + + void NixRepl::addAttrsToScope(Value & attrs) { state.forceAttrs(attrs); From 498f8b048513bf3eee810c80f6795ab1ef32793f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 16:17:45 +0200 Subject: [PATCH 0025/2196] Add license --- COPYING | 674 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 674 insertions(+) create mode 100644 COPYING diff --git a/COPYING b/COPYING new file mode 100644 index 00000000000..94a9ed024d3 --- /dev/null +++ b/COPYING @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. 
You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
From e91160021f992169228bc59cfa509cfb66335e8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 17:06:14 +0200 Subject: [PATCH 0026/2196] On reload, wipe the environment --- nix-repl.cc | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index c0ba3a58831..012fdfe1e6c 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -29,6 +29,7 @@ struct NixRepl Strings loadedFiles; + const static int envSize = 32768; StaticEnv staticEnv; Env * env; int displ; @@ -43,6 +44,7 @@ struct NixRepl bool getLine(string & line); bool processLine(string line); void loadFile(const Path & path); + void initEnv(); void reloadFiles(); void addAttrsToScope(Value & attrs); void addVarToScope(const Symbol & name, Value & v); @@ -75,10 +77,6 @@ NixRepl::NixRepl() { curDir = absPath("."); - env = &state.allocEnv(32768); - env->up = &state.baseEnv; - displ = 0; - store = openStore(); } @@ -90,10 +88,8 @@ void NixRepl::mainLoop(const Strings & args) foreach (Strings::const_iterator, i, args) loadedFiles.push_back(*i); - if (!loadedFiles.empty()) { - reloadFiles(); - std::cout << std::endl; - } + reloadFiles(); + if (!loadedFiles.empty()) std::cout << std::endl; using_history(); read_history(0); @@ -383,8 +379,20 @@ void NixRepl::loadFile(const Path & path) } +void NixRepl::initEnv() +{ + env = &state.allocEnv(envSize); + env->up = &state.baseEnv; + displ = 0; + varNames.clear(); + staticEnv.vars.clear(); +} + + void NixRepl::reloadFiles() { + initEnv(); + Strings old = loadedFiles; loadedFiles.clear(); @@ -407,6 +415,8 @@ void NixRepl::addAttrsToScope(Value & attrs) void NixRepl::addVarToScope(const Symbol & name, Value & v) { + if (displ >= envSize) + throw Error("environment full; cannot add more variables"); staticEnv.vars[name] = displ; env->values[displ++] = &v; varNames.insert((string) name); From dc670a173ab22d344c5d77d502798f0296f572cc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 17:22:42 +0200 Subject: [PATCH 0027/2196] Make tab-completion work on builtins --- nix-repl.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 012fdfe1e6c..98d27b435ef 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -384,8 +384,11 @@ void NixRepl::initEnv() env = &state.allocEnv(envSize); env->up = &state.baseEnv; displ = 0; - varNames.clear(); staticEnv.vars.clear(); + + varNames.clear(); + foreach (StaticEnv::Vars::iterator, i, state.staticBaseEnv.vars) + varNames.insert(i->first); } From ae50a5e7bee19cb6de331e847936ea8afa0ba8b6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 18:09:46 +0200 Subject: [PATCH 0028/2196] Add a README --- README.md | 104 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 00000000000..af2d27a2067 --- /dev/null +++ b/README.md @@ -0,0 +1,104 @@ +nix-repl +======== + +`nix-repl` is a simple read–eval–print loop (REPL) for the Nix package +manager. + +Installation +------------ + +Assuming you have Nix installed, just do + + $ git clone https://github.com/edolstra/nix-repl.git + $ cd nix-repl + $ nix-env -f . -i nix-repl + +Example +------- + +Here is a typical `nix-repl` session: + + $ nix-repl + Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. + + nix-repl> 3 * 4 + 12 + + nix-repl> :l + Added 3337 variables. + + nix-repl> lib.range 1 5 + [ 1 2 3 4 5 ] + + nix-repl> :a lib + Added 299 variables. 
+ + nix-repl> range 1 5 + [ 1 2 3 4 5 ] + + nix-repl> xs = range 1 5 + + nix-repl> map (x: x * x) xs + [ 1 4 9 16 25 ] + + nix-repl> :l + Added 7 variables. + + nix-repl> config.services.dhcpd + { configFile = null; enable = false; extraConfig = ""; interfaces = [ ... ]; machines = [ ... ]; } + + nix-repl> :p config.services.dhcpd + { configFile = null; enable = false; extraConfig = ""; interfaces = [ "eth0" ]; machines = [ ]; } + + nix-repl> config.fileSystems + { "/" = { ... }; "/boot" = { ... }; } + + nix-repl> mapAttrsToList (n: v: v.device) config.fileSystems + [ "/dev/disk/by-label/nixos" "/dev/disk/by-label/boot" ] + + nix-repl> :b libjson + these derivations will be built: + /nix/store/h910xqb36pysxcxkayb1zkr1zcvvk1zy-libjson_7.6.1.zip.drv + /nix/store/iv0rdx08di0fg704zyxklkvdz6i96lm8-libjson-7.6.1.drv + ... + this derivation produced the following outputs: + out -> /nix/store/ys6bvgfia81rjwqxjlgkwnx6jhsml8h9-libjson-7.6.1 + + nix-repl> :t makeFontsConf + a function + + nix-repl> :b makeFontsConf { fontDirectories = [ "${freefont_ttf}/share/fonts/truetype" ]; } + ... + this derivation produced the following outputs: + out -> /nix/store/jkw848xj0gkbhmvxi0hwpnhzn2716v3c-fonts.conf + + nix-repl> :s pan + # Builds dependencies of the ‘pan’ derivation, then starts a shell + # in which the environment variables of the derivation are set + + [nix-shell:/tmp/nix-repl]$ echo $src + /nix/store/0ibx15r02nnkwiclmfbpzrzjm2y204fh-pan-0.139.tar.bz2 + + [nix-shell:/tmp/nix-repl]$ exit + + nix-repl> + +Tab completion works on variables in scope and on attribute sets. For +example: + + $ nix-repl '' '' + Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. + + nix-repl> thunder => thunderbird + + nix-repl> + Display all 3634 possibilities? (y or n) + + nix-repl> lib + Display all 291 possibilities? (y or n) + + nix-repl> xorg.libX + xorg.libXdamage xorg.libXdmcp + + nix-repl> config.networking.use + config.networking.useDHCP config.networking.usePredictableInterfaceNames From 853d2e0aa42b1a7e5de6111c86dbd4a16e0fe411 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 18:10:57 +0200 Subject: [PATCH 0029/2196] Fix markdown --- README.md | 112 +++++++++++++++++++++++++++--------------------------- 1 file changed, 56 insertions(+), 56 deletions(-) diff --git a/README.md b/README.md index af2d27a2067..01967e80034 100644 --- a/README.md +++ b/README.md @@ -18,87 +18,87 @@ Example Here is a typical `nix-repl` session: - $ nix-repl - Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. + $ nix-repl + Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. - nix-repl> 3 * 4 - 12 + nix-repl> 3 * 4 + 12 - nix-repl> :l - Added 3337 variables. + nix-repl> :l + Added 3337 variables. - nix-repl> lib.range 1 5 - [ 1 2 3 4 5 ] + nix-repl> lib.range 1 5 + [ 1 2 3 4 5 ] - nix-repl> :a lib - Added 299 variables. + nix-repl> :a lib + Added 299 variables. - nix-repl> range 1 5 - [ 1 2 3 4 5 ] + nix-repl> range 1 5 + [ 1 2 3 4 5 ] - nix-repl> xs = range 1 5 + nix-repl> xs = range 1 5 - nix-repl> map (x: x * x) xs - [ 1 4 9 16 25 ] + nix-repl> map (x: x * x) xs + [ 1 4 9 16 25 ] - nix-repl> :l - Added 7 variables. + nix-repl> :l + Added 7 variables. - nix-repl> config.services.dhcpd - { configFile = null; enable = false; extraConfig = ""; interfaces = [ ... ]; machines = [ ... ]; } + nix-repl> config.services.dhcpd + { configFile = null; enable = false; extraConfig = ""; interfaces = [ ... ]; machines = [ ... 
]; } - nix-repl> :p config.services.dhcpd - { configFile = null; enable = false; extraConfig = ""; interfaces = [ "eth0" ]; machines = [ ]; } + nix-repl> :p config.services.dhcpd + { configFile = null; enable = false; extraConfig = ""; interfaces = [ "eth0" ]; machines = [ ]; } - nix-repl> config.fileSystems - { "/" = { ... }; "/boot" = { ... }; } + nix-repl> config.fileSystems + { "/" = { ... }; "/boot" = { ... }; } - nix-repl> mapAttrsToList (n: v: v.device) config.fileSystems - [ "/dev/disk/by-label/nixos" "/dev/disk/by-label/boot" ] + nix-repl> mapAttrsToList (n: v: v.device) config.fileSystems + [ "/dev/disk/by-label/nixos" "/dev/disk/by-label/boot" ] - nix-repl> :b libjson - these derivations will be built: - /nix/store/h910xqb36pysxcxkayb1zkr1zcvvk1zy-libjson_7.6.1.zip.drv - /nix/store/iv0rdx08di0fg704zyxklkvdz6i96lm8-libjson-7.6.1.drv - ... - this derivation produced the following outputs: - out -> /nix/store/ys6bvgfia81rjwqxjlgkwnx6jhsml8h9-libjson-7.6.1 + nix-repl> :b libjson + these derivations will be built: + /nix/store/h910xqb36pysxcxkayb1zkr1zcvvk1zy-libjson_7.6.1.zip.drv + /nix/store/iv0rdx08di0fg704zyxklkvdz6i96lm8-libjson-7.6.1.drv + ... + this derivation produced the following outputs: + out -> /nix/store/ys6bvgfia81rjwqxjlgkwnx6jhsml8h9-libjson-7.6.1 - nix-repl> :t makeFontsConf - a function + nix-repl> :t makeFontsConf + a function - nix-repl> :b makeFontsConf { fontDirectories = [ "${freefont_ttf}/share/fonts/truetype" ]; } - ... - this derivation produced the following outputs: - out -> /nix/store/jkw848xj0gkbhmvxi0hwpnhzn2716v3c-fonts.conf + nix-repl> :b makeFontsConf { fontDirectories = [ "${freefont_ttf}/share/fonts/truetype" ]; } + ... + this derivation produced the following outputs: + out -> /nix/store/jkw848xj0gkbhmvxi0hwpnhzn2716v3c-fonts.conf - nix-repl> :s pan - # Builds dependencies of the ‘pan’ derivation, then starts a shell - # in which the environment variables of the derivation are set + nix-repl> :s pan + # Builds dependencies of the ‘pan’ derivation, then starts a shell + # in which the environment variables of the derivation are set - [nix-shell:/tmp/nix-repl]$ echo $src - /nix/store/0ibx15r02nnkwiclmfbpzrzjm2y204fh-pan-0.139.tar.bz2 + [nix-shell:/tmp/nix-repl]$ echo $src + /nix/store/0ibx15r02nnkwiclmfbpzrzjm2y204fh-pan-0.139.tar.bz2 - [nix-shell:/tmp/nix-repl]$ exit + [nix-shell:/tmp/nix-repl]$ exit - nix-repl> + nix-repl> Tab completion works on variables in scope and on attribute sets. For example: - $ nix-repl '' '' - Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. + $ nix-repl '' '' + Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. - nix-repl> thunder => thunderbird + nix-repl> thunder => thunderbird - nix-repl> - Display all 3634 possibilities? (y or n) + nix-repl> + Display all 3634 possibilities? (y or n) - nix-repl> lib - Display all 291 possibilities? (y or n) + nix-repl> lib + Display all 291 possibilities? 
(y or n) - nix-repl> xorg.libX - xorg.libXdamage xorg.libXdmcp + nix-repl> xorg.libX + xorg.libXdamage xorg.libXdmcp - nix-repl> config.networking.use - config.networking.useDHCP config.networking.usePredictableInterfaceNames + nix-repl> config.networking.use + config.networking.useDHCP config.networking.usePredictableInterfaceNames From 81d658fe4afda234028cd4551e12491db4303957 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 9 Sep 2013 18:11:47 +0200 Subject: [PATCH 0030/2196] Fix readme --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 01967e80034..c5964fd5ea1 100644 --- a/README.md +++ b/README.md @@ -9,9 +9,9 @@ Installation Assuming you have Nix installed, just do - $ git clone https://github.com/edolstra/nix-repl.git - $ cd nix-repl - $ nix-env -f . -i nix-repl + $ git clone https://github.com/edolstra/nix-repl.git + $ cd nix-repl + $ nix-env -f . -i nix-repl Example ------- From 3beb6f6e763f56d791db0a99baf285e0ba745bc8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Jan 2014 10:40:02 +0100 Subject: [PATCH 0031/2196] Show derivations more concisely --- nix-repl.cc | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 98d27b435ef..78be0f98c42 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -489,10 +489,18 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m seen.insert(&v); bool isDrv = state.isDerivation(v); - if (isDrv) str << "(derivation "; - str << "{ "; - if (maxDepth > 0) { + if (isDrv) { + str << "«derivation "; + Bindings::iterator i = v.attrs->find(state.sDrvPath); + PathSet context; + Path drvPath = i != v.attrs->end() ? state.coerceToPath(*i->value, context) : "???"; + str << drvPath << "»"; + } + + else if (maxDepth > 0) { + str << "{ "; + typedef std::map Sorted; Sorted sorted; foreach (Bindings::iterator, i, *v.attrs) @@ -528,11 +536,10 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str << "; "; } + str << "}"; } else - str << "... "; + str << "{ ... 
}"; - str << "}"; - if (isDrv) str << ")"; break; } From 6a4a8208be10462b1051c689b26577dc36495632 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Jan 2014 10:42:05 +0100 Subject: [PATCH 0032/2196] Fix building against current Nix --- nix-repl.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 78be0f98c42..1c7b2f5142e 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -311,10 +311,10 @@ bool NixRepl::processLine(string line) else if (command == ":b" || command == ":s") { Value v; evalString(arg, v); - DrvInfo drvInfo; + DrvInfo drvInfo(state); if (!getDerivation(state, v, drvInfo, false)) throw Error("expression does not evaluation to a derivation, so I can't build it"); - Path drvPath = drvInfo.queryDrvPath(state); + Path drvPath = drvInfo.queryDrvPath(); if (drvPath == "" || !store->isValidPath(drvPath)) throw Error("expression did not evaluate to a valid derivation"); From 22a47ab03c8a1bf81df3aa0d29a4d6a29aacc36f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 11 Apr 2014 12:50:46 +0200 Subject: [PATCH 0033/2196] Fix building against Nix 1.7 --- default.nix | 7 ++++--- nix-repl.cc | 4 ++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/default.nix b/default.nix index c4a967bf528..435c16c0915 100644 --- a/default.nix +++ b/default.nix @@ -3,11 +3,12 @@ with import nixpkgs { inherit system; }; runCommand "nix-repl" - { buildInputs = [ readline nixUnstable boehmgc ]; } + { buildInputs = [ readline nix boehmgc ]; } '' mkdir -p $out/bin g++ -O3 -Wall -std=c++0x \ -o $out/bin/nix-repl ${./nix-repl.cc} \ - -I${nixUnstable}/include/nix -L${nixUnstable}/lib/nix \ - -lformat -lutil -lstore -lexpr -lmain -lreadline -lgc + -I${nix}/include/nix \ + -lnixformat -lnixutil -lnixstore -lnixexpr -lnixmain -lreadline -lgc \ + -DNIX_VERSION=${(builtins.parseDrvName nix.name).version} '' diff --git a/nix-repl.cc b/nix-repl.cc index 1c7b2f5142e..ef9be417acd 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -323,7 +323,7 @@ bool NixRepl::processLine(string line) but doing it in a child makes it easier to recover from problems / SIGINT. */ if (runProgram("nix-store", Strings{"-r", drvPath}) == 0) { - Derivation drv = parseDerivation(readFile(drvPath)); + Derivation drv = readDerivation(drvPath); std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; foreach (DerivationOutputs::iterator, i, drv.outputs) std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; @@ -494,7 +494,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str << "«derivation "; Bindings::iterator i = v.attrs->find(state.sDrvPath); PathSet context; - Path drvPath = i != v.attrs->end() ? state.coerceToPath(*i->value, context) : "???"; + Path drvPath = i != v.attrs->end() ? state.coerceToPath(*i->pos, *i->value, context) : "???"; str << drvPath << "»"; } From 1734e8a1491ef831c83c2620b6b0f4a590b67c1f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 11 Apr 2014 12:51:15 +0200 Subject: [PATCH 0034/2196] Fix crash in tab completion Fixes #1. Patch by Maxdamantus. --- nix-repl.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index ef9be417acd..8049008d14e 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -216,8 +216,10 @@ void NixRepl::completePrefix(string prefix) } catch (ParseError & e) { // Quietly ignore parse errors. - }catch (EvalError & e) { + } catch (EvalError & e) { // Quietly ignore evaluation errors. 
+ } catch (UndefinedVarError & e) { + // Quietly ignore undefined variable errors. } } } From 66b2d18243d055bcbc6c3f7708960575d02db09c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 16 Jun 2014 10:05:09 -0400 Subject: [PATCH 0035/2196] Don't parse 'var == expr' as an assignment --- nix-repl.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nix-repl.cc b/nix-repl.cc index 8049008d14e..ea188a1c75e 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -350,6 +350,8 @@ bool NixRepl::processLine(string line) size_t p = line.find('='); string name; if (p != string::npos && + p < line.size() && + line[p + 1] != '=' && isVarName(name = removeWhitespace(string(line, 0, p)))) { Expr * e = parseString(string(line, p + 1)); From 2cf0e67761121a4ddceb69a932bc2e3c0cd6cb6c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 24 Jul 2014 17:46:58 +0200 Subject: [PATCH 0036/2196] Handle non-numeric version strings Fixes #2. --- default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/default.nix b/default.nix index 435c16c0915..e7cdc23580e 100644 --- a/default.nix +++ b/default.nix @@ -10,5 +10,5 @@ runCommand "nix-repl" -o $out/bin/nix-repl ${./nix-repl.cc} \ -I${nix}/include/nix \ -lnixformat -lnixutil -lnixstore -lnixexpr -lnixmain -lreadline -lgc \ - -DNIX_VERSION=${(builtins.parseDrvName nix.name).version} + -DNIX_VERSION=\"${(builtins.parseDrvName nix.name).version}\" '' From 02b66e97ba08ed68a73654556734aadfc9f41c89 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 24 Jul 2014 17:53:32 +0200 Subject: [PATCH 0037/2196] Fix building against current Nix master --- nix-repl.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index ea188a1c75e..fadaf079a8b 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -73,7 +73,8 @@ string removeWhitespace(string s) NixRepl::NixRepl() - : staticEnv(false, &state.staticBaseEnv) + : state(Strings()) + , staticEnv(false, &state.staticBaseEnv) { curDir = absPath("."); From 89f9c0d41b29d18e2804677da856e84c86c83b45 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 26 Aug 2014 20:03:12 +0200 Subject: [PATCH 0038/2196] Fix building against current Nix master --- default.nix | 2 ++ nix-repl.cc | 30 +++++++++++++++++++++++------- 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/default.nix b/default.nix index e7cdc23580e..8690325e528 100644 --- a/default.nix +++ b/default.nix @@ -2,6 +2,8 @@ with import nixpkgs { inherit system; }; +let nix = nixUnstable; in + runCommand "nix-repl" { buildInputs = [ readline nix boehmgc ]; } '' diff --git a/nix-repl.cc b/nix-repl.cc index fadaf079a8b..bff54aa14f3 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -39,7 +39,7 @@ struct NixRepl StringSet::iterator curCompletion; NixRepl(); - void mainLoop(const Strings & args); + void mainLoop(const Strings & files); void completePrefix(string prefix); bool getLine(string & line); bool processLine(string line); @@ -73,7 +73,7 @@ string removeWhitespace(string s) NixRepl::NixRepl() - : state(Strings()) + : state(Strings()) , staticEnv(false, &state.staticBaseEnv) { curDir = absPath("."); @@ -82,11 +82,11 @@ NixRepl::NixRepl() } -void NixRepl::mainLoop(const Strings & args) +void NixRepl::mainLoop(const Strings & files) { std::cout << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." 
<< std::endl << std::endl; - foreach (Strings::const_iterator, i, args) + foreach (Strings::const_iterator, i, files) loadedFiles.push_back(*i); reloadFiles(); @@ -590,8 +590,24 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m } -void run(Strings args) +int main(int argc, char * * argv) { - NixRepl repl; - repl.mainLoop(args); + return handleExceptions(argv[0], [&]() { + initNix(); + + Strings files; + + parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { + if (*arg == "--version") + printVersion("nix-repl"); + else if (*arg != "" && arg->at(0) == '-') + return false; + else + files.push_back(*arg); + return true; + }); + + NixRepl repl; + repl.mainLoop(files); + }); } From 71d61508f203e8d926a0365332ff218c1314f734 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 26 Aug 2014 20:05:08 +0200 Subject: [PATCH 0039/2196] Support -I flag --- nix-repl.cc | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index bff54aa14f3..51d297d1d17 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -38,7 +38,7 @@ struct NixRepl StringSet completions; StringSet::iterator curCompletion; - NixRepl(); + NixRepl(const Strings & searchPath); void mainLoop(const Strings & files); void completePrefix(string prefix); bool getLine(string & line); @@ -72,8 +72,8 @@ string removeWhitespace(string s) } -NixRepl::NixRepl() - : state(Strings()) +NixRepl::NixRepl(const Strings & searchPath) + : state(searchPath) , staticEnv(false, &state.staticBaseEnv) { curDir = absPath("."); @@ -595,11 +595,13 @@ int main(int argc, char * * argv) return handleExceptions(argv[0], [&]() { initNix(); - Strings files; + Strings files, searchPath; parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--version") printVersion("nix-repl"); + else if (parseSearchPathArg(arg, end, searchPath)) + ; else if (*arg != "" && arg->at(0) == '-') return false; else @@ -607,7 +609,7 @@ int main(int argc, char * * argv) return true; }); - NixRepl repl; + NixRepl repl(searchPath); repl.mainLoop(files); }); } From f92408136ed08804bab14b3e2a2def9b8effd7eb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 Dec 2014 10:07:10 +0100 Subject: [PATCH 0040/2196] Fix building against current Nix master --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 51d297d1d17..343d3f9f3bf 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -378,7 +378,7 @@ void NixRepl::loadFile(const Path & path) loadedFiles.push_back(path); Value v, v2; state.evalFile(lookupFileArg(state, path), v); - Bindings bindings; + Bindings & bindings(*state.allocBindings(0)); state.autoCallFunction(bindings, v, v2); addAttrsToScope(v2); } From 45c6405a30bd1b2cb8ad6a94b23be8b10cf52069 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 Jun 2015 13:23:53 +0200 Subject: [PATCH 0041/2196] Fix building against latest Nix Fixes #8. Fixes #9. 
--- nix-repl.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/nix-repl.cc b/nix-repl.cc index 343d3f9f3bf..8cfbfeff9f4 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -594,6 +594,7 @@ int main(int argc, char * * argv) { return handleExceptions(argv[0], [&]() { initNix(); + initGC(); Strings files, searchPath; From 57aeef0b6a3d3c9506e35f57f5b6db33019967e5 Mon Sep 17 00:00:00 2001 From: Susan Potter Date: Mon, 6 Jul 2015 08:26:17 -0500 Subject: [PATCH 0042/2196] Fix nix-repl does not support '--help' According to popular practice and convention `nix-repl` now supports `--help` like a good POSIX citizen[1]. [1] https://www.gnu.org/prep/standards/html_node/Command_002dLine-Interfaces.html --- nix-repl.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 8cfbfeff9f4..43e8c4a6cc6 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -59,7 +59,8 @@ struct NixRepl void printHelp() { - std::cout << "Usage: nix-repl\n"; + std::cout << "Usage: nix-repl [--help|--version]"; + std::cout << std::endl; } @@ -601,6 +602,11 @@ int main(int argc, char * * argv) parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--version") printVersion("nix-repl"); + else if (*arg == "--help") { + printHelp(); + // exit with 0 since user asked for help + _exit(0); + } else if (parseSearchPathArg(arg, end, searchPath)) ; else if (*arg != "" && arg->at(0) == '-') From 8a2f5f0607540ffe56b56d52db544373e1efb980 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 7 Sep 2015 13:05:58 +0200 Subject: [PATCH 0043/2196] Fix building against Nix 1.10 Fixes #12. --- nix-repl.cc | 68 ++++++++++++++++++++++++++++------------------------- 1 file changed, 36 insertions(+), 32 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 43e8c4a6cc6..474ad4fc9af 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -87,8 +87,8 @@ void NixRepl::mainLoop(const Strings & files) { std::cout << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." << std::endl << std::endl; - foreach (Strings::const_iterator, i, files) - loadedFiles.push_back(*i); + for (auto & i : files) + loadedFiles.push_back(i); reloadFiles(); if (!loadedFiles.empty()) std::cout << std::endl; @@ -210,8 +210,8 @@ void NixRepl::completePrefix(string prefix) e->eval(state, *env, v); state.forceAttrs(v); - foreach (Bindings::iterator, i, *v.attrs) { - string name = i->name; + for (auto & i : *v.attrs) { + string name = i.name; if (string(name, 0, prefix2.size()) != prefix2) continue; completions.insert(expr + "." + name); } @@ -251,11 +251,11 @@ static int runProgram(const string & program, const Strings & args) bool isVarName(const string & s) { // FIXME: not quite correct. 
- foreach (string::const_iterator, i, s) - if (!((*i >= 'a' && *i <= 'z') || - (*i >= 'A' && *i <= 'Z') || - (*i >= '0' && *i <= '9') || - *i == '_' || *i == '\'')) + for (auto & i : s) + if (!((i >= 'a' && i <= 'z') || + (i >= 'A' && i <= 'Z') || + (i >= '0' && i <= '9') || + i == '_' || i == '\'')) return false; return true; } @@ -329,8 +329,8 @@ bool NixRepl::processLine(string line) if (runProgram("nix-store", Strings{"-r", drvPath}) == 0) { Derivation drv = readDerivation(drvPath); std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; - foreach (DerivationOutputs::iterator, i, drv.outputs) - std::cout << format(" %1% -> %2%") % i->first % i->second.path << std::endl; + for (auto & i : drv.outputs) + std::cout << format(" %1% -> %2%") % i.first % i.second.path << std::endl; } } else runProgram("nix-shell", Strings{drvPath}); @@ -393,8 +393,8 @@ void NixRepl::initEnv() staticEnv.vars.clear(); varNames.clear(); - foreach (StaticEnv::Vars::iterator, i, state.staticBaseEnv.vars) - varNames.insert(i->first); + for (auto & i : state.staticBaseEnv.vars) + varNames.insert(i.first); } @@ -405,10 +405,12 @@ void NixRepl::reloadFiles() Strings old = loadedFiles; loadedFiles.clear(); - foreach (Strings::iterator, i, old) { - if (i != old.begin()) std::cout << std::endl; - std::cout << format("Loading ‘%1%’...") % *i << std::endl; - loadFile(*i); + bool first = true; + for (auto & i : old) { + if (!first) std::cout << std::endl; + first = false; + std::cout << format("Loading ‘%1%’...") % i << std::endl; + loadFile(i); } } @@ -416,8 +418,8 @@ void NixRepl::reloadFiles() void NixRepl::addAttrsToScope(Value & attrs) { state.forceAttrs(attrs); - foreach (Bindings::iterator, i, *attrs.attrs) - addVarToScope(i->name, *i->value); + for (auto & i : *attrs.attrs) + addVarToScope(i.name, *i.value); std::cout << format("Added %1% variables.") % attrs.attrs->size() << std::endl; } @@ -509,8 +511,8 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m typedef std::map Sorted; Sorted sorted; - foreach (Bindings::iterator, i, *v.attrs) - sorted[i->name] = i->value; + for (auto & i : *v.attrs) + sorted[i.name] = i.value; /* If this is a derivation, then don't show the self-references ("all", "out", etc.). 
*/ @@ -522,20 +524,20 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m hidden.insert("out"); else { state.forceList(*i->value); - for (unsigned int j = 0; j < i->value->list.length; ++j) - hidden.insert(state.forceStringNoCtx(*i->value->list.elems[j])); + for (unsigned int j = 0; j < i->value->listSize(); ++j) + hidden.insert(state.forceStringNoCtx(*i->value->listElems()[j])); } } - foreach (Sorted::iterator, i, sorted) { - str << i->first << " = "; - if (hidden.find(i->first) != hidden.end()) + for (auto & i : sorted) { + str << i.first << " = "; + if (hidden.find(i.first) != hidden.end()) str << "«...»"; - else if (seen.find(i->second) != seen.end()) + else if (seen.find(i.second) != seen.end()) str << "«repeated»"; else try { - printValue(str, *i->second, maxDepth - 1, seen); + printValue(str, *i.second, maxDepth - 1, seen); } catch (AssertionError & e) { str << "«error: " << e.msg() << "»"; } @@ -549,17 +551,19 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; } - case tList: + case tList1: + case tList2: + case tListN: seen.insert(&v); str << "[ "; if (maxDepth > 0) - for (unsigned int n = 0; n < v.list.length; ++n) { - if (seen.find(v.list.elems[n]) != seen.end()) + for (unsigned int n = 0; n < v.listSize(); ++n) { + if (seen.find(v.listElems()[n]) != seen.end()) str << "«repeated»"; else try { - printValue(str, *v.list.elems[n], maxDepth - 1, seen); + printValue(str, *v.listElems()[n], maxDepth - 1, seen); } catch (AssertionError & e) { str << "«error: " << e.msg() << "»"; } From f7980b471273d695aa0b28f2f73e6ee443dfe9eb Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Sun, 14 Feb 2016 01:16:30 -0600 Subject: [PATCH 0044/2196] Parse `foo-bar = expr` as an assignment. --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 474ad4fc9af..25f84a3082a 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -255,7 +255,7 @@ bool isVarName(const string & s) if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || (i >= '0' && i <= '9') || - i == '_' || i == '\'')) + i == '_' || i == '-' || i == '\'')) return false; return true; } From 2111098a3a26d11edf4452f021245b55287c45b8 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Sun, 14 Feb 2016 01:29:48 -0600 Subject: [PATCH 0045/2196] Don't consider strings starting with - or ' as variable names. --- nix-repl.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 25f84a3082a..5c319f06865 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -250,7 +250,8 @@ static int runProgram(const string & program, const Strings & args) bool isVarName(const string & s) { - // FIXME: not quite correct. + if (s.size() > 0 && (s[0] == '-' || s[0] == '\'')) + return false; for (auto & i : s) if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || From f30fd9c47b1ae7a48f4854c86b3ad5e038845aa3 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Sun, 14 Feb 2016 01:50:47 -0600 Subject: [PATCH 0046/2196] Don't consider empty strings or strings beginning with numbers as variable names. 
--- nix-repl.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 5c319f06865..1077f5d8f61 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -250,8 +250,9 @@ static int runProgram(const string & program, const Strings & args) bool isVarName(const string & s) { - if (s.size() > 0 && (s[0] == '-' || s[0] == '\'')) - return false; + if (s.size() == 0) return false; + char c = s[0]; + if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false; for (auto & i : s) if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || From 3cfb8d15846238d79b36de7e52a90a4d3afa4268 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Mon, 15 Feb 2016 19:16:24 -0600 Subject: [PATCH 0047/2196] Remove unused global variable. --- nix-repl.cc | 3 --- 1 file changed, 3 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 1077f5d8f61..8569b5eb67b 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -19,9 +19,6 @@ using namespace std; using namespace nix; -string programId = "nix-repl"; - - struct NixRepl { string curDir; From 30a7bfbebe8582ab02f2a9b659403a5b3e1c097b Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Mon, 15 Feb 2016 23:11:26 -0600 Subject: [PATCH 0048/2196] Fix grammar. --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 8569b5eb67b..e753c637a75 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -316,7 +316,7 @@ bool NixRepl::processLine(string line) evalString(arg, v); DrvInfo drvInfo(state); if (!getDerivation(state, v, drvInfo, false)) - throw Error("expression does not evaluation to a derivation, so I can't build it"); + throw Error("expression does not evaluate to a derivation, so I can't build it"); Path drvPath = drvInfo.queryDrvPath(); if (drvPath == "" || !store->isValidPath(drvPath)) throw Error("expression did not evaluate to a valid derivation"); From 82aca33899a0348736604e6b6a601f9c7b4e0633 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Tue, 16 Feb 2016 00:24:50 -0600 Subject: [PATCH 0049/2196] Add :i command to install a package to the current profile. It works by running `nix-env -i `. Fixes #15. --- nix-repl.cc | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index e753c637a75..ca221b03338 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -281,6 +281,7 @@ bool NixRepl::processLine(string line) << " = Bind expression to variable\n" << " :a Add attributes from resulting set to scope\n" << " :b Build derivation\n" + << " :i Build derivation, then install result into current profile\n" << " :l Load Nix expression and add it to scope\n" << " :p Evaluate and print expression recursively\n" << " :q Exit nix-repl\n" @@ -311,7 +312,7 @@ bool NixRepl::processLine(string line) std::cout << showType(v) << std::endl; } - else if (command == ":b" || command == ":s") { + else if (command == ":b" || command == ":i" || command == ":s") { Value v; evalString(arg, v); DrvInfo drvInfo(state); @@ -331,8 +332,11 @@ bool NixRepl::processLine(string line) for (auto & i : drv.outputs) std::cout << format(" %1% -> %2%") % i.first % i.second.path << std::endl; } - } else + } else if (command == ":i") { + runProgram("nix-env", Strings{"-i", drvPath}); + } else { runProgram("nix-shell", Strings{drvPath}); + } } else if (command == ":p" || command == ":print") { From cfc874ee52008f523a86b5079243deabaecb62e4 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Tue, 16 Feb 2016 18:40:45 -0600 Subject: [PATCH 0050/2196] Open the store before constructing EvalState. 
EvalState requires the `store` global to be initialized before it is constructed in some cases, e.g. when it needs to download a tarball for something in NIX_PATH. Hence, this fixes #13. --- nix-repl.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 1077f5d8f61..cdf6a79e920 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -78,8 +78,6 @@ NixRepl::NixRepl(const Strings & searchPath) , staticEnv(false, &state.staticBaseEnv) { curDir = absPath("."); - - store = openStore(); } @@ -622,6 +620,7 @@ int main(int argc, char * * argv) return true; }); + store = openStore(); NixRepl repl(searchPath); repl.mainLoop(files); }); From 287dfee35edc6c324ee8829f37a8ed6b07c35ffa Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Thu, 18 Feb 2016 04:05:11 -0600 Subject: [PATCH 0051/2196] Expand the help message printed from --help. Fixes #10. I consider this a temporary measure, however, until nix-repl has a manpage (see #14). Then it can just open its manpage on --help like the other nix tools do. Much of the text in this commit was copied from nix-build's manpage. --- nix-repl.cc | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 1077f5d8f61..1b098405fc8 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -59,8 +59,35 @@ struct NixRepl void printHelp() { - std::cout << "Usage: nix-repl [--help|--version]"; - std::cout << std::endl; + cout << "Usage: nix-repl [--help] [--version] [-I path] paths...\n" + << "\n" + << "nix-repl is a simple read-eval-print loop (REPL) for the Nix package manager.\n" + << "\n" + << "Options:\n" + << " --help\n" + << " Prints out a summary of the command syntax and exits.\n" + << "\n" + << " --version\n" + << " Prints out the Nix version number on standard output and exits.\n" + << "\n" + << " -I path\n" + << " Add a path to the Nix expression search path. This option may be given\n" + << " multiple times. See the NIX_PATH environment variable for information on\n" + << " the semantics of the Nix search path. Paths added through -I take\n" + << " precedence over NIX_PATH.\n" + << "\n" + << " paths...\n" + << " A list of paths to files containing Nix expressions which nix-repl will\n" + << " load and add to its scope.\n" + << "\n" + << " A path surrounded in < and > will be looked up in the Nix expression search\n" + << " path, as in the Nix language itself.\n" + << "\n" + << " If an element of paths starts with http:// or https://, it is interpreted\n" + << " as the URL of a tarball that will be downloaded and unpacked to a temporary\n" + << " location. The tarball must include a single top-level directory containing\n" + << " at least a file named default.nix.\n" + << flush; } From 2d729e4f6f45c079ddf149610357e648e805f42c Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Thu, 18 Feb 2016 06:27:39 -0600 Subject: [PATCH 0052/2196] Support multiline input by detecting "unfinished" parse errors. 
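The same idea in a self-contained toy sketch (not nix-repl code: the "parser" below just waits for a ';' to illustrate the keep-the-buffer-on-incomplete-parse pattern, and all names here are made up for the example):

    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Stand-in for "the parse failed only because input ended too early".
    struct IncompleteParse : std::runtime_error {
        using std::runtime_error::runtime_error;
    };

    // Toy "parser": treat input as complete once it contains a ';'.
    static void process(const std::string & input)
    {
        if (input.find(';') == std::string::npos)
            throw IncompleteParse("unexpected end of input");
        std::cout << "parsed: " << input;
    }

    int main()
    {
        std::string input, line;
        while (true) {
            std::cout << (input.empty() ? "toy> " : "     ");
            if (!std::getline(std::cin, line)) break;
            input += line + "\n";
            try {
                process(input);            // may throw on a syntax error
            } catch (IncompleteParse &) {
                continue;                  // keep the buffer, read another line
            } catch (std::exception & e) {
                std::cerr << "error: " << e.what() << std::endl;
            }
            input.clear();                 // handled: start with fresh input
        }
    }
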
Fixes #4, --- nix-repl.cc | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 1077f5d8f61..9dddb26037a 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -41,7 +41,7 @@ struct NixRepl NixRepl(const Strings & searchPath); void mainLoop(const Strings & files); void completePrefix(string prefix); - bool getLine(string & line); + bool getLine(string & line, const char * prompt); bool processLine(string line); void loadFile(const Path & path); void initEnv(); @@ -96,21 +96,39 @@ void NixRepl::mainLoop(const Strings & files) using_history(); read_history(0); + string input; + while (true) { + // When continuing input from a previous, don't print a prompt, just align to the same + // number of chars as the prompt. + const char * prompt = input.empty() ? "nix-repl> " : " "; string line; - if (!getLine(line)) { + if (!getLine(line, prompt)) { std::cout << std::endl; break; } + input.append(removeWhitespace(line)); + input.push_back('\n'); + try { - if (!processLine(removeWhitespace(line))) return; + if (!processLine(input)) return; + } catch (ParseError & e) { + if (e.msg().find("unexpected $end") != std::string::npos) { + // For parse errors on incomplete input, we continue waiting for the next line of + // input without clearing the input so far. + continue; + } else { + printMsg(lvlError, "error: " + e.msg()); + } } catch (Error & e) { printMsg(lvlError, "error: " + e.msg()); } catch (Interrupted & e) { printMsg(lvlError, "error: " + e.msg()); } + // We handled the current input fully, so we should clear it and read brand new input. + input.clear(); std::cout << std::endl; } } @@ -149,7 +167,7 @@ char * completerThunk(const char * s, int state) } -bool NixRepl::getLine(string & line) +bool NixRepl::getLine(string & line, const char * prompt) { struct sigaction act, old; act.sa_handler = sigintHandler; @@ -164,7 +182,7 @@ bool NixRepl::getLine(string & line) curRepl = this; rl_completion_entry_function = completerThunk; - char * s = readline("nix-repl> "); + char * s = readline(prompt); if (!s) return false; line = chomp(string(s)); free(s); From 64080d26fe9364bc0ea0893f357386ed3121878f Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Thu, 18 Feb 2016 06:50:52 -0600 Subject: [PATCH 0053/2196] Cancel multiline input on Ctrl-C. --- nix-repl.cc | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 9dddb26037a..8d2fbd919c9 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -41,7 +41,7 @@ struct NixRepl NixRepl(const Strings & searchPath); void mainLoop(const Strings & files); void completePrefix(string prefix); - bool getLine(string & line, const char * prompt); + bool getLine(string & input, const char * prompt); bool processLine(string line); void loadFile(const Path & path); void initEnv(); @@ -102,15 +102,11 @@ void NixRepl::mainLoop(const Strings & files) // When continuing input from a previous, don't print a prompt, just align to the same // number of chars as the prompt. const char * prompt = input.empty() ? 
"nix-repl> " : " "; - string line; - if (!getLine(line, prompt)) { + if (!getLine(input, prompt)) { std::cout << std::endl; break; } - input.append(removeWhitespace(line)); - input.push_back('\n'); - try { if (!processLine(input)) return; } catch (ParseError & e) { @@ -167,7 +163,7 @@ char * completerThunk(const char * s, int state) } -bool NixRepl::getLine(string & line, const char * prompt) +bool NixRepl::getLine(string & input, const char * prompt) { struct sigaction act, old; act.sa_handler = sigintHandler; @@ -176,15 +172,17 @@ bool NixRepl::getLine(string & line, const char * prompt) if (sigaction(SIGINT, &act, &old)) throw SysError("installing handler for SIGINT"); - if (sigsetjmp(sigintJmpBuf, 1)) - line = ""; - else { + if (sigsetjmp(sigintJmpBuf, 1)) { + input.clear(); + } else { curRepl = this; rl_completion_entry_function = completerThunk; char * s = readline(prompt); if (!s) return false; - line = chomp(string(s)); + string line = chomp(string(s)); + input.append(removeWhitespace(line)); + input.push_back('\n'); free(s); if (line != "") { add_history(line.c_str()); From 60ba98242f7c976e4e14113d28bced03b32db4f5 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Thu, 18 Feb 2016 06:59:51 -0600 Subject: [PATCH 0054/2196] Fix recognition of REPL commands. --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 8d2fbd919c9..e52c3b257a7 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -286,7 +286,7 @@ bool NixRepl::processLine(string line) string command, arg; if (line[0] == ':') { - size_t p = line.find(' '); + size_t p = line.find_first_of(" \n\r\t"); command = string(line, 0, p); if (p != string::npos) arg = removeWhitespace(string(line, p)); } else { From 56c7f0e8c581c66a968fdae681e9c417817e28d0 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Thu, 18 Feb 2016 07:04:55 -0600 Subject: [PATCH 0055/2196] Fix typo in comment. --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index e52c3b257a7..3834e572a2d 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -99,7 +99,7 @@ void NixRepl::mainLoop(const Strings & files) string input; while (true) { - // When continuing input from a previous, don't print a prompt, just align to the same + // When continuing input from previous lines, don't print a prompt, just align to the same // number of chars as the prompt. const char * prompt = input.empty() ? "nix-repl> " : " "; if (!getLine(input, prompt)) { From 97da6d62f2a1580dfdae9575c92418d5f45a29af Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Sat, 20 Feb 2016 01:10:06 -0600 Subject: [PATCH 0056/2196] Print syntactially invalid attribute names as strings. --- nix-repl.cc | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index f4850271dc6..577efa8e2ba 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -472,6 +472,19 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m } +std::ostream & printStringValue(std::ostream & str, const char * string) { + str << "\""; + for (const char * i = string; *i; i++) + if (*i == '\"' || *i == '\\') str << "\\" << *i; + else if (*i == '\n') str << "\\n"; + else if (*i == '\r') str << "\\r"; + else if (*i == '\t') str << "\\t"; + else str << *i; + str << "\""; + return str; +} + + // FIXME: lot of cut&paste from Nix's eval.cc. 
std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen) { @@ -491,14 +504,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; case tString: - str << "\""; - for (const char * i = v.string.s; *i; i++) - if (*i == '\"' || *i == '\\') str << "\\" << *i; - else if (*i == '\n') str << "\\n"; - else if (*i == '\r') str << "\\r"; - else if (*i == '\t') str << "\\t"; - else str << *i; - str << "\""; + printStringValue(str, v.string.s); break; case tPath: @@ -546,7 +552,11 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m } for (auto & i : sorted) { - str << i.first << " = "; + if (isVarName(i.first)) + str << i.first; + else + printStringValue(str, i.first.c_str()); + str << " = "; if (hidden.find(i.first) != hidden.end()) str << "«...»"; else if (seen.find(i.second) != seen.end()) From 103c46abc273266afcd5dbffa40151114234a02b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20J=C3=B8rgen=20Br=C3=B8nner?= Date: Tue, 23 Feb 2016 23:19:49 +0100 Subject: [PATCH 0057/2196] Preserve readline history across sessions. Add rl_readline_name. --- README.md | 4 ++++ nix-repl.cc | 7 ++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c5964fd5ea1..acdcd367fc9 100644 --- a/README.md +++ b/README.md @@ -102,3 +102,7 @@ example: nix-repl> config.networking.use config.networking.useDHCP config.networking.usePredictableInterfaceNames + +Input history is preserved by readline in ~/.nix-repl-history +The readline "application name" is nix-repl. This allows for nix-repl specific +settings in ~/.inputrc diff --git a/nix-repl.cc b/nix-repl.cc index 577efa8e2ba..1c878cd0e23 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -20,6 +20,7 @@ using namespace nix; string programId = "nix-repl"; +const string historyFile = string(getenv("HOME")) + "/.nix-repl-history"; struct NixRepl @@ -91,8 +92,10 @@ void NixRepl::mainLoop(const Strings & files) reloadFiles(); if (!loadedFiles.empty()) std::cout << std::endl; + // Allow nix-repl specific settings in .inputrc + rl_readline_name = "nix-repl"; using_history(); - read_history(0); + read_history(historyFile.c_str()); string input; @@ -649,5 +652,7 @@ int main(int argc, char * * argv) store = openStore(); NixRepl repl(searchPath); repl.mainLoop(files); + + write_history(historyFile.c_str()); }); } From 87e6649fc30a37adabf384a68f588de946bc3468 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Tue, 23 Feb 2016 18:29:56 -0600 Subject: [PATCH 0058/2196] Fix handling of whitespace. Whitespace will no longer be removed from input lines, which fixes pasting multiline strings containing end-of-line or beginning-of-line whitespace. --- nix-repl.cc | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 577efa8e2ba..1cdff20fd41 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -178,14 +178,13 @@ bool NixRepl::getLine(string & input, const char * prompt) char * s = readline(prompt); if (!s) return false; - string line = chomp(string(s)); - input.append(removeWhitespace(line)); + input.append(s); input.push_back('\n'); - free(s); - if (line != "") { - add_history(line.c_str()); + if (!removeWhitespace(s).empty()) { + add_history(s); append_history(1, 0); } + free(s); } _isInterrupted = 0; From 38816759fc7f70605ecfd73304b9d442db388b78 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Tue, 23 Feb 2016 18:30:21 -0600 Subject: [PATCH 0059/2196] Ignore blank inputs. 
Previously, nix-repl would consider this an incomplete parse and wait for the next line as if it was a multiline input. Blank lines in the middle of a multiline input will continue to work. --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 1cdff20fd41..6f4287a2b69 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -106,7 +106,7 @@ void NixRepl::mainLoop(const Strings & files) } try { - if (!processLine(input)) return; + if (!removeWhitespace(input).empty() && !processLine(input)) return; } catch (ParseError & e) { if (e.msg().find("unexpected $end") != std::string::npos) { // For parse errors on incomplete input, we continue waiting for the next line of From ff8d0698c76914f30d5311e1ceb0ab7d18a543b6 Mon Sep 17 00:00:00 2001 From: Fabian Schmitthenner Date: Sun, 28 Feb 2016 22:41:16 +0000 Subject: [PATCH 0060/2196] fix nix-repl after we don't have a global store variable anymore (cf nix@c10c61449f954702ae6d8092120321744acd82ff) --- nix-repl.cc | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 6f4287a2b69..89c35c311db 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -38,7 +38,7 @@ struct NixRepl StringSet completions; StringSet::iterator curCompletion; - NixRepl(const Strings & searchPath); + NixRepl(const Strings & searchPath, nix::ref store); void mainLoop(const Strings & files); void completePrefix(string prefix); bool getLine(string & input, const char * prompt); @@ -73,8 +73,8 @@ string removeWhitespace(string s) } -NixRepl::NixRepl(const Strings & searchPath) - : state(searchPath) +NixRepl::NixRepl(const Strings & searchPath, nix::ref store) + : state(searchPath, store) , staticEnv(false, &state.staticBaseEnv) { curDir = absPath("."); @@ -334,7 +334,7 @@ bool NixRepl::processLine(string line) if (!getDerivation(state, v, drvInfo, false)) throw Error("expression does not evaluation to a derivation, so I can't build it"); Path drvPath = drvInfo.queryDrvPath(); - if (drvPath == "" || !store->isValidPath(drvPath)) + if (drvPath == "" || !state.store->isValidPath(drvPath)) throw Error("expression did not evaluate to a valid derivation"); if (command == ":b") { @@ -645,8 +645,7 @@ int main(int argc, char * * argv) return true; }); - store = openStore(); - NixRepl repl(searchPath); + NixRepl repl(searchPath, openStore()); repl.mainLoop(files); }); } From 86e93b9f61cf53cfd766e1724e65507aca952f55 Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Wed, 17 Feb 2016 20:31:30 -0600 Subject: [PATCH 0061/2196] Add :x command which works like `nix-shell -p`. 
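For example, something like

    nix-repl> :x (import <nixpkgs> {}).hello

should drop into a shell with GNU Hello on $PATH, comparable to `nix-shell -p hello` (illustrative; assumes <nixpkgs> resolves on the Nix search path).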
--- nix-repl.cc | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index 8c4f4017470..adf4186975b 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -39,6 +39,7 @@ struct NixRepl void mainLoop(const Strings & files); void completePrefix(string prefix); bool getLine(string & input, const char * prompt); + Path getDerivationPath(Value & v); bool processLine(string line); void loadFile(const Path & path); void initEnv(); @@ -300,6 +301,17 @@ bool isVarName(const string & s) } +Path NixRepl::getDerivationPath(Value & v) { + DrvInfo drvInfo(state); + if (!getDerivation(state, v, drvInfo, false)) + throw Error("expression does not evaluate to a derivation, so I can't build it"); + Path drvPath = drvInfo.queryDrvPath(); + if (drvPath == "" || !state.store->isValidPath(drvPath)) + throw Error("expression did not evaluate to a valid derivation"); + return drvPath; +} + + bool NixRepl::processLine(string line) { if (line == "") return true; @@ -327,7 +339,8 @@ bool NixRepl::processLine(string line) << " :q Exit nix-repl\n" << " :r Reload all files\n" << " :s Build dependencies of derivation, then start nix-shell\n" - << " :t Describe result of evaluation\n"; + << " :t Describe result of evaluation\n" + << " :x Build derivation, then start nix-shell\n"; } else if (command == ":a" || command == ":add") { @@ -350,17 +363,21 @@ bool NixRepl::processLine(string line) Value v; evalString(arg, v); std::cout << showType(v) << std::endl; + + } else if (command == ":x") { + Value v, f, result; + evalString(arg, v); + evalString("drv: (import {}).runCommand \"shell\" { buildInputs = [ drv ]; } \"\"", f); + state.callFunction(f, v, result, Pos()); + + Path drvPath = getDerivationPath(result); + runProgram("nix-shell", Strings{drvPath}); } else if (command == ":b" || command == ":i" || command == ":s") { Value v; evalString(arg, v); - DrvInfo drvInfo(state); - if (!getDerivation(state, v, drvInfo, false)) - throw Error("expression does not evaluate to a derivation, so I can't build it"); - Path drvPath = drvInfo.queryDrvPath(); - if (drvPath == "" || !state.store->isValidPath(drvPath)) - throw Error("expression did not evaluate to a valid derivation"); + Path drvPath = getDerivationPath(v); if (command == ":b") { /* We could do the build in this process using buildPaths(), From e2ff27da07f30c598c88e8e7552be0d126e2b4da Mon Sep 17 00:00:00 2001 From: Scott Olson Date: Fri, 19 Feb 2016 04:00:36 -0600 Subject: [PATCH 0062/2196] Rename :x to :u, for 'use'. 
--- nix-repl.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index adf4186975b..a9a21ae766d 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -340,7 +340,7 @@ bool NixRepl::processLine(string line) << " :r Reload all files\n" << " :s Build dependencies of derivation, then start nix-shell\n" << " :t Describe result of evaluation\n" - << " :x Build derivation, then start nix-shell\n"; + << " :u Build derivation, then start nix-shell\n"; } else if (command == ":a" || command == ":add") { @@ -364,7 +364,7 @@ bool NixRepl::processLine(string line) evalString(arg, v); std::cout << showType(v) << std::endl; - } else if (command == ":x") { + } else if (command == ":u") { Value v, f, result; evalString(arg, v); evalString("drv: (import {}).runCommand \"shell\" { buildInputs = [ drv ]; } \"\"", f); From 8bec2c07a1e25e3471ee5f1b7aedb30a5d5b03cd Mon Sep 17 00:00:00 2001 From: Fabian Schmitthenner Date: Sat, 5 Mar 2016 16:47:17 +0000 Subject: [PATCH 0063/2196] When showing a lambda, also show the position of the definition --- nix-repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix-repl.cc b/nix-repl.cc index 8c4f4017470..e17f73ae0ca 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -628,7 +628,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; case tLambda: - str << "«lambda»"; + str << "«lambda defined at " << v.lambda.fun->pos << "»"; break; case tPrimOp: From 828cf7b0582220ce568b69e9cc51af794bdd2416 Mon Sep 17 00:00:00 2001 From: Fabian Schmitthenner Date: Sat, 19 Mar 2016 13:52:39 +0000 Subject: [PATCH 0064/2196] show trace of errors when using --show-trace --- nix-repl.cc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index a9a21ae766d..d1c67276c2f 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -14,6 +14,7 @@ #include "get-drvs.hh" #include "derivations.hh" #include "affinity.hh" +#include "globals.hh" using namespace std; using namespace nix; @@ -108,6 +109,7 @@ NixRepl::NixRepl(const Strings & searchPath, nix::ref store) void NixRepl::mainLoop(const Strings & files) { + string error = ANSI_RED "error:" ANSI_NORMAL " "; std::cout << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." << std::endl << std::endl; for (auto & i : files) @@ -138,12 +140,12 @@ void NixRepl::mainLoop(const Strings & files) // input without clearing the input so far. continue; } else { - printMsg(lvlError, "error: " + e.msg()); + printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } } catch (Error & e) { - printMsg(lvlError, "error: " + e.msg()); + printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } catch (Interrupted & e) { - printMsg(lvlError, "error: " + e.msg()); + printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } // We handled the current input fully, so we should clear it and read brand new input. 
From eaabcba1c39a22c588b8c9336ec7ba98aefce86e Mon Sep 17 00:00:00 2001 From: Emery Hemingway Date: Sat, 25 Jun 2016 13:25:31 +0200 Subject: [PATCH 0065/2196] Colorize Berlin NixOS meetup --- nix-repl.cc | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index a9a21ae766d..e28c543035f 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -18,6 +18,13 @@ using namespace std; using namespace nix; +#define ESC_RED "\033[31m" +#define ESC_GRE "\033[32m" +#define ESC_YEL "\033[33m" +#define ESC_BLU "\033[34m" +#define ESC_MAG "\033[35m" +#define ESC_CYA "\033[36m" +#define ESC_END "\033[0m" struct NixRepl { @@ -540,23 +547,25 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m switch (v.type) { case tInt: - str << v.integer; + str << ESC_CYA << v.integer << ESC_END; break; case tBool: - str << (v.boolean ? "true" : "false"); + str << ESC_CYA << (v.boolean ? "true" : "false") << ESC_END; break; case tString: + str << ESC_YEL; printStringValue(str, v.string.s); + str << ESC_END; break; case tPath: - str << v.path; // !!! escaping? + str << ESC_GRE << v.path << ESC_END; // !!! escaping? break; case tNull: - str << "null"; + str << ESC_CYA "null" ESC_END; break; case tAttrs: { @@ -609,7 +618,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m try { printValue(str, *i.second, maxDepth - 1, seen); } catch (AssertionError & e) { - str << "«error: " << e.msg() << "»"; + str << ESC_RED "«error: " << e.msg() << "»" ESC_END; } str << "; "; } @@ -635,7 +644,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m try { printValue(str, *v.listElems()[n], maxDepth - 1, seen); } catch (AssertionError & e) { - str << "«error: " << e.msg() << "»"; + str << ESC_RED "«error: " << e.msg() << "»" ESC_END; } str << " "; } @@ -645,19 +654,19 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; case tLambda: - str << "«lambda»"; + str << ESC_BLU "«lambda»" ESC_END; break; case tPrimOp: - str << "«primop»"; + str << ESC_MAG "«primop»" ESC_END; break; case tPrimOpApp: - str << "«primop-app»"; + str << ESC_BLU "«primop-app»" ESC_END; break; default: - str << "«unknown»"; + str << "ESC_RED «unknown»" ESC_END; break; } From 34ec98176e4644c8c6ec45e4e932b83cbddc6cce Mon Sep 17 00:00:00 2001 From: Emery Hemingway Date: Sat, 25 Jun 2016 13:40:50 +0200 Subject: [PATCH 0066/2196] fixup "Colorize" Do not quote escape sequence macro --- nix-repl.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index e28c543035f..fc0c3791334 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -551,7 +551,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; case tBool: - str << ESC_CYA << (v.boolean ? "true" : "false") << ESC_END; + str << ESC_CYA << (v.boolean ? "true" : "false") << ESC_END; break; case tString: @@ -666,7 +666,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; default: - str << "ESC_RED «unknown»" ESC_END; + str << ESC_RED "«unknown»" ESC_END; break; } From 8b6052923167f19c1f4728c46e8cfc97b9f029fb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Jul 2016 11:21:59 +0200 Subject: [PATCH 0067/2196] Strip ANSI escapes from file names Also, use bright blue for lambdas, otherwise my eyes hurt. 
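Roughly what such a filter has to do, as a minimal sketch (not the implementation of filterANSIEscapes): drop CSI sequences of the form ESC '[' ... final-letter and keep everything else.

    #include <cctype>
    #include <string>

    // Minimal sketch: remove ESC '[' ... <letter> sequences, keep the rest.
    static std::string stripCsiSketch(const std::string & s)
    {
        std::string res;
        for (size_t i = 0; i < s.size(); ) {
            if (s[i] == '\033' && i + 1 < s.size() && s[i + 1] == '[') {
                i += 2;
                while (i < s.size() && !std::isalpha((unsigned char) s[i])) i++;
                if (i < s.size()) i++;     // skip the final command letter
            } else
                res += s[i++];
        }
        return res;
    }

    int main()
    {
        std::string coloured = "\033[34;1m" "lambda" "\033[0m";
        return stripCsiSketch(coloured) == "lambda" ? 0 : 1;
    }
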
--- nix-repl.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/nix-repl.cc b/nix-repl.cc index ab943f7bac1..0e8c67cf7f2 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -22,7 +22,7 @@ using namespace nix; #define ESC_RED "\033[31m" #define ESC_GRE "\033[32m" #define ESC_YEL "\033[33m" -#define ESC_BLU "\033[34m" +#define ESC_BLU "\033[34;1m" #define ESC_MAG "\033[35m" #define ESC_CYA "\033[36m" #define ESC_END "\033[0m" @@ -655,9 +655,12 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str << "]"; break; - case tLambda: - str << ESC_BLU "«lambda defined at " << v.lambda.fun->pos << "»" ESC_END; + case tLambda: { + std::ostringstream s; + s << v.lambda.fun->pos; + str << ESC_BLU "«lambda @ " << filterANSIEscapes(s.str()) << "»" ESC_END; break; + } case tPrimOp: str << ESC_MAG "«primop»" ESC_END; From 5476e987d5d605e8fc8e50d76912e342c722bbdb Mon Sep 17 00:00:00 2001 From: Kjetil Orbekk Date: Sun, 1 Jan 2017 16:13:11 -0500 Subject: [PATCH 0068/2196] Update path in documentation. --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c5964fd5ea1..57613cc19e9 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ Here is a typical `nix-repl` session: nix-repl> map (x: x * x) xs [ 1 4 9 16 25 ] - nix-repl> :l + nix-repl> :l Added 7 variables. nix-repl> config.services.dhcpd @@ -86,7 +86,7 @@ Here is a typical `nix-repl` session: Tab completion works on variables in scope and on attribute sets. For example: - $ nix-repl '' '' + $ nix-repl '' '' Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. nix-repl> thunder => thunderbird From bfa41eb6714a7e7c3956389ee063e898bd1f37ff Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Wed, 10 Aug 2016 10:44:39 -0400 Subject: [PATCH 0069/2196] nix-copy-closure: Implement in C++. 
Tests fail currently because the database is not given proper hashes in the VM --- .gitignore | 2 + Makefile | 1 + scripts/local.mk | 7 +- scripts/nix-copy-closure.in | 103 ----------------------- src/build-remote/build-remote.cc | 1 - src/build-remote/local.mk | 2 +- src/libstore/ssh-store.cc | 8 +- src/libstore/store-api.cc | 24 +++++- src/libstore/store-api.hh | 2 +- src/nix-copy-closure/local.mk | 7 ++ src/nix-copy-closure/nix-copy-closure.cc | 60 +++++++++++++ 11 files changed, 101 insertions(+), 116 deletions(-) delete mode 100755 scripts/nix-copy-closure.in create mode 100644 src/nix-copy-closure/local.mk create mode 100755 src/nix-copy-closure/nix-copy-closure.cc diff --git a/.gitignore b/.gitignore index 92f95fe1fcb..a8abc64ada5 100644 --- a/.gitignore +++ b/.gitignore @@ -81,6 +81,8 @@ Makefile.config # /src/nix-build/ /src/nix-build/nix-build +/src/nix-copy-closure/nix-copy-closure + /src/build-remote/build-remote # /tests/ diff --git a/Makefile b/Makefile index 14be271bb10..8390c867d8b 100644 --- a/Makefile +++ b/Makefile @@ -11,6 +11,7 @@ makefiles = \ src/nix-env/local.mk \ src/nix-daemon/local.mk \ src/nix-collect-garbage/local.mk \ + src/nix-copy-closure/local.mk \ src/nix-prefetch-url/local.mk \ src/buildenv/local.mk \ src/resolve-system-dependencies/local.mk \ diff --git a/scripts/local.mk b/scripts/local.mk index ee8ae6845dc..9f666bde4d0 100644 --- a/scripts/local.mk +++ b/scripts/local.mk @@ -1,8 +1,3 @@ -nix_bin_scripts := \ - $(d)/nix-copy-closure \ - -bin-scripts += $(nix_bin_scripts) - nix_noinst_scripts := \ $(d)/build-remote.pl \ $(d)/nix-http-export.cgi \ @@ -16,4 +11,4 @@ profiledir = $(sysconfdir)/profile.d $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644)) $(eval $(call install-program-in, $(d)/build-remote.pl, $(libexecdir)/nix)) -clean-files += $(nix_bin_scripts) $(nix_noinst_scripts) +clean-files += $(nix_noinst_scripts) diff --git a/scripts/nix-copy-closure.in b/scripts/nix-copy-closure.in deleted file mode 100755 index af1d3091926..00000000000 --- a/scripts/nix-copy-closure.in +++ /dev/null @@ -1,103 +0,0 @@ -#! 
@perl@ -w @perlFlags@ - -use utf8; -use strict; -use Nix::SSH; -use Nix::Config; -use Nix::Store; -use Nix::CopyClosure; -use List::Util qw(sum); - -binmode STDERR, ":encoding(utf8)"; - -if (scalar @ARGV < 1) { - print STDERR < 0) { - print STDERR "copying ", scalar @missing, " missing paths from ‘$sshHost’...\n"; - writeInt(5, $to); # == cmdExportPaths - writeInt(0, $to); # obsolete - writeStrings(\@missing, $to); - importPaths(fileno($from), 1); - } - -} diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 98ccc3ddc37..1ac9711a110 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -9,7 +9,6 @@ #include "shared.hh" #include "pathlocks.hh" #include "globals.hh" -#include "serve-protocol.hh" #include "serialise.hh" #include "store-api.hh" #include "derivations.hh" diff --git a/src/build-remote/local.mk b/src/build-remote/local.mk index 05b8cb45143..62d5a010c24 100644 --- a/src/build-remote/local.mk +++ b/src/build-remote/local.mk @@ -8,4 +8,4 @@ build-remote_LIBS = libmain libutil libformat libstore build-remote_SOURCES := $(d)/build-remote.cc -build-remote_CXXFLAGS = -DSYSCONFDIR="\"$(sysconfdir)\"" -Isrc/nix-store +build-remote_CXXFLAGS = -DSYSCONFDIR="\"$(sysconfdir)\"" diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 3d01594009a..cce0458c69f 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -39,6 +39,8 @@ class SSHStore : public RemoteStore string uri; Path key; + + bool compress; }; SSHStore::SSHStore(string uri, const Params & params, size_t maxConnections) @@ -48,6 +50,7 @@ SSHStore::SSHStore(string uri, const Params & params, size_t maxConnections) , socketPath((Path) tmpDir + "/ssh.sock") , uri(std::move(uri)) , key(get(params, "ssh-key", "")) + , compress(get(params, "compress", "") == "true") { /* open a connection and perform the handshake to verify all is well */ connections->get(); @@ -90,11 +93,12 @@ ref SSHStore::getFSAccessor() ref SSHStore::openConnection() { if ((pid_t) sshMaster == -1) { + auto flags = compress ? "-NMCS" : "-NMS"; sshMaster = startProcess([&]() { if (key.empty()) - execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), uri.c_str(), NULL); + execlp("ssh", "ssh", flags, socketPath.c_str(), uri.c_str(), NULL); else - execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), "-i", key.c_str(), uri.c_str(), NULL); + execlp("ssh", "ssh", flags, socketPath.c_str(), "-i", key.c_str(), uri.c_str(), NULL); throw SysError("starting ssh master"); }); } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 8fdd6277155..c8ca00f0069 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -4,6 +4,7 @@ #include "util.hh" #include "nar-info-disk-cache.hh" #include "thread-pool.hh" +#include "derivations.hh" #include @@ -699,8 +700,27 @@ std::list> getDefaultSubstituters() } -void copyPaths(ref from, ref to, const Paths & storePaths) -{ +void copyPaths(ref from, ref to, const Paths & storePaths, bool substitute) +{ + if (substitute) { + /* Filter out .drv files (we don't want to build anything). */ + PathSet paths2; + for (auto & path : storePaths) + if (!isDerivation(path)) paths2.insert(path); + unsigned long long downloadSize, narSize; + PathSet willBuild, willSubstitute, unknown; + to->queryMissing(PathSet(paths2.begin(), paths2.end()), + willBuild, willSubstitute, unknown, downloadSize, narSize); + /* FIXME: should use ensurePath(), but it only + does one path at a time. 
*/ + if (!willSubstitute.empty()) + try { + to->buildPaths(willSubstitute); + } catch (Error & e) { + printMsg(lvlError, format("warning: %1%") % e.msg()); + } + } + std::string copiedLabel = "copied"; logger->setExpected(copiedLabel, storePaths.size()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index ec3bf5a6fd8..30ee433bf07 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -625,7 +625,7 @@ void removeTempRoots(); ref openStore(const std::string & uri = getEnv("NIX_REMOTE")); -void copyPaths(ref from, ref to, const Paths & storePaths); +void copyPaths(ref from, ref to, const Paths & storePaths, bool substitute = false); enum StoreType { tDaemon, diff --git a/src/nix-copy-closure/local.mk b/src/nix-copy-closure/local.mk new file mode 100644 index 00000000000..42bb34dd820 --- /dev/null +++ b/src/nix-copy-closure/local.mk @@ -0,0 +1,7 @@ +programs += nix-copy-closure + +nix-copy-closure_DIR := $(d) + +nix-copy-closure_LIBS = libmain libutil libformat libstore + +nix-copy-closure_SOURCES := $(d)/nix-copy-closure.cc diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc new file mode 100755 index 00000000000..b7e997ca4b0 --- /dev/null +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -0,0 +1,60 @@ +#include "shared.hh" +#include "store-api.hh" + +using namespace nix; + +int main(int argc, char ** argv) +{ + return handleExceptions(argv[0], [&]() { + initNix(); + auto gzip = false; + auto toMode = true; + auto includeOutputs = false; + auto dryRun = false; + auto useSubstitutes = false; + auto sshHost = string{}; + auto storePaths = PathSet{}; + parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { + if (*arg == "--help") + showManPage("nix-copy-closure"); + else if (*arg == "--version") + printVersion("nix-copy-closure"); + else if (*arg == "--gzip" || *arg == "--bzip2" || *arg == "--xz") { + if (*arg != "--gzip") + printMsg(lvlError, format("Warning: ‘%1%’ is not implemented, falling back to gzip") % *arg); + gzip = true; + } else if (*arg == "--from") + toMode = false; + else if (*arg == "--to") + toMode = true; + else if (*arg == "--include-outputs") + includeOutputs = true; + else if (*arg == "--show-progress") + printMsg(lvlError, "Warning: ‘--show-progress’ is not implemented"); + else if (*arg == "--dry-run") + dryRun = true; + else if (*arg == "--use-substitutes" || *arg == "-s") + useSubstitutes = true; + else if (sshHost.empty()) + sshHost = *arg; + else + storePaths.insert(*arg); + return true; + }); + if (sshHost.empty()) + throw UsageError("no host name specified"); + + auto remoteUri = "ssh://" + sshHost + (gzip ? "?compress=true" : ""); + auto to = toMode ? openStore(remoteUri) : openStore(); + auto from = toMode ? openStore() : openStore(remoteUri); + if (includeOutputs) { + auto newPaths = PathSet{}; + for (const auto & p : storePaths) { + auto outputs = from->queryDerivationOutputs(p); + newPaths.insert(outputs.begin(), outputs.end()); + } + storePaths.insert(newPaths.begin(), newPaths.end()); + } + copyPaths(from, to, Paths(storePaths.begin(), storePaths.end()), useSubstitutes); + }); +} From 4425a5c5470e4f26213cfecac404fed88d0f35aa Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 24 Jan 2017 17:18:11 +0100 Subject: [PATCH 0070/2196] Move exportReferencesGraph into a separate method startBuilder() is getting rather obese. 
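For context, the attribute this method handles is a flat whitespace-separated list of name/path pairs ("name1 path1 name2 path2 ..."). A minimal standalone sketch of just that pairing step (illustrative only, not the code being moved; the path below is a placeholder):

    #include <sstream>
    #include <stdexcept>
    #include <string>
    #include <utility>
    #include <vector>

    static std::vector<std::pair<std::string, std::string>>
    pairTokens(const std::string & s)
    {
        std::istringstream in(s);
        std::vector<std::string> tokens;
        std::string tok;
        while (in >> tok) tokens.push_back(tok);
        if (tokens.size() % 2 != 0)
            throw std::runtime_error("odd number of tokens in 'exportReferencesGraph'");
        std::vector<std::pair<std::string, std::string>> pairs;
        for (size_t i = 0; i + 1 < tokens.size(); i += 2)
            pairs.emplace_back(tokens[i], tokens[i + 1]);   // (fileName, storePath)
        return pairs;
    }

    int main()
    {
        auto pairs = pairTokens("closure /nix/store/example-image");
        return pairs.size() == 1 ? 0 : 1;
    }
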
--- src/libstore/build.cc | 99 ++++++++++++++++++++++++------------------- 1 file changed, 55 insertions(+), 44 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 7fc6ff0df0f..607601e2aac 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -864,6 +864,9 @@ class DerivationGoal : public Goal /* Start building a derivation. */ void startBuilder(); + /* Handle the exportReferencesGraph attribute. */ + void doExportReferencesGraph(); + /* Run the builder's process. */ void runChild(); @@ -1791,51 +1794,9 @@ void DerivationGoal::startBuilder() for (auto & output : drv->outputs) inputRewrites[hashPlaceholder(output.first)] = output.second.path; - /* The `exportReferencesGraph' feature allows the references graph - to be passed to a builder. This attribute should be a list of - pairs [name1 path1 name2 path2 ...]. The references graph of - each `pathN' will be stored in a text file `nameN' in the - temporary build directory. The text files have the format used - by `nix-store --register-validity'. However, the deriver - fields are left empty. */ - string s = get(drv->env, "exportReferencesGraph"); - Strings ss = tokenizeString(s); - if (ss.size() % 2 != 0) - throw BuildError(format("odd number of tokens in ‘exportReferencesGraph’: ‘%1%’") % s); - for (Strings::iterator i = ss.begin(); i != ss.end(); ) { - string fileName = *i++; - checkStoreName(fileName); /* !!! abuse of this function */ - /* Check that the store path is valid. */ - Path storePath = *i++; - if (!worker.store.isInStore(storePath)) - throw BuildError(format("‘exportReferencesGraph’ contains a non-store path ‘%1%’") - % storePath); - storePath = worker.store.toStorePath(storePath); - if (!worker.store.isValidPath(storePath)) - throw BuildError(format("‘exportReferencesGraph’ contains an invalid path ‘%1%’") - % storePath); - - /* If there are derivations in the graph, then include their - outputs as well. This is useful if you want to do things - like passing all build-time dependencies of some path to a - derivation that builds a NixOS DVD image. */ - PathSet paths, paths2; - worker.store.computeFSClosure(storePath, paths); - paths2 = paths; - - for (auto & j : paths2) { - if (isDerivation(j)) { - Derivation drv = worker.store.derivationFromPath(j); - for (auto & k : drv.outputs) - worker.store.computeFSClosure(k.second.path, paths); - } - } - - /* Write closure info to `fileName'. */ - writeFile(tmpDir + "/" + fileName, - worker.store.makeValidityRegistration(paths, false, false)); - } + /* Handle exportReferencesGraph(), if set. */ + doExportReferencesGraph(); /* If `build-users-group' is not empty, then we have to build as @@ -2242,6 +2203,56 @@ void DerivationGoal::startBuilder() } +void DerivationGoal::doExportReferencesGraph() +{ + /* The `exportReferencesGraph' feature allows the references graph + to be passed to a builder. This attribute should be a list of + pairs [name1 path1 name2 path2 ...]. The references graph of + each `pathN' will be stored in a text file `nameN' in the + temporary build directory. The text files have the format used + by `nix-store --register-validity'. However, the deriver + fields are left empty. */ + string s = get(drv->env, "exportReferencesGraph"); + Strings ss = tokenizeString(s); + if (ss.size() % 2 != 0) + throw BuildError(format("odd number of tokens in ‘exportReferencesGraph’: ‘%1%’") % s); + for (Strings::iterator i = ss.begin(); i != ss.end(); ) { + string fileName = *i++; + checkStoreName(fileName); /* !!! 
abuse of this function */ + + /* Check that the store path is valid. */ + Path storePath = *i++; + if (!worker.store.isInStore(storePath)) + throw BuildError(format("‘exportReferencesGraph’ contains a non-store path ‘%1%’") + % storePath); + storePath = worker.store.toStorePath(storePath); + if (!worker.store.isValidPath(storePath)) + throw BuildError(format("‘exportReferencesGraph’ contains an invalid path ‘%1%’") + % storePath); + + /* If there are derivations in the graph, then include their + outputs as well. This is useful if you want to do things + like passing all build-time dependencies of some path to a + derivation that builds a NixOS DVD image. */ + PathSet paths, paths2; + worker.store.computeFSClosure(storePath, paths); + paths2 = paths; + + for (auto & j : paths2) { + if (isDerivation(j)) { + Derivation drv = worker.store.derivationFromPath(j); + for (auto & k : drv.outputs) + worker.store.computeFSClosure(k.second.path, paths); + } + } + + /* Write closure info to `fileName'. */ + writeFile(tmpDir + "/" + fileName, + worker.store.makeValidityRegistration(paths, false, false)); + } +} + + void DerivationGoal::runChild() { /* Warning: in the child we should absolutely not make any SQLite From e8c43abd9aed4d88f2cce9ebc97de73930dffdfb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 24 Jan 2017 20:13:19 +0100 Subject: [PATCH 0071/2196] On HTTP errors, also show the curl error This is a hopefully temporary measure to diagnose the intermittent "HTTP error 200" failures. --- src/libstore/download.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 8030e83b0dd..074e0ca6642 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -276,7 +276,7 @@ struct CurlDownloader : public Downloader code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted ? DownloadError(Interrupted, format("download of ‘%s’ was interrupted") % request.uri) : httpStatus != 0 - ? DownloadError(err, format("unable to download ‘%s’: HTTP error %d") % request.uri % httpStatus) + ? DownloadError(err, format("unable to download ‘%s’: HTTP error %d (curl error: %s)") % request.uri % httpStatus % curl_easy_strerror(code)) : DownloadError(err, format("unable to download ‘%s’: %s (%d)") % request.uri % curl_easy_strerror(code) % code); /* If this is a transient error, then maybe retry the From a529c740d28859201a3a4b245b88ade96fb89fb0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 12:00:28 +0100 Subject: [PATCH 0072/2196] Moving more code out of DerivationGoal::startBuilder() --- src/libstore/build.cc | 211 ++++++++++++++++++++++-------------------- 1 file changed, 111 insertions(+), 100 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 607601e2aac..1d039d33849 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -780,6 +780,7 @@ class DerivationGoal : public Goal }; typedef map DirsInChroot; // maps target path to source path DirsInChroot dirsInChroot; + typedef map Environment; Environment env; @@ -817,6 +818,8 @@ class DerivationGoal : public Goal const uid_t sandboxUid = 1000; const gid_t sandboxGid = 100; + const static Path homeDir; + public: DerivationGoal(const Path & drvPath, const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode = bmNormal); @@ -864,6 +867,12 @@ class DerivationGoal : public Goal /* Start building a derivation. */ void startBuilder(); + /* Fill in the environment for the builder. 
*/ + void initEnv(); + + /* Make a file owned by the builder. */ + void chownToBuilder(const Path & path); + /* Handle the exportReferencesGraph attribute. */ void doExportReferencesGraph(); @@ -907,6 +916,9 @@ class DerivationGoal : public Goal }; +const Path DerivationGoal::homeDir = "/homeless-shelter"; + + DerivationGoal::DerivationGoal(const Path & drvPath, const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode) : Goal(worker) @@ -1672,11 +1684,7 @@ void DerivationGoal::startBuilder() additionalSandboxProfile = get(drv->env, "__sandboxProfile"); #endif - /* Are we doing a chroot build? Note that fixed-output - derivations are never done in a chroot, mainly so that - functions like fetchurl (which needs a proper /etc/resolv.conf) - work properly. Purity checking for fixed-output derivations - is somewhat pointless anyway. */ + /* Are we doing a chroot build? */ { string x = settings.get("build-use-sandbox", /* deprecated alias */ @@ -1703,31 +1711,15 @@ void DerivationGoal::startBuilder() if (worker.store.storeDir != worker.store.realStoreDir) useChroot = true; - /* Construct the environment passed to the builder. */ - env.clear(); - - /* Most shells initialise PATH to some default (/bin:/usr/bin:...) when - PATH is not set. We don't want this, so we fill it in with some dummy - value. */ - env["PATH"] = "/path-not-set"; - - /* Set HOME to a non-existing path to prevent certain programs from using - /etc/passwd (or NIS, or whatever) to locate the home directory (for - example, wget looks for ~/.wgetrc). I.e., these tools use /etc/passwd - if HOME is not set, but they will just assume that the settings file - they are looking for does not exist if HOME is set but points to some - non-existing path. */ - Path homeDir = "/homeless-shelter"; - env["HOME"] = homeDir; - - /* Tell the builder where the Nix store is. Usually they - shouldn't care, but this is useful for purity checking (e.g., - the compiler or linker might only want to accept paths to files - in the store or in the build directory). */ - env["NIX_STORE"] = worker.store.storeDir; + /* If `build-users-group' is not empty, then we have to build as + one of the members of that group. */ + if (settings.buildUsersGroup != "" && getuid() == 0) { + buildUser.acquire(); - /* The maximum number of cores to utilize for parallel building. */ - env["NIX_BUILD_CORES"] = (format("%d") % settings.buildCores).str(); + /* Make sure that no other processes are executing under this + uid. */ + buildUser.kill(); + } /* Create a temporary directory where the build will take place. */ @@ -1737,86 +1729,18 @@ void DerivationGoal::startBuilder() /* In a sandbox, for determinism, always use the same temporary directory. */ tmpDirInSandbox = useChroot ? canonPath("/tmp", true) + "/nix-build-" + drvName + "-0" : tmpDir; + chownToBuilder(tmpDir); - /* Add all bindings specified in the derivation via the - environments, except those listed in the passAsFile - attribute. Those are passed as file names pointing to - temporary files containing the contents. 
*/ - PathSet filesToChown; - StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); - int fileNr = 0; - for (auto & i : drv->env) { - if (passAsFile.find(i.first) == passAsFile.end()) { - env[i.first] = i.second; - } else { - string fn = ".attr-" + std::to_string(fileNr++); - Path p = tmpDir + "/" + fn; - writeFile(p, i.second); - filesToChown.insert(p); - env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; - } - } - - /* For convenience, set an environment pointing to the top build - directory. */ - env["NIX_BUILD_TOP"] = tmpDirInSandbox; - - /* Also set TMPDIR and variants to point to this directory. */ - env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDirInSandbox; - - /* Explicitly set PWD to prevent problems with chroot builds. In - particular, dietlibc cannot figure out the cwd because the - inode of the current directory doesn't appear in .. (because - getdents returns the inode of the mount point). */ - env["PWD"] = tmpDirInSandbox; - - /* Compatibility hack with Nix <= 0.7: if this is a fixed-output - derivation, tell the builder, so that for instance `fetchurl' - can skip checking the output. On older Nixes, this environment - variable won't be set, so `fetchurl' will do the check. */ - if (fixedOutput) env["NIX_OUTPUT_CHECKED"] = "1"; - - /* *Only* if this is a fixed-output derivation, propagate the - values of the environment variables specified in the - `impureEnvVars' attribute to the builder. This allows for - instance environment variables for proxy configuration such as - `http_proxy' to be easily passed to downloaders like - `fetchurl'. Passing such environment variables from the caller - to the builder is generally impure, but the output of - fixed-output derivations is by definition pure (since we - already know the cryptographic hash of the output). */ - if (fixedOutput) { - Strings varNames = tokenizeString(get(drv->env, "impureEnvVars")); - for (auto & i : varNames) env[i] = getEnv(i); - } + /* Construct the environment passed to the builder. */ + initEnv(); /* Substitute output placeholders with the actual output paths. */ for (auto & output : drv->outputs) inputRewrites[hashPlaceholder(output.first)] = output.second.path; - /* Handle exportReferencesGraph(), if set. */ doExportReferencesGraph(); - - /* If `build-users-group' is not empty, then we have to build as - one of the members of that group. */ - if (settings.buildUsersGroup != "" && getuid() == 0) { - buildUser.acquire(); - - /* Make sure that no other processes are executing under this - uid. */ - buildUser.kill(); - - /* Change ownership of the temporary build directory. */ - filesToChown.insert(tmpDir); - - for (auto & p : filesToChown) - if (chown(p.c_str(), buildUser.getUID(), buildUser.getGID()) == -1) - throw SysError(format("cannot change ownership of ‘%1%’") % p); - } - - if (useChroot) { string defaultChrootDirs; @@ -2203,6 +2127,93 @@ void DerivationGoal::startBuilder() } +void DerivationGoal::initEnv() +{ + env.clear(); + + /* Most shells initialise PATH to some default (/bin:/usr/bin:...) when + PATH is not set. We don't want this, so we fill it in with some dummy + value. */ + env["PATH"] = "/path-not-set"; + + /* Set HOME to a non-existing path to prevent certain programs from using + /etc/passwd (or NIS, or whatever) to locate the home directory (for + example, wget looks for ~/.wgetrc). 
I.e., these tools use /etc/passwd + if HOME is not set, but they will just assume that the settings file + they are looking for does not exist if HOME is set but points to some + non-existing path. */ + env["HOME"] = homeDir; + + /* Tell the builder where the Nix store is. Usually they + shouldn't care, but this is useful for purity checking (e.g., + the compiler or linker might only want to accept paths to files + in the store or in the build directory). */ + env["NIX_STORE"] = worker.store.storeDir; + + /* The maximum number of cores to utilize for parallel building. */ + env["NIX_BUILD_CORES"] = (format("%d") % settings.buildCores).str(); + + /* Add all bindings specified in the derivation via the + environments, except those listed in the passAsFile + attribute. Those are passed as file names pointing to + temporary files containing the contents. */ + StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); + int fileNr = 0; + for (auto & i : drv->env) { + if (passAsFile.find(i.first) == passAsFile.end()) { + env[i.first] = i.second; + } else { + string fn = ".attr-" + std::to_string(fileNr++); + Path p = tmpDir + "/" + fn; + writeFile(p, i.second); + chownToBuilder(p); + env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; + } + } + + /* For convenience, set an environment pointing to the top build + directory. */ + env["NIX_BUILD_TOP"] = tmpDirInSandbox; + + /* Also set TMPDIR and variants to point to this directory. */ + env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDirInSandbox; + + /* Explicitly set PWD to prevent problems with chroot builds. In + particular, dietlibc cannot figure out the cwd because the + inode of the current directory doesn't appear in .. (because + getdents returns the inode of the mount point). */ + env["PWD"] = tmpDirInSandbox; + + /* Compatibility hack with Nix <= 0.7: if this is a fixed-output + derivation, tell the builder, so that for instance `fetchurl' + can skip checking the output. On older Nixes, this environment + variable won't be set, so `fetchurl' will do the check. */ + if (fixedOutput) env["NIX_OUTPUT_CHECKED"] = "1"; + + /* *Only* if this is a fixed-output derivation, propagate the + values of the environment variables specified in the + `impureEnvVars' attribute to the builder. This allows for + instance environment variables for proxy configuration such as + `http_proxy' to be easily passed to downloaders like + `fetchurl'. Passing such environment variables from the caller + to the builder is generally impure, but the output of + fixed-output derivations is by definition pure (since we + already know the cryptographic hash of the output). 
*/ + if (fixedOutput) { + Strings varNames = tokenizeString(get(drv->env, "impureEnvVars")); + for (auto & i : varNames) env[i] = getEnv(i); + } +} + + +void DerivationGoal::chownToBuilder(const Path & path) +{ + if (!buildUser.enabled()) return; + if (chown(path.c_str(), buildUser.getUID(), buildUser.getGID()) == -1) + throw SysError(format("cannot change ownership of ‘%1%’") % path); +} + + void DerivationGoal::doExportReferencesGraph() { /* The `exportReferencesGraph' feature allows the references graph From c0f2f4eeeffd9c62ee2c59b42e6824d297d210f1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 12:45:38 +0100 Subject: [PATCH 0073/2196] UserLock: Make more RAII-ish --- src/libstore/build.cc | 91 ++++++++++++++++++------------------------- 1 file changed, 38 insertions(+), 53 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1d039d33849..e0859269dce 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -440,16 +440,14 @@ class UserLock AutoCloseFD fdUserLock; string user; - uid_t uid = 0; - gid_t gid = 0; + uid_t uid; + gid_t gid; std::vector supplementaryGIDs; public: + UserLock(); ~UserLock(); - void acquire(); - void release(); - void kill(); string getUser() { return user; } @@ -465,16 +463,8 @@ class UserLock PathSet UserLock::lockedPaths; -UserLock::~UserLock() -{ - release(); -} - - -void UserLock::acquire() +UserLock::UserLock() { - assert(uid == 0); - assert(settings.buildUsersGroup != ""); /* Get the members of the build-users-group. */ @@ -551,20 +541,15 @@ void UserLock::acquire() } -void UserLock::release() +UserLock::~UserLock() { - if (uid == 0) return; - fdUserLock = -1; /* releases lock */ - assert(lockedPaths.find(fnUserLock) != lockedPaths.end()); + assert(lockedPaths.count(fnUserLock)); lockedPaths.erase(fnUserLock); - fnUserLock = ""; - uid = 0; } void UserLock::kill() { - assert(enabled()); killUser(uid); } @@ -720,7 +705,7 @@ class DerivationGoal : public Goal PathSet missingPaths; /* User selected for running the builder. */ - UserLock buildUser; + std::unique_ptr buildUser; /* The process ID of the builder. */ Pid pid; @@ -966,7 +951,7 @@ void DerivationGoal::killChild() if (pid != -1) { worker.childTerminated(this); - if (buildUser.enabled()) { + if (buildUser) { /* If we're using a build user, then there is a tricky race condition: if we kill the build user before the child has done its setuid() to the build user uid, then @@ -974,7 +959,7 @@ void DerivationGoal::killChild() pid.wait(). So also send a conventional kill to the child. */ ::kill(-pid, SIGKILL); /* ignore the result */ - buildUser.kill(); + buildUser->kill(); pid.wait(); } else pid.kill(); @@ -1395,7 +1380,7 @@ void DerivationGoal::tryToBuild() } catch (BuildError & e) { printError(e.msg()); outputLocks.unlock(); - buildUser.release(); + buildUser.reset(); worker.permanentFailure = true; done(BuildResult::InputRejected, e.msg()); return; @@ -1429,6 +1414,11 @@ void DerivationGoal::buildDone() { trace("build done"); + /* Release the build user at the end of this function. We don't do + it right away because we don't want another build grabbing this + uid and then messing around with our output. */ + Finally releaseBuildUser([&]() { buildUser.reset(); }); + /* Since we got an EOF on the logger pipe, the builder is presumed to have terminated. 
In fact, the builder could also have simply have closed its end of the pipe, so just to be sure, @@ -1458,7 +1448,7 @@ void DerivationGoal::buildDone() malicious user from leaving behind a process that keeps files open and modifies them after they have been chown'ed to root. */ - if (buildUser.enabled()) buildUser.kill(); + if (buildUser) buildUser->kill(); bool diskFull = false; @@ -1528,7 +1518,6 @@ void DerivationGoal::buildDone() /* Repeat the build if necessary. */ if (curRound++ < nrRounds) { outputLocks.unlock(); - buildUser.release(); state = &DerivationGoal::tryToBuild; worker.wakeUp(shared_from_this()); return; @@ -1545,7 +1534,6 @@ void DerivationGoal::buildDone() if (!hook) printError(e.msg()); outputLocks.unlock(); - buildUser.release(); BuildResult::Status st = BuildResult::MiscFailure; @@ -1567,9 +1555,6 @@ void DerivationGoal::buildDone() return; } - /* Release the build user, if applicable. */ - buildUser.release(); - done(BuildResult::Built); } @@ -1714,11 +1699,11 @@ void DerivationGoal::startBuilder() /* If `build-users-group' is not empty, then we have to build as one of the members of that group. */ if (settings.buildUsersGroup != "" && getuid() == 0) { - buildUser.acquire(); + buildUser = std::make_unique(); /* Make sure that no other processes are executing under this uid. */ - buildUser.kill(); + buildUser->kill(); } /* Create a temporary directory where the build will take @@ -1831,7 +1816,7 @@ void DerivationGoal::startBuilder() if (mkdir(chrootRootDir.c_str(), 0750) == -1) throw SysError(format("cannot create ‘%1%’") % chrootRootDir); - if (buildUser.enabled() && chown(chrootRootDir.c_str(), 0, buildUser.getGID()) == -1) + if (buildUser && chown(chrootRootDir.c_str(), 0, buildUser->getGID()) == -1) throw SysError(format("cannot change ownership of ‘%1%’") % chrootRootDir); /* Create a writable /tmp in the chroot. Many builders need @@ -1875,7 +1860,7 @@ void DerivationGoal::startBuilder() createDirs(chrootStoreDir); chmod_(chrootStoreDir, 01775); - if (buildUser.enabled() && chown(chrootStoreDir.c_str(), 0, buildUser.getGID()) == -1) + if (buildUser && chown(chrootStoreDir.c_str(), 0, buildUser->getGID()) == -1) throw SysError(format("cannot change ownership of ‘%1%’") % chrootStoreDir); for (auto & i : inputPaths) { @@ -2085,8 +2070,8 @@ void DerivationGoal::startBuilder() /* Set the UID/GID mapping of the builder's user namespace such that the sandbox user maps to the build user, or to the calling user (if build users are disabled). */ - uid_t hostUid = buildUser.enabled() ? buildUser.getUID() : getuid(); - uid_t hostGid = buildUser.enabled() ? buildUser.getGID() : getgid(); + uid_t hostUid = buildUser ? buildUser->getUID() : getuid(); + uid_t hostGid = buildUser ? 
buildUser->getGID() : getgid(); writeFile("/proc/" + std::to_string(pid) + "/uid_map", (format("%d %d 1") % sandboxUid % hostUid).str()); @@ -2104,7 +2089,7 @@ void DerivationGoal::startBuilder() } else #endif { - options.allowVfork = !buildUser.enabled() && !drv->isBuiltin(); + options.allowVfork = !buildUser && !drv->isBuiltin(); pid = startProcess([&]() { runChild(); }, options); @@ -2208,8 +2193,8 @@ void DerivationGoal::initEnv() void DerivationGoal::chownToBuilder(const Path & path) { - if (!buildUser.enabled()) return; - if (chown(path.c_str(), buildUser.getUID(), buildUser.getGID()) == -1) + if (!buildUser) return; + if (chown(path.c_str(), buildUser->getUID(), buildUser->getGID()) == -1) throw SysError(format("cannot change ownership of ‘%1%’") % path); } @@ -2497,22 +2482,22 @@ void DerivationGoal::runChild() descriptors except std*, so that's safe. Also note that setuid() when run as root sets the real, effective and saved UIDs. */ - if (setUser && buildUser.enabled()) { + if (setUser && buildUser) { /* Preserve supplementary groups of the build user, to allow admins to specify groups such as "kvm". */ - if (!buildUser.getSupplementaryGIDs().empty() && - setgroups(buildUser.getSupplementaryGIDs().size(), - buildUser.getSupplementaryGIDs().data()) == -1) + if (!buildUser->getSupplementaryGIDs().empty() && + setgroups(buildUser->getSupplementaryGIDs().size(), + buildUser->getSupplementaryGIDs().data()) == -1) throw SysError("cannot set supplementary groups of build user"); - if (setgid(buildUser.getGID()) == -1 || - getgid() != buildUser.getGID() || - getegid() != buildUser.getGID()) + if (setgid(buildUser->getGID()) == -1 || + getgid() != buildUser->getGID() || + getegid() != buildUser->getGID()) throw SysError("setgid failed"); - if (setuid(buildUser.getUID()) == -1 || - getuid() != buildUser.getUID() || - geteuid() != buildUser.getUID()) + if (setuid(buildUser->getUID()) == -1 || + getuid() != buildUser->getUID() || + geteuid() != buildUser->getUID()) throw SysError("setuid failed"); } @@ -2752,7 +2737,7 @@ void DerivationGoal::registerOutputs() build. Also, the output should be owned by the build user. */ if ((!S_ISLNK(st.st_mode) && (st.st_mode & (S_IWGRP | S_IWOTH))) || - (buildUser.enabled() && st.st_uid != buildUser.getUID())) + (buildUser && st.st_uid != buildUser->getUID())) throw BuildError(format("suspicious ownership or permission on ‘%1%’; rejecting this build output") % path); #endif @@ -2764,7 +2749,7 @@ void DerivationGoal::registerOutputs() /* Canonicalise first. This ensures that the path we're rewriting doesn't contain a hard link to /etc/shadow or something like that. */ - canonicalisePathMetaData(actualPath, buildUser.enabled() ? buildUser.getUID() : -1, inodesSeen); + canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen); /* FIXME: this is in-memory. */ StringSink sink; @@ -2822,7 +2807,7 @@ void DerivationGoal::registerOutputs() /* Get rid of all weird permissions. This also checks that all files are owned by the build user, if applicable. */ canonicalisePathMetaData(actualPath, - buildUser.enabled() && !rewritten ? buildUser.getUID() : -1, inodesSeen); + buildUser && !rewritten ? buildUser->getUID() : -1, inodesSeen); /* For this output path, find the references to other paths contained in it. 
Compute the SHA-256 NAR hash at the same From a55f589720e6499ed8ca1e3dd63ae18c52782150 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 12:51:35 +0100 Subject: [PATCH 0074/2196] openLockFile: Return an AutoCloseFD --- src/build-remote/build-remote.cc | 4 ++-- src/libstore/pathlocks.cc | 4 ++-- src/libstore/pathlocks.hh | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 1daf0b80ba7..acbd308f84e 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -109,7 +109,7 @@ static std::vector read_conf() static string currentLoad; -static int openSlotLock(const machine & m, unsigned long long slot) +static AutoCloseFD openSlotLock(const machine & m, unsigned long long slot) { std::ostringstream fn_stream(currentLoad, std::ios_base::ate | std::ios_base::out); fn_stream << "/"; @@ -187,7 +187,7 @@ int main (int argc, char * * argv) AutoCloseFD free; unsigned long long load = 0; for (unsigned long long slot = 0; slot < m.maxJobs; ++slot) { - AutoCloseFD slotLock = openSlotLock(m, slot); + auto slotLock = openSlotLock(m, slot); if (lockFile(slotLock.get(), ltWrite, false)) { if (!free) { free = std::move(slotLock); diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 620c9a6b752..55973799279 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -13,7 +13,7 @@ namespace nix { -int openLockFile(const Path & path, bool create) +AutoCloseFD openLockFile(const Path & path, bool create) { AutoCloseFD fd; @@ -21,7 +21,7 @@ int openLockFile(const Path & path, bool create) if (!fd && (create || errno != ENOENT)) throw SysError(format("opening lock file ‘%1%’") % path); - return fd.release(); + return fd; } diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh index 40103c393f6..2a7de611446 100644 --- a/src/libstore/pathlocks.hh +++ b/src/libstore/pathlocks.hh @@ -1,6 +1,6 @@ #pragma once -#include "types.hh" +#include "util.hh" namespace nix { @@ -9,7 +9,7 @@ namespace nix { /* Open (possibly create) a lock file and return the file descriptor. -1 is returned if create is false and the lock could not be opened because it doesn't exist. Any other error throws an exception. */ -int openLockFile(const Path & path, bool create); +AutoCloseFD openLockFile(const Path & path, bool create); /* Delete an open lock file. */ void deleteLockFile(const Path & path, int fd); @@ -19,7 +19,7 @@ enum LockType { ltRead, ltWrite, ltNone }; bool lockFile(int fd, LockType lockType, bool wait); -class PathLocks +class PathLocks { private: typedef std::pair FDPair; From 951357e5fb4cd0804e729866f204b635add926a3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 13:00:38 +0100 Subject: [PATCH 0075/2196] UserLock: Fix multi-threaded access to a global variable --- src/libstore/build.cc | 70 ++++++++++++++++++++++++------------------- 1 file changed, 40 insertions(+), 30 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index e0859269dce..6250de13cbb 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -434,7 +434,7 @@ class UserLock close that file again (without closing the original file descriptor), we lose the lock. So we have to be *very* careful not to open a lock file on which we are holding a lock. */ - static PathSet lockedPaths; /* !!! 
not thread-safe */ + static Sync lockedPaths_; Path fnUserLock; AutoCloseFD fdUserLock; @@ -460,7 +460,7 @@ class UserLock }; -PathSet UserLock::lockedPaths; +Sync UserLock::lockedPaths_; UserLock::UserLock() @@ -499,39 +499,48 @@ UserLock::UserLock() fnUserLock = (format("%1%/userpool/%2%") % settings.nixStateDir % pw->pw_uid).str(); - if (lockedPaths.find(fnUserLock) != lockedPaths.end()) - /* We already have a lock on this one. */ - continue; + { + auto lockedPaths(lockedPaths_.lock()); + if (lockedPaths->count(fnUserLock)) + /* We already have a lock on this one. */ + continue; + lockedPaths->insert(fnUserLock); + } + + try { - AutoCloseFD fd = open(fnUserLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600); - if (!fd) - throw SysError(format("opening user lock ‘%1%’") % fnUserLock); + AutoCloseFD fd = open(fnUserLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600); + if (!fd) + throw SysError(format("opening user lock ‘%1%’") % fnUserLock); - if (lockFile(fd.get(), ltWrite, false)) { - fdUserLock = std::move(fd); - lockedPaths.insert(fnUserLock); - user = i; - uid = pw->pw_uid; + if (lockFile(fd.get(), ltWrite, false)) { + fdUserLock = std::move(fd); + user = i; + uid = pw->pw_uid; - /* Sanity check... */ - if (uid == getuid() || uid == geteuid()) - throw Error(format("the Nix user should not be a member of ‘%1%’") - % settings.buildUsersGroup); + /* Sanity check... */ + if (uid == getuid() || uid == geteuid()) + throw Error(format("the Nix user should not be a member of ‘%1%’") + % settings.buildUsersGroup); #if __linux__ - /* Get the list of supplementary groups of this build user. This - is usually either empty or contains a group such as "kvm". */ - supplementaryGIDs.resize(10); - int ngroups = supplementaryGIDs.size(); - int err = getgrouplist(pw->pw_name, pw->pw_gid, - supplementaryGIDs.data(), &ngroups); - if (err == -1) - throw Error(format("failed to get list of supplementary groups for ‘%1%’") % pw->pw_name); - - supplementaryGIDs.resize(ngroups); + /* Get the list of supplementary groups of this build user. This + is usually either empty or contains a group such as "kvm". */ + supplementaryGIDs.resize(10); + int ngroups = supplementaryGIDs.size(); + int err = getgrouplist(pw->pw_name, pw->pw_gid, + supplementaryGIDs.data(), &ngroups); + if (err == -1) + throw Error(format("failed to get list of supplementary groups for ‘%1%’") % pw->pw_name); + + supplementaryGIDs.resize(ngroups); #endif - return; + return; + } + + } catch (...) { + lockedPaths_.lock()->erase(fnUserLock); } } @@ -543,8 +552,9 @@ UserLock::UserLock() UserLock::~UserLock() { - assert(lockedPaths.count(fnUserLock)); - lockedPaths.erase(fnUserLock); + auto lockedPaths(lockedPaths_.lock()); + assert(lockedPaths->count(fnUserLock)); + lockedPaths->erase(fnUserLock); } From 83ae6503e87c7f5237fb0f1602793c126436495a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 13:37:02 +0100 Subject: [PATCH 0076/2196] Fix interrupt handling --- src/libmain/shared.cc | 9 +++++++++ src/libutil/monitor-fd.hh | 3 +-- src/libutil/util.cc | 28 ++++++++++++++++------------ src/libutil/util.hh | 16 ++++++++++++++++ 4 files changed, 42 insertions(+), 14 deletions(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 12f083c7f79..d564e03853e 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -97,6 +97,9 @@ static void opensslLockCallback(int mode, int type, const char * file, int line) } +static void sigHandler(int signo) { } + + void initNix() { /* Turn on buffering for cerr. 
*/ @@ -130,6 +133,10 @@ void initNix() if (sigaction(SIGCHLD, &act, 0)) throw SysError("resetting SIGCHLD"); + /* Install a dummy SIGUSR1 handler for use with pthread_kill(). */ + act.sa_handler = sigHandler; + if (sigaction(SIGUSR1, &act, 0)) throw SysError("handling SIGUSR1"); + /* Register a SIGSEGV handler to detect stack overflows. */ detectStackOverflow(); @@ -253,6 +260,8 @@ void showManPage(const string & name) int handleExceptions(const string & programName, std::function fun) { + ReceiveInterrupts receiveInterrupts; // FIXME: need better place for this + string error = ANSI_RED "error:" ANSI_NORMAL " "; try { try { diff --git a/src/libutil/monitor-fd.hh b/src/libutil/monitor-fd.hh index 6f01ccd91a4..e0ec66c0180 100644 --- a/src/libutil/monitor-fd.hh +++ b/src/libutil/monitor-fd.hh @@ -27,8 +27,7 @@ public: fds[0].events = 0; if (poll(fds, 1, -1) == -1) abort(); // can't happen assert(fds[0].revents & POLLHUP); - /* We got POLLHUP, so send an INT signal to the main thread. */ - kill(getpid(), SIGINT); + triggerInterrupt(); }); }; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 52608ac2a01..ca4edc2cd6c 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1197,18 +1197,22 @@ static void signalHandlerThread(sigset_t set) int signal = 0; sigwait(&set, &signal); - if (signal == SIGINT || signal == SIGTERM || signal == SIGHUP) { - _isInterrupted = 1; - - { - auto interruptCallbacks(_interruptCallbacks.lock()); - for (auto & callback : *interruptCallbacks) { - try { - callback(); - } catch (...) { - ignoreException(); - } - } + if (signal == SIGINT || signal == SIGTERM || signal == SIGHUP) + triggerInterrupt(); + } +} + +void triggerInterrupt() +{ + _isInterrupted = 1; + + { + auto interruptCallbacks(_interruptCallbacks.lock()); + for (auto & callback : *interruptCallbacks) { + try { + callback(); + } catch (...) { + ignoreException(); } } } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index b68d48582b3..07141ffed6b 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -433,5 +433,21 @@ struct InterruptCallback std::unique_ptr createInterruptCallback( std::function callback); +void triggerInterrupt(); + +/* A RAII class that causes the current thread to receive SIGUSR1 when + the signal handler thread receives SIGINT. That is, this allows + SIGINT to be multiplexed to multiple threads. */ +struct ReceiveInterrupts +{ + pthread_t target; + std::unique_ptr callback; + + ReceiveInterrupts() + : target(pthread_self()) + , callback(createInterruptCallback([&]() { pthread_kill(target, SIGUSR1); })) + { } +}; + } From b1f001538e41a4f28e315baeede93a8fe70d6d62 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 13:42:07 +0100 Subject: [PATCH 0077/2196] Fix assertion failure when a path is locked Fixes: nix-store: src/libstore/build.cc:3649: void nix::Worker::run(const Goals&): Assertion `!awake.empty()' failed. --- src/libstore/pathlocks.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 55973799279..bf7ad3d2185 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -136,6 +136,7 @@ bool PathLocks::lockPaths(const PathSet & _paths, /* Failed to lock this path; release all other locks. 
*/ unlock(); + lockedPaths_.lock()->erase(lockPath); return false; } } From 54801ed6ad4e0ea8faa67b0b4ba10debeb824d3b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 16:06:50 +0100 Subject: [PATCH 0078/2196] Bindings: Add a method for iterating in lexicographically sorted order --- src/libexpr/attr-set.hh | 13 +++++++++++++ src/libexpr/eval.cc | 10 +++------- src/libexpr/get-drvs.cc | 24 +++++++++--------------- src/libexpr/primops.cc | 9 +++------ 4 files changed, 28 insertions(+), 28 deletions(-) diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh index 7cf6a9c5808..e1fc2bf6d79 100644 --- a/src/libexpr/attr-set.hh +++ b/src/libexpr/attr-set.hh @@ -75,6 +75,19 @@ public: size_t capacity() { return capacity_; } + /* Returns the attributes in lexicographically sorted order. */ + std::vector lexicographicOrder() const + { + std::vector res; + res.reserve(size_); + for (size_t n = 0; n < size_; n++) + res.emplace_back(&attrs[n]); + std::sort(res.begin(), res.end(), [](const Attr * a, const Attr * b) { + return (string) a->name < (string) b->name; + }); + return res; + } + friend class EvalState; }; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 64f3874db61..5f9f5bac1ed 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -91,13 +91,9 @@ static void printValue(std::ostream & str, std::set & active, con break; case tAttrs: { str << "{ "; - typedef std::map Sorted; - Sorted sorted; - for (auto & i : *v.attrs) - sorted[i.name] = i.value; - for (auto & i : sorted) { - str << i.first << " = "; - printValue(str, active, *i.second); + for (auto & i : v.attrs->lexicographicOrder()) { + str << i->name << " = "; + printValue(str, active, *i->value); str << "; "; } str << "}"; diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index dc5def911ca..5342739c53c 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -284,25 +284,19 @@ static void getDerivations(EvalState & state, Value & vIn, there are names clashes between derivations, the derivation bound to the attribute with the "lower" name should take precedence). */ - typedef std::map SortedSymbols; - SortedSymbols attrs; - for (auto & i : *v.attrs) - attrs.insert(std::pair(i.name, i.name)); - - for (auto & i : attrs) { - Activity act(*logger, lvlDebug, format("evaluating attribute ‘%1%’") % i.first); - string pathPrefix2 = addToPath(pathPrefix, i.first); - Value & v2(*v.attrs->find(i.second)->value); + for (auto & i : v.attrs->lexicographicOrder()) { + Activity act(*logger, lvlDebug, format("evaluating attribute ‘%1%’") % i->name); + string pathPrefix2 = addToPath(pathPrefix, i->name); if (combineChannels) - getDerivations(state, v2, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); - else if (getDerivation(state, v2, pathPrefix2, drvs, done, ignoreAssertionFailures)) { + getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); + else if (getDerivation(state, *i->value, pathPrefix2, drvs, done, ignoreAssertionFailures)) { /* If the value of this attribute is itself a set, should we recurse into it? => Only if it has a `recurseForDerivations = true' attribute. 
*/ - if (v2.type == tAttrs) { - Bindings::iterator j = v2.attrs->find(state.symbols.create("recurseForDerivations")); - if (j != v2.attrs->end() && state.forceBool(*j->value, *j->pos)) - getDerivations(state, v2, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); + if (i->value->type == tAttrs) { + Bindings::iterator j = i->value->attrs->find(state.symbols.create("recurseForDerivations")); + if (j != i->value->attrs->end() && state.forceBool(*j->value, *j->pos)) + getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); } } } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 59623874c3f..d8dbedcaf89 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -998,12 +998,9 @@ static void prim_attrNames(EvalState & state, const Pos & pos, Value * * args, V state.mkList(v, args[0]->attrs->size()); - unsigned int n = 0; - for (auto & i : *args[0]->attrs) - mkString(*(v.listElems()[n++] = state.allocValue()), i.name); - - std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; }); + size_t n = 0; + for (auto & i : args[0]->attrs->lexicographicOrder()) + mkString(*(v.listElems()[n++] = state.allocValue()), i->name); } From 6de33a9c675b187437a2e1abbcb290981a89ecb1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 25 Jan 2017 16:42:07 +0100 Subject: [PATCH 0079/2196] Add support for passing structured data to builders Previously, all derivation attributes had to be coerced into strings so that they could be passed via the environment. This is lossy (e.g. lists get flattened, necessitating configureFlags vs. configureFlagsArray, of which the latter cannot be specified as an attribute), doesn't support attribute sets at all, and has size limitations (necessitating hacks like passAsFile). This patch adds a new mode for passing attributes to builders, namely encoded as a JSON file ".attrs.json" in the current directory of the builder. This mode is activated via the special attribute __structuredAttrs = true; (The idea is that one day we can set this in stdenv.mkDerivation.) For example, stdenv.mkDerivation { __structuredAttrs = true; name = "foo"; buildInputs = [ pkgs.hello pkgs.cowsay ]; doCheck = true; hardening.format = false; } results in a ".attrs.json" file containing (sans the indentation): { "buildInputs": [], "builder": "/nix/store/ygl61ycpr2vjqrx775l1r2mw1g2rb754-bash-4.3-p48/bin/bash", "configureFlags": [ "--with-foo", "--with-bar=1 2" ], "doCheck": true, "hardening": { "format": false }, "name": "foo", "nativeBuildInputs": [ "/nix/store/10h6li26i7g6z3mdpvra09yyf10mmzdr-hello-2.10", "/nix/store/4jnvjin0r6wp6cv1hdm5jbkx3vinlcvk-cowsay-3.03" ], "propagatedBuildInputs": [], "propagatedNativeBuildInputs": [], "stdenv": "/nix/store/f3hw3p8armnzy6xhd4h8s7anfjrs15n2-stdenv", "system": "x86_64-linux" } "passAsFile" is ignored in this mode because it's not needed - large strings are included directly in the JSON representation. It is up to the builder to do something with the JSON representation. For example, in bash-based builders, lists/attrsets of string values could be mapped to bash (associative) arrays. 
--- src/libexpr/eval.cc | 2 + src/libexpr/eval.hh | 2 +- src/libexpr/primops.cc | 127 ++++++++++++++++++++++++++++------------- src/libstore/build.cc | 57 ++++++++++++------ 4 files changed, 130 insertions(+), 58 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 5f9f5bac1ed..d418ab4e43a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -291,6 +291,8 @@ EvalState::EvalState(const Strings & _searchPath, ref store) , sToString(symbols.create("__toString")) , sRight(symbols.create("right")) , sWrong(symbols.create("wrong")) + , sStructuredAttrs(symbols.create("__structuredAttrs")) + , sBuilder(symbols.create("builder")) , store(store) , baseEnv(allocEnv(128)) , staticBaseEnv(false, 0) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 195cb0db3ac..46d5a1cc866 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -68,7 +68,7 @@ public: const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, - sRight, sWrong; + sRight, sWrong, sStructuredAttrs, sBuilder; Symbol sDerivationNix; /* If set, force copying files to the Nix store even if they diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index d8dbedcaf89..5a570cefb2f 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -8,6 +8,7 @@ #include "names.hh" #include "store-api.hh" #include "util.hh" +#include "json.hh" #include "value-to-json.hh" #include "value-to-xml.hh" #include "primops.hh" @@ -474,6 +475,13 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * throw; } + /* Check whether attributes should be passed as a JSON file. */ + std::ostringstream jsonBuf; + std::unique_ptr jsonObject; + attr = args[0]->attrs->find(state.sStructuredAttrs); + if (attr != args[0]->attrs->end() && state.forceBool(*attr->value, pos)) + jsonObject = std::make_unique(jsonBuf); + /* Check whether null attributes should be ignored. */ bool ignoreNulls = false; attr = args[0]->attrs->find(state.sIgnoreNulls); @@ -491,24 +499,48 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * StringSet outputs; outputs.insert("out"); - for (auto & i : *args[0]->attrs) { - if (i.name == state.sIgnoreNulls) continue; - string key = i.name; + for (auto & i : args[0]->attrs->lexicographicOrder()) { + if (i->name == state.sIgnoreNulls) continue; + string key = i->name; Activity act(*logger, lvlVomit, format("processing attribute ‘%1%’") % key); + auto handleHashMode = [&](const std::string & s) { + if (s == "recursive") outputHashRecursive = true; + else if (s == "flat") outputHashRecursive = false; + else throw EvalError("invalid value ‘%s’ for ‘outputHashMode’ attribute, at %s", s, posDrvName); + }; + + auto handleOutputs = [&](const Strings & ss) { + outputs.clear(); + for (auto & j : ss) { + if (outputs.find(j) != outputs.end()) + throw EvalError(format("duplicate derivation output ‘%1%’, at %2%") % j % posDrvName); + /* !!! Check whether j is a valid attribute + name. */ + /* Derivations cannot be named ‘drv’, because + then we'd have an attribute ‘drvPath’ in + the resulting set. 
*/ + if (j == "drv") + throw EvalError(format("invalid derivation output name ‘drv’, at %1%") % posDrvName); + outputs.insert(j); + } + if (outputs.empty()) + throw EvalError(format("derivation cannot have an empty set of outputs, at %1%") % posDrvName); + }; + try { if (ignoreNulls) { - state.forceValue(*i.value); - if (i.value->type == tNull) continue; + state.forceValue(*i->value); + if (i->value->type == tNull) continue; } /* The `args' attribute is special: it supplies the command-line arguments to the builder. */ if (key == "args") { - state.forceList(*i.value, pos); - for (unsigned int n = 0; n < i.value->listSize(); ++n) { - string s = state.coerceToString(posDrvName, *i.value->listElems()[n], context, true); + state.forceList(*i->value, pos); + for (unsigned int n = 0; n < i->value->listSize(); ++n) { + string s = state.coerceToString(posDrvName, *i->value->listElems()[n], context, true); drv.args.push_back(s); } } @@ -516,39 +548,51 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * /* All other attributes are passed to the builder through the environment. */ else { - string s = state.coerceToString(posDrvName, *i.value, context, true); - drv.env[key] = s; - if (key == "builder") drv.builder = s; - else if (i.name == state.sSystem) drv.platform = s; - else if (i.name == state.sName) { - drvName = s; - printMsg(lvlVomit, format("derivation name is ‘%1%’") % drvName); - } - else if (key == "outputHash") outputHash = s; - else if (key == "outputHashAlgo") outputHashAlgo = s; - else if (key == "outputHashMode") { - if (s == "recursive") outputHashRecursive = true; - else if (s == "flat") outputHashRecursive = false; - else throw EvalError(format("invalid value ‘%1%’ for ‘outputHashMode’ attribute, at %2%") % s % posDrvName); - } - else if (key == "outputs") { - Strings tmp = tokenizeString(s); - outputs.clear(); - for (auto & j : tmp) { - if (outputs.find(j) != outputs.end()) - throw EvalError(format("duplicate derivation output ‘%1%’, at %2%") % j % posDrvName); - /* !!! Check whether j is a valid attribute - name. */ - /* Derivations cannot be named ‘drv’, because - then we'd have an attribute ‘drvPath’ in - the resulting set. */ - if (j == "drv") - throw EvalError(format("invalid derivation output name ‘drv’, at %1%") % posDrvName); - outputs.insert(j); + + if (jsonObject) { + + if (i->name == state.sStructuredAttrs) continue; + + auto placeholder(jsonObject->placeholder(key)); + printValueAsJSON(state, true, *i->value, placeholder, context); + + if (i->name == state.sBuilder) + drv.builder = state.forceString(*i->value, context, posDrvName); + else if (i->name == state.sSystem) + drv.platform = state.forceStringNoCtx(*i->value, posDrvName); + else if (i->name == state.sName) + drvName = state.forceStringNoCtx(*i->value, posDrvName); + else if (key == "outputHash") + outputHash = state.forceStringNoCtx(*i->value, posDrvName); + else if (key == "outputHashAlgo") + outputHashAlgo = state.forceStringNoCtx(*i->value, posDrvName); + else if (key == "outputHashMode") + handleHashMode(state.forceStringNoCtx(*i->value, posDrvName)); + else if (key == "outputs") { + /* Require ‘outputs’ to be a list of strings. 
*/ + state.forceList(*i->value, posDrvName); + Strings ss; + for (unsigned int n = 0; n < i->value->listSize(); ++n) + ss.emplace_back(state.forceStringNoCtx(*i->value->listElems()[n], posDrvName)); + handleOutputs(ss); + } + + } else { + auto s = state.coerceToString(posDrvName, *i->value, context, true); + drv.env.emplace(key, s); + if (i->name == state.sBuilder) drv.builder = s; + else if (i->name == state.sSystem) drv.platform = s; + else if (i->name == state.sName) { + drvName = s; + printMsg(lvlVomit, format("derivation name is ‘%1%’") % drvName); } - if (outputs.empty()) - throw EvalError(format("derivation cannot have an empty set of outputs, at %1%") % posDrvName); + else if (key == "outputHash") outputHash = s; + else if (key == "outputHashAlgo") outputHashAlgo = s; + else if (key == "outputHashMode") handleHashMode(s); + else if (key == "outputs") + handleOutputs(tokenizeString(s)); } + } } catch (Error & e) { @@ -558,6 +602,11 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * } } + if (jsonObject) { + jsonObject.reset(); + drv.env.emplace("__json", jsonBuf.str()); + } + /* Everything in the context of the strings in the derivation attributes should be added as dependencies of the resulting derivation. */ diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 6250de13cbb..d76c8d1727f 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -865,6 +865,9 @@ class DerivationGoal : public Goal /* Fill in the environment for the builder. */ void initEnv(); + /* Write a JSON file containing the derivation attributes. */ + void writeStructuredAttrs(); + /* Make a file owned by the builder. */ void chownToBuilder(const Path & path); @@ -1726,13 +1729,15 @@ void DerivationGoal::startBuilder() tmpDirInSandbox = useChroot ? canonPath("/tmp", true) + "/nix-build-" + drvName + "-0" : tmpDir; chownToBuilder(tmpDir); - /* Construct the environment passed to the builder. */ - initEnv(); - /* Substitute output placeholders with the actual output paths. */ for (auto & output : drv->outputs) inputRewrites[hashPlaceholder(output.first)] = output.second.path; + /* Construct the environment passed to the builder. */ + initEnv(); + + writeStructuredAttrs(); + /* Handle exportReferencesGraph(), if set. */ doExportReferencesGraph(); @@ -2148,22 +2153,29 @@ void DerivationGoal::initEnv() /* The maximum number of cores to utilize for parallel building. */ env["NIX_BUILD_CORES"] = (format("%d") % settings.buildCores).str(); - /* Add all bindings specified in the derivation via the - environments, except those listed in the passAsFile - attribute. Those are passed as file names pointing to - temporary files containing the contents. */ - StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); - int fileNr = 0; - for (auto & i : drv->env) { - if (passAsFile.find(i.first) == passAsFile.end()) { - env[i.first] = i.second; - } else { - string fn = ".attr-" + std::to_string(fileNr++); - Path p = tmpDir + "/" + fn; - writeFile(p, i.second); - chownToBuilder(p); - env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; + /* In non-structured mode, add all bindings specified in the + derivation via the environments, except those listed in the + passAsFile attribute. Those are passed as file names pointing + to temporary files containing the contents. Note that + passAsFile is ignored in structure mode because it's not + needed (attributes are not passed through the environment, so + there is no size constraint). 
*/ + if (!drv->env.count("__json")) { + + StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); + int fileNr = 0; + for (auto & i : drv->env) { + if (passAsFile.find(i.first) == passAsFile.end()) { + env[i.first] = i.second; + } else { + string fn = ".attr-" + std::to_string(fileNr++); + Path p = tmpDir + "/" + fn; + writeFile(p, i.second); + chownToBuilder(p); + env[i.first + "Path"] = tmpDirInSandbox + "/" + fn; + } } + } /* For convenience, set an environment pointing to the top build @@ -2201,6 +2213,15 @@ void DerivationGoal::initEnv() } +void DerivationGoal::writeStructuredAttrs() +{ + auto json = drv->env.find("__json"); + if (json == drv->env.end()) return; + + writeFile(tmpDir + "/.attrs.json", rewriteStrings(json->second, inputRewrites)); +} + + void DerivationGoal::chownToBuilder(const Path & path) { if (!buildUser) return; From c2b0d8749f7e77afc1c4b3e8dd36b7ee9720af4a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 26 Jan 2017 20:36:20 +0100 Subject: [PATCH 0080/2196] exportReferencesGraph: Export more complete info in JSON format MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This writes info about every path in the closure in the same format as ‘nix path-info --json’. Thus it also includes NAR hashes and sizes. Example: [ { "path": "/nix/store/10h6li26i7g6z3mdpvra09yyf10mmzdr-hello-2.10", "narHash": "sha256:0ckdc4z20kkmpqdilx0wl6cricxv90lh85xpv2qljppcmz6vzcxl", "narSize": 197648, "references": [ "/nix/store/10h6li26i7g6z3mdpvra09yyf10mmzdr-hello-2.10", "/nix/store/27binbdy296qvjycdgr1535v8872vz3z-glibc-2.24" ], "closureSize": 20939776 }, { "path": "/nix/store/27binbdy296qvjycdgr1535v8872vz3z-glibc-2.24", "narHash": "sha256:1nfn3m3p98y1c0kd0brp80dn9n5mycwgrk183j17rajya0h7gax3", "narSize": 20742128, "references": [ "/nix/store/27binbdy296qvjycdgr1535v8872vz3z-glibc-2.24" ], "closureSize": 20742128 } ] Fixes #1134. --- src/libstore/build.cc | 12 +++++++- src/libstore/store-api.cc | 59 +++++++++++++++++++++++++++++++++++++++ src/libstore/store-api.hh | 14 ++++++++++ src/nix/path-info.cc | 55 ++++-------------------------------- 4 files changed, 90 insertions(+), 50 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d76c8d1727f..7fb5271f4a8 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -10,6 +10,7 @@ #include "builtins.hh" #include "finally.hh" #include "compression.hh" +#include "json.hh" #include #include @@ -2273,9 +2274,18 @@ void DerivationGoal::doExportReferencesGraph() } } - /* Write closure info to `fileName'. */ + /* Write closure info to . */ writeFile(tmpDir + "/" + fileName, worker.store.makeValidityRegistration(paths, false, false)); + + /* Write a more comprehensive JSON serialisation to + .json. 
*/ + std::ostringstream str; + { + JSONPlaceholder jsonRoot(str, true); + worker.store.pathInfoToJSON(jsonRoot, paths, false, true); + } + writeFile(tmpDir + "/" + fileName + ".json", str.str()); } } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 8fdd6277155..a42d1183405 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -4,6 +4,7 @@ #include "util.hh" #include "nar-info-disk-cache.hh" #include "thread-pool.hh" +#include "json.hh" #include @@ -439,6 +440,64 @@ string Store::makeValidityRegistration(const PathSet & paths, } +void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths, + bool includeImpureInfo, bool showClosureSize) +{ + auto jsonList = jsonOut.list(); + + for (auto storePath : storePaths) { + auto info = queryPathInfo(storePath); + storePath = info->path; + + auto jsonPath = jsonList.object(); + jsonPath + .attr("path", storePath) + .attr("narHash", info->narHash.to_string()) + .attr("narSize", info->narSize); + + { + auto jsonRefs = jsonPath.list("references"); + for (auto & ref : info->references) + jsonRefs.elem(ref); + } + + if (info->ca != "") + jsonPath.attr("ca", info->ca); + + if (showClosureSize) + jsonPath.attr("closureSize", getClosureSize(storePath)); + + if (!includeImpureInfo) continue; + + if (info->deriver != "") + jsonPath.attr("deriver", info->deriver); + + if (info->registrationTime) + jsonPath.attr("registrationTime", info->registrationTime); + + if (info->ultimate) + jsonPath.attr("ultimate", info->ultimate); + + if (!info->sigs.empty()) { + auto jsonSigs = jsonPath.list("signatures"); + for (auto & sig : info->sigs) + jsonSigs.elem(sig); + } + } +} + + +unsigned long long Store::getClosureSize(const Path & storePath) +{ + unsigned long long totalSize = 0; + PathSet closure; + computeFSClosure(storePath, closure, false, false); + for (auto & p : closure) + totalSize += queryPathInfo(p)->narSize; + return totalSize; +} + + const Store::Stats & Store::getStats() { { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index ec3bf5a6fd8..3fee999072f 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -22,6 +22,7 @@ struct Derivation; class FSAccessor; class NarInfoDiskCache; class Store; +class JSONPlaceholder; /* Size of the hash part of store paths, in base-32 characters. */ @@ -469,6 +470,19 @@ public: string makeValidityRegistration(const PathSet & paths, bool showDerivers, bool showHash); + /* Write a JSON representation of store path metadata, such as the + hash and the references. If ‘includeImpureInfo’ is true, + variable elements such as the registration time are + included. If ‘showClosureSize’ is true, the closure size of + each path is included. */ + void pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths, + bool includeImpureInfo, bool showClosureSize); + + /* Return the size of the closure of the specified path, that is, + the sum of the size of the NAR serialisation of each path in + the closure. */ + unsigned long long getClosureSize(const Path & storePath); + /* Optimise the disk space usage of the Nix store by hard-linking files with the same contents. 
*/ virtual void optimiseStore() = 0; diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index a9b33e1877d..0f9a1125f2e 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -65,55 +65,12 @@ struct CmdPathInfo : StorePathsCommand for (auto & storePath : storePaths) pathLen = std::max(pathLen, storePath.size()); - auto getClosureSize = [&](const Path & storePath) -> unsigned long long { - unsigned long long totalSize = 0; - PathSet closure; - store->computeFSClosure(storePath, closure, false, false); - for (auto & p : closure) - totalSize += store->queryPathInfo(p)->narSize; - return totalSize; - }; - if (json) { - JSONList jsonRoot(std::cout, true); - - for (auto storePath : storePaths) { - auto info = store->queryPathInfo(storePath); - storePath = info->path; - - auto jsonPath = jsonRoot.object(); - jsonPath - .attr("path", storePath) - .attr("narHash", info->narHash.to_string()) - .attr("narSize", info->narSize); - - if (showClosureSize) - jsonPath.attr("closureSize", getClosureSize(storePath)); - - if (info->deriver != "") - jsonPath.attr("deriver", info->deriver); - - { - auto jsonRefs = jsonPath.list("references"); - for (auto & ref : info->references) - jsonRefs.elem(ref); - } - - if (info->registrationTime) - jsonPath.attr("registrationTime", info->registrationTime); - - if (info->ultimate) - jsonPath.attr("ultimate", info->ultimate); - - if (info->ca != "") - jsonPath.attr("ca", info->ca); - - if (!info->sigs.empty()) { - auto jsonSigs = jsonPath.list("signatures"); - for (auto & sig : info->sigs) - jsonSigs.elem(sig); - } - } + JSONPlaceholder jsonRoot(std::cout, true); + store->pathInfoToJSON(jsonRoot, + // FIXME: preserve order? + PathSet(storePaths.begin(), storePaths.end()), + true, showClosureSize); } else { @@ -128,7 +85,7 @@ struct CmdPathInfo : StorePathsCommand std::cout << '\t' << std::setw(11) << info->narSize; if (showClosureSize) - std::cout << '\t' << std::setw(11) << getClosureSize(storePath); + std::cout << '\t' << std::setw(11) << store->getClosureSize(storePath); if (showSigs) { std::cout << '\t'; From f57a38b109dbea26239a1cc001ff4b608006af6d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 27 Jan 2017 12:57:49 +0100 Subject: [PATCH 0081/2196] Remove unused NARExistence table --- src/libstore/nar-info-disk-cache.cc | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index d28ff42c7f2..ff4bd651aa5 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -36,15 +36,6 @@ create table if not exists NARs ( foreign key (cache) references BinaryCaches(id) on delete cascade ); -create table if not exists NARExistence ( - cache integer not null, - storePath text not null, - exist integer not null, - timestamp integer not null, - primary key (cache, storePath), - foreign key (cache) references BinaryCaches(id) on delete cascade -); - )sql"; class NarInfoDiskCacheImpl : public NarInfoDiskCache @@ -155,7 +146,6 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache auto queryNAR(state->queryNAR.use()(cache.id)(hashPart)); if (!queryNAR.next()) - // FIXME: check NARExistence return {oUnknown, 0}; if (!queryNAR.getInt(13)) From 211bc7f0e6daa65fe4083334e2411bc441067904 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 27 Jan 2017 13:17:08 +0100 Subject: [PATCH 0082/2196] Implement TTL for binary cache lookups --- src/libstore/nar-info-disk-cache.cc | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git 
a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index ff4bd651aa5..ed2f18ffe82 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -42,8 +42,9 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache { public: - /* How long negative lookups are valid. */ + /* How long negative and positive lookups are valid. */ const int ttlNegative = 3600; + const int ttlPositive = 30 * 24 * 3600; struct Cache { @@ -94,7 +95,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache "insert or replace into NARs(cache, hashPart, timestamp, present) values (?, ?, ?, 0)"); state->queryNAR.create(state->db, - "select * from NARs where cache = ? and hashPart = ?"); + "select * from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))"); } Cache & getCache(State & state, const std::string & uri) @@ -143,7 +144,13 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache auto & cache(getCache(*state, uri)); - auto queryNAR(state->queryNAR.use()(cache.id)(hashPart)); + auto now = time(0); + + auto queryNAR(state->queryNAR.use() + (cache.id) + (hashPart) + (now - ttlNegative) + (now - ttlPositive)); if (!queryNAR.next()) return {oUnknown, 0}; @@ -153,8 +160,6 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache auto narInfo = make_ref(); - // FIXME: implement TTL. - auto namePart = queryNAR.getStr(2); narInfo->path = cache.storeDir + "/" + hashPart + (namePart.empty() ? "" : "-" + namePart); From cb1951e746cb04f331b126c746758c96bb9fdc81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 27 Jan 2017 15:19:33 +0100 Subject: [PATCH 0083/2196] Periodically purge binary-cache.sqlite --- src/libstore/nar-info-disk-cache.cc | 31 ++++++++++++++++++++++++++++- src/libstore/sqlite.hh | 1 + 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index ed2f18ffe82..13b67b81f35 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -36,6 +36,11 @@ create table if not exists NARs ( foreign key (cache) references BinaryCaches(id) on delete cascade ); +create table if not exists LastPurge ( + dummy text primary key, + value integer +); + )sql"; class NarInfoDiskCacheImpl : public NarInfoDiskCache @@ -46,6 +51,9 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache const int ttlNegative = 3600; const int ttlPositive = 30 * 24 * 3600; + /* How often to purge expired entries from the cache. */ + const int purgeInterval = 24 * 3600; + struct Cache { int id; @@ -57,7 +65,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache struct State { SQLite db; - SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, queryNAR; + SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, queryNAR, purgeCache; std::map caches; }; @@ -96,6 +104,27 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache state->queryNAR.create(state->db, "select * from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))"); + + /* Periodically purge expired entries from the database. */ + auto now = time(0); + + SQLiteStmt queryLastPurge(state->db, "select value from LastPurge"); + auto queryLastPurge_(queryLastPurge.use()); + + if (!queryLastPurge_.next() || queryLastPurge_.getInt(0) < now - purgeInterval) { + SQLiteStmt(state->db, + "delete from NARs where ((present = 0 and timestamp < ?) 
or (present = 1 and timestamp < ?))") + .use() + (now - ttlNegative) + (now - ttlPositive) + .exec(); + + debug("deleted %d entries from the NAR info disk cache", sqlite3_changes(state->db)); + + SQLiteStmt(state->db, + "insert or replace into LastPurge(dummy, value) values ('', ?)") + .use()(now).exec(); + } } Cache & getCache(State & state, const std::string & uri) diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 7c1ed538215..4d347a2e56a 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -31,6 +31,7 @@ struct SQLiteStmt sqlite3 * db = 0; sqlite3_stmt * stmt = 0; SQLiteStmt() { } + SQLiteStmt(sqlite3 * db, const std::string & s) { create(db, s); } void create(sqlite3 * db, const std::string & s); ~SQLiteStmt(); operator sqlite3_stmt * () { return stmt; } From 583ff4ec46fe1fa758f0fa4df1d8b37d9192736c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 27 Jan 2017 16:13:22 +0100 Subject: [PATCH 0084/2196] release.nix: Drop nix-shell references --- release.nix | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/release.nix b/release.nix index d825dd58376..ace0f9cc89a 100644 --- a/release.nix +++ b/release.nix @@ -20,7 +20,7 @@ let name = "nix-tarball"; version = builtins.readFile ./version; versionSuffix = if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}"; - src = if lib.inNixShell then null else nix; + src = nix; inherit officialRelease; buildInputs = @@ -28,7 +28,8 @@ let pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl autoconf-archive - ] ++ lib.optional (!lib.inNixShell) git; + git + ]; configureFlags = '' --with-dbi=${perlPackages.DBI}/${perl.libPrefix} From 7a65b2470eb53a320749d76746fbf65790183d9d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Feb 2017 13:00:21 +0100 Subject: [PATCH 0085/2196] Restore default signal handling in child processes In particular, this fixes Ctrl-C in nix-shell sessions. --- src/download-via-ssh/download-via-ssh.cc | 1 + src/libmain/shared.cc | 29 ++++++++---------------- src/libstore/build.cc | 4 ++-- src/libstore/ssh-store.cc | 1 + src/libutil/util.cc | 24 ++++++++++++-------- src/libutil/util.hh | 7 +++--- src/nix-build/nix-build.cc | 2 ++ 7 files changed, 33 insertions(+), 35 deletions(-) diff --git a/src/download-via-ssh/download-via-ssh.cc b/src/download-via-ssh/download-via-ssh.cc index ff28a60ff4b..4a1ba9a1123 100644 --- a/src/download-via-ssh/download-via-ssh.cc +++ b/src/download-via-ssh/download-via-ssh.cc @@ -30,6 +30,7 @@ static std::pair connect(const string & conn) throw SysError("dupping stdin"); if (dup2(from.writeSide, STDOUT_FILENO) == -1) throw SysError("dupping stdout"); + restoreSignals(); execlp("ssh", "ssh", "-x", "-T", conn.c_str(), "nix-store --serve", NULL); throw SysError("executing ssh"); }); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index d564e03853e..52cb2312826 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -119,15 +119,9 @@ void initNix() startSignalHandlerThread(); - /* Ignore SIGPIPE. */ + /* Reset SIGCHLD to its default. */ struct sigaction act; sigemptyset(&act.sa_mask); - act.sa_handler = SIG_IGN; - act.sa_flags = 0; - if (sigaction(SIGPIPE, &act, 0)) - throw SysError("ignoring SIGPIPE"); - - /* Reset SIGCHLD to its default. 
*/ act.sa_handler = SIG_DFL; act.sa_flags = 0; if (sigaction(SIGCHLD, &act, 0)) @@ -252,7 +246,7 @@ void printVersion(const string & programName) void showManPage(const string & name) { - restoreSIGPIPE(); + restoreSignals(); execlp("man", "man", name.c_str(), NULL); throw SysError(format("command ‘man %1%’ failed") % name.c_str()); } @@ -305,16 +299,6 @@ RunPager::RunPager() if (!pager) pager = getenv("PAGER"); if (pager && ((string) pager == "" || (string) pager == "cat")) return; - /* Ignore SIGINT. The pager will handle it (and we'll get - SIGPIPE). */ - struct sigaction act; - act.sa_handler = SIG_IGN; - act.sa_flags = 0; - sigemptyset(&act.sa_mask); - if (sigaction(SIGINT, &act, 0)) throw SysError("ignoring SIGINT"); - - restoreSIGPIPE(); - Pipe toPager; toPager.create(); @@ -323,6 +307,7 @@ RunPager::RunPager() throw SysError("dupping stdin"); if (!getenv("LESS")) setenv("LESS", "FRSXMK", 1); + restoreSignals(); if (pager) execl("/bin/sh", "sh", "-c", pager, NULL); execlp("pager", "pager", NULL); @@ -331,6 +316,8 @@ RunPager::RunPager() throw SysError(format("executing ‘%1%’") % pager); }); + pid.setKillSignal(SIGINT); + if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("dupping stdout"); } @@ -345,7 +332,11 @@ RunPager::~RunPager() pid.wait(); } } catch (...) { - ignoreException(); + try { + pid.kill(true); + } catch (...) { + ignoreException(); + } } } diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 7fb5271f4a8..40927c06326 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -400,6 +400,8 @@ void Goal::trace(const format & f) /* Common initialisation performed in child processes. */ static void commonChildInit(Pipe & logPipe) { + restoreSignals(); + /* Put the child in a separate session (and thus a separate process group) so that it has no controlling terminal (meaning that e.g. ssh cannot open /dev/tty) and it doesn't receive @@ -2662,8 +2664,6 @@ void DerivationGoal::runChild() for (auto & i : drv->args) args.push_back(rewriteStrings(i, inputRewrites)); - restoreSIGPIPE(); - /* Indicate that we managed to set up the build environment. 
*/ writeFull(STDERR_FILENO, string("\1\n")); diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 3d01594009a..f5d0a270438 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -91,6 +91,7 @@ ref SSHStore::openConnection() { if ((pid_t) sshMaster == -1) { sshMaster = startProcess([&]() { + restoreSignals(); if (key.empty()) execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), uri.c_str(), NULL); else diff --git a/src/libutil/util.cc b/src/libutil/util.cc index ca4edc2cd6c..6c4c5c969d8 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -860,6 +860,8 @@ string runProgram(Path program, bool searchPath, const Strings & args, Strings args_(args); args_.push_front(program); + restoreSignals(); + if (searchPath) execvp(program.c_str(), stringsToCharPtrs(args_).data()); else @@ -909,16 +911,6 @@ void closeOnExec(int fd) } -void restoreSIGPIPE() -{ - struct sigaction act; - act.sa_handler = SIG_DFL; - act.sa_flags = 0; - sigemptyset(&act.sa_mask); - if (sigaction(SIGPIPE, &act, 0)) throw SysError("resetting SIGPIPE"); -} - - ////////////////////////////////////////////////////////////////////// @@ -1218,19 +1210,31 @@ void triggerInterrupt() } } +static sigset_t savedSignalMask; + void startSignalHandlerThread() { + if (sigprocmask(SIG_BLOCK, nullptr, &savedSignalMask)) + throw SysError("quering signal mask"); + sigset_t set; sigemptyset(&set); sigaddset(&set, SIGINT); sigaddset(&set, SIGTERM); sigaddset(&set, SIGHUP); + sigaddset(&set, SIGPIPE); if (pthread_sigmask(SIG_BLOCK, &set, nullptr)) throw SysError("blocking signals"); std::thread(signalHandlerThread, set).detach(); } +void restoreSignals() +{ + if (sigprocmask(SIG_SETMASK, &savedSignalMask, nullptr)) + throw SysError("restoring signals"); +} + /* RAII helper to automatically deregister a callback. */ struct InterruptCallbackImpl : InterruptCallback { diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 07141ffed6b..cfaaf1486e9 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -256,10 +256,6 @@ void closeMostFDs(const set & exceptions); /* Set the close-on-exec flag for the given file descriptor. */ void closeOnExec(int fd); -/* Restore default handling of SIGPIPE, otherwise some programs will - randomly say "Broken pipe". */ -void restoreSIGPIPE(); - /* User interruption. */ @@ -423,6 +419,9 @@ void callSuccess( on the current thread (and thus any threads created by it). */ void startSignalHandlerThread(); +/* Restore default signal handling. */ +void restoreSignals(); + struct InterruptCallback { virtual ~InterruptCallback() { }; diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 3eb2d2c0b7a..ee030c57b6b 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -452,6 +452,8 @@ int main(int argc, char ** argv) auto argPtrs = stringsToCharPtrs(args); + restoreSignals(); + execvp(getEnv("NIX_BUILD_SHELL", "bash").c_str(), argPtrs.data()); throw SysError("executing shell"); From 1351b0df87a0984914769c5dc76489618b3a3fec Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 2 Feb 2017 12:20:28 +0100 Subject: [PATCH 0086/2196] exportReferencesGraph: Only export in JSON format when in structured mode This prevents breaking compatibility with builders that read "closure.*", since they would accidentally pick up the new JSON files. 
--- src/libstore/build.cc | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 40927c06326..5d6fff4e349 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2276,18 +2276,24 @@ void DerivationGoal::doExportReferencesGraph() } } - /* Write closure info to . */ - writeFile(tmpDir + "/" + fileName, - worker.store.makeValidityRegistration(paths, false, false)); + if (!drv->env.count("__json")) { + + /* Write closure info to . */ + writeFile(tmpDir + "/" + fileName, + worker.store.makeValidityRegistration(paths, false, false)); + + } else { + + /* Write a more comprehensive JSON serialisation to + . */ + std::ostringstream str; + { + JSONPlaceholder jsonRoot(str, true); + worker.store.pathInfoToJSON(jsonRoot, paths, false, true); + } + writeFile(tmpDir + "/" + fileName, str.str()); - /* Write a more comprehensive JSON serialisation to - .json. */ - std::ostringstream str; - { - JSONPlaceholder jsonRoot(str, true); - worker.store.pathInfoToJSON(jsonRoot, paths, false, true); } - writeFile(tmpDir + "/" + fileName + ".json", str.str()); } } From e6e74f987f0fa284d220432d426eb965269a97d6 Mon Sep 17 00:00:00 2001 From: Renzo Carbonara Date: Wed, 1 Feb 2017 13:37:34 +0100 Subject: [PATCH 0087/2196] Add netrc-file support --- doc/manual/command-ref/conf-file.xml | 15 +++++++++++++++ src/libstore/download.cc | 5 +++++ 2 files changed, 20 insertions(+) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 6c0af39ecda..a7d60538c21 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -430,6 +430,21 @@ flag, e.g. --option gc-keep-outputs false. + netrc-file + + If set to an absolute path to a netrc + file, Nix will use the HTTP authentication credentials in this file when + trying to download from a remote host through HTTP or HTTPS. Defaults to + $NIX_CONF_DIR/netrc. + + The netrc file consists of zero or more lines + like: machine my-machine login + my-username password + my-password. 
+ + + + system This option specifies the canonical Nix system diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 074e0ca6642..8f2387b6325 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -230,6 +230,11 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); } + Path netrcFile = settings.get("netrc-file", + (format("%1%/%2%") % settings.nixConfDir % "netrc").str()); + curl_easy_setopt(req, CURLOPT_NETRC_FILE, netrcFile.c_str()); + curl_easy_setopt(req, CURLOPT_NETRC, CURL_NETRC_OPTIONAL); + result.data = std::make_shared(); } From 27dc76c1a5dbe654465245ff5f6bc22e2c8902da Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 18:49:17 +0100 Subject: [PATCH 0088/2196] Remove build-remote.pl.in --- .gitignore | 1 - local.mk | 2 +- scripts/build-remote.pl.in | 275 ------------------------------------- scripts/local.mk | 2 - 4 files changed, 1 insertion(+), 279 deletions(-) delete mode 100755 scripts/build-remote.pl.in diff --git a/.gitignore b/.gitignore index 92f95fe1fcb..4d0ac32b16f 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ Makefile.config # /scripts/ /scripts/nix-profile.sh /scripts/nix-copy-closure -/scripts/build-remote.pl /scripts/nix-reduce-build /scripts/nix-http-export.cgi diff --git a/local.mk b/local.mk index 2541f3f3229..eebd7196119 100644 --- a/local.mk +++ b/local.mk @@ -10,7 +10,7 @@ clean-files += Makefile.config GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr \ -Wno-unneeded-internal-declaration -$(foreach i, config.h $(call rwildcard, src/lib*, *.hh) src/nix-store/serve-protocol.hh, \ +$(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644))) $(foreach i, $(call rwildcard, src/boost, *.hpp), $(eval $(call install-file-in, $(i), $(includedir)/nix/$(patsubst src/%/,%,$(dir $(i))), 0644))) diff --git a/scripts/build-remote.pl.in b/scripts/build-remote.pl.in deleted file mode 100755 index b5fc629eb49..00000000000 --- a/scripts/build-remote.pl.in +++ /dev/null @@ -1,275 +0,0 @@ -#! @perl@ -w @perlFlags@ - -use utf8; -use Fcntl qw(:DEFAULT :flock); -use English '-no_match_vars'; -use IO::Handle; -use Nix::Config; -use Nix::SSH; -use Nix::CopyClosure; -use Nix::Store; -use Encode; -no warnings('once'); - -STDERR->autoflush(1); -binmode STDERR, ":encoding(utf8)"; - -my $debug = defined $ENV{NIX_DEBUG_HOOK}; - - -# General operation: -# -# Try to find a free machine of type $neededSystem. We do this as -# follows: -# - We acquire an exclusive lock on $currentLoad/main-lock. -# - For each machine $machine of type $neededSystem and for each $slot -# less than the maximum load for that machine, we try to get an -# exclusive lock on $currentLoad/$machine-$slot (without blocking). -# If we get such a lock, we send "accept" to the caller. Otherwise, -# we send "postpone" and exit. -# - We release the exclusive lock on $currentLoad/main-lock. -# - We perform the build on $neededSystem. -# - We release the exclusive lock on $currentLoad/$machine-$slot. -# -# The nice thing about this scheme is that if we die prematurely, the -# locks are released automatically. - - -# Make sure that we don't get any SSH passphrase or host key popups - -# if there is any problem it should fail, not do something -# interactive. 
-$ENV{"DISPLAY"} = ""; -$ENV{"SSH_ASKPASS"} = ""; - - -sub sendReply { - my $reply = shift; - print STDERR "# $reply\n"; -} - -sub all { $_ || return 0 for @_; 1 } - - -# Initialisation. -my $loadIncreased = 0; - -my ($localSystem, $maxSilentTime, $buildTimeout) = @ARGV; - -my $currentLoad = $ENV{"NIX_CURRENT_LOAD"} // "/run/nix/current-load"; -my $conf = $ENV{"NIX_REMOTE_SYSTEMS"} // "@sysconfdir@/nix/machines"; - - -sub openSlotLock { - my ($machine, $slot) = @_; - my $slotLockFn = "$currentLoad/" . (join '+', @{$machine->{systemTypes}}) . "-" . $machine->{hostName} . "-$slot"; - my $slotLock = new IO::Handle; - sysopen $slotLock, "$slotLockFn", O_RDWR|O_CREAT, 0600 or die; - return $slotLock; -} - - -# Read the list of machines. -my @machines; -if (defined $conf && -e $conf) { - open CONF, "<$conf" or die; - while () { - chomp; - s/\#.*$//g; - next if /^\s*$/; - my @tokens = split /\s/, $_; - my @supportedFeatures = split(/,/, $tokens[5] || ""); - my @mandatoryFeatures = split(/,/, $tokens[6] || ""); - push @machines, - { hostName => $tokens[0] - , systemTypes => [ split(/,/, $tokens[1]) ] - , sshKey => $tokens[2] - , maxJobs => int($tokens[3]) - , speedFactor => 1.0 * (defined $tokens[4] ? int($tokens[4]) : 1) - , supportedFeatures => [ @supportedFeatures, @mandatoryFeatures ] - , mandatoryFeatures => [ @mandatoryFeatures ] - , enabled => 1 - }; - } - close CONF; -} - - - -# Wait for the calling process to ask us whether we can build some derivation. -my ($drvPath, $hostName, $slotLock); -my ($from, $to); - -REQ: while (1) { - $_ = || exit 0; - (my $amWilling, my $neededSystem, $drvPath, my $requiredFeatures) = split; - my @requiredFeatures = split /,/, $requiredFeatures; - - my $canBuildLocally = $amWilling && ($localSystem eq $neededSystem); - - if (!defined $currentLoad) { - sendReply "decline"; - next; - } - - # Acquire the exclusive lock on $currentLoad/main-lock. - mkdir $currentLoad, 0777 or die unless -d $currentLoad; - my $mainLock = "$currentLoad/main-lock"; - sysopen MAINLOCK, "$mainLock", O_RDWR|O_CREAT, 0600 or die; - flock(MAINLOCK, LOCK_EX) or die; - - - while (1) { - # Find all machine that can execute this build, i.e., that - # support builds for the given platform and features, and are - # not at their job limit. - my $rightType = 0; - my @available = (); - LOOP: foreach my $cur (@machines) { - if ($cur->{enabled} - && (grep { $neededSystem eq $_ } @{$cur->{systemTypes}}) - && all(map { my $f = $_; 0 != grep { $f eq $_ } @{$cur->{supportedFeatures}} } (@requiredFeatures, @mandatoryFeatures)) - && all(map { my $f = $_; 0 != grep { $f eq $_ } @requiredFeatures } @{$cur->{mandatoryFeatures}}) - ) - { - $rightType = 1; - - # We have a machine of the right type. Determine the load on - # the machine. - my $slot = 0; - my $load = 0; - my $free; - while ($slot < $cur->{maxJobs}) { - my $slotLock = openSlotLock($cur, $slot); - if (flock($slotLock, LOCK_EX | LOCK_NB)) { - $free = $slot unless defined $free; - flock($slotLock, LOCK_UN) or die; - } else { - $load++; - } - close $slotLock; - $slot++; - } - - push @available, { machine => $cur, load => $load, free => $free } - if $load < $cur->{maxJobs}; - } - } - - if ($debug) { - print STDERR "load on " . $_->{machine}->{hostName} . " = " . $_->{load} . "\n" - foreach @available; - } - - - # Didn't find any available machine? Then decline or postpone. - if (scalar @available == 0) { - # Postpone if we have a machine of the right type, except - # if the local system can and wants to do the build. 
- if ($rightType && !$canBuildLocally) { - sendReply "postpone"; - } else { - sendReply "decline"; - } - close MAINLOCK; - next REQ; - } - - - # Prioritise the available machines as follows: - # - First by load divided by speed factor, rounded to the nearest - # integer. This causes fast machines to be preferred over slow - # machines with similar loads. - # - Then by speed factor. - # - Finally by load. - sub lf { my $x = shift; return int($x->{load} / $x->{machine}->{speedFactor} + 0.4999); } - @available = sort - { lf($a) <=> lf($b) - || $b->{machine}->{speedFactor} <=> $a->{machine}->{speedFactor} - || $a->{load} <=> $b->{load} - } @available; - - - # Select the best available machine and lock a free slot. - my $selected = $available[0]; - my $machine = $selected->{machine}; - - $slotLock = openSlotLock($machine, $selected->{free}); - flock($slotLock, LOCK_EX | LOCK_NB) or die; - utime undef, undef, $slotLock; - - close MAINLOCK; - - - # Connect to the selected machine. - my @sshOpts = ("-i", $machine->{sshKey}); - $hostName = $machine->{hostName}; - eval { - ($from, $to) = connectToRemoteNix($hostName, \@sshOpts, "2>&4"); - # FIXME: check if builds are inhibited. - }; - last REQ unless $@; - print STDERR "$@"; - warn "unable to open SSH connection to ‘$hostName’, trying other available machines...\n"; - $from = undef; - $to = undef; - $machine->{enabled} = 0; - } -} - - -# Tell Nix we've accepted the build. -sendReply "accept"; -my @inputs = split /\s/, readline(STDIN); -my @outputs = split /\s/, readline(STDIN); - - -# Copy the derivation and its dependencies to the build machine. This -# is guarded by an exclusive lock per machine to prevent multiple -# build-remote instances from copying to a machine simultaneously. -# That's undesirable because we may end up with N instances uploading -# the same missing path simultaneously, causing the effective network -# bandwidth and target disk speed to be divided by N. -my $uploadLock = "$currentLoad/$hostName.upload-lock"; -sysopen UPLOADLOCK, "$uploadLock", O_RDWR|O_CREAT, 0600 or die; -eval { - local $SIG{ALRM} = sub { die "alarm\n" }; - # Don't wait forever, so that a process that gets stuck while - # holding the lock doesn't block everybody else indefinitely. - # It's safe to continue after a timeout, just (potentially) - # inefficient. - alarm 15 * 60; - flock(UPLOADLOCK, LOCK_EX); - alarm 0; -}; -if ($@) { - die unless $@ eq "alarm\n"; - print STDERR "somebody is hogging $uploadLock, continuing...\n"; - unlink $uploadLock; -} -Nix::CopyClosure::copyToOpen($from, $to, $hostName, [ $drvPath, @inputs ], 0, 0); -close UPLOADLOCK; - - -# Perform the build. -print STDERR "building ‘$drvPath’ on ‘$hostName’\n"; -writeInt(6, $to) or die; # == cmdBuildPaths -writeStrings([$drvPath], $to); -writeInt($maxSilentTime, $to); -writeInt($buildTimeout, $to); -my $res = readInt($from); -if ($res != 0) { - my $msg = decode("utf-8", readString($from)); - print STDERR "error: $msg on ‘$hostName’\n"; - exit $res; -} - - -# Copy the output from the build machine. 
-my @outputs2 = grep { !isValidPath($_) } @outputs; -if (scalar @outputs2 > 0) { - writeInt(5, $to); # == cmdExportPaths - writeInt(0, $to); # don't sign - writeStrings(\@outputs2, $to); - $ENV{'NIX_HELD_LOCKS'} = "@outputs2"; # FIXME: ugly - importPaths(fileno($from), 1); -} diff --git a/scripts/local.mk b/scripts/local.mk index ee8ae6845dc..365d72086bf 100644 --- a/scripts/local.mk +++ b/scripts/local.mk @@ -4,7 +4,6 @@ nix_bin_scripts := \ bin-scripts += $(nix_bin_scripts) nix_noinst_scripts := \ - $(d)/build-remote.pl \ $(d)/nix-http-export.cgi \ $(d)/nix-profile.sh \ $(d)/nix-reduce-build @@ -14,6 +13,5 @@ noinst-scripts += $(nix_noinst_scripts) profiledir = $(sysconfdir)/profile.d $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644)) -$(eval $(call install-program-in, $(d)/build-remote.pl, $(libexecdir)/nix)) clean-files += $(nix_bin_scripts) $(nix_noinst_scripts) From c54814b175793f0e7a53fdeba55d1149342ec82a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 18:54:33 +0100 Subject: [PATCH 0089/2196] Remove download-via-ssh Replaced by SSHStore. --- .gitignore | 3 - Makefile | 1 - src/download-via-ssh/download-via-ssh.cc | 142 ----------------------- src/download-via-ssh/local.mk | 11 -- 4 files changed, 157 deletions(-) delete mode 100644 src/download-via-ssh/download-via-ssh.cc delete mode 100644 src/download-via-ssh/local.mk diff --git a/.gitignore b/.gitignore index 4d0ac32b16f..336967d15eb 100644 --- a/.gitignore +++ b/.gitignore @@ -71,9 +71,6 @@ Makefile.config # /src/nix-channel/ /src/nix-channel/nix-channel -# /src/download-via-ssh/ -/src/download-via-ssh/download-via-ssh - # /src/buildenv/ /src/buildenv/buildenv diff --git a/Makefile b/Makefile index 14be271bb10..67dc14f8e48 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,6 @@ makefiles = \ misc/emacs/local.mk \ doc/manual/local.mk \ tests/local.mk - #src/download-via-ssh/local.mk \ GLOBAL_CXXFLAGS += -std=c++14 -g -Wall diff --git a/src/download-via-ssh/download-via-ssh.cc b/src/download-via-ssh/download-via-ssh.cc deleted file mode 100644 index 4a1ba9a1123..00000000000 --- a/src/download-via-ssh/download-via-ssh.cc +++ /dev/null @@ -1,142 +0,0 @@ -#include "shared.hh" -#include "util.hh" -#include "serialise.hh" -#include "archive.hh" -#include "affinity.hh" -#include "globals.hh" -#include "serve-protocol.hh" -#include "worker-protocol.hh" -#include "store-api.hh" - -#include -#include -#include - -using namespace nix; - -// !!! TODO: -// * Respect more than the first host -// * use a database -// * show progress - - -static std::pair connect(const string & conn) -{ - Pipe to, from; - to.create(); - from.create(); - startProcess([&]() { - if (dup2(to.readSide, STDIN_FILENO) == -1) - throw SysError("dupping stdin"); - if (dup2(from.writeSide, STDOUT_FILENO) == -1) - throw SysError("dupping stdout"); - restoreSignals(); - execlp("ssh", "ssh", "-x", "-T", conn.c_str(), "nix-store --serve", NULL); - throw SysError("executing ssh"); - }); - // If child exits unexpectedly, we'll EPIPE or EOF early. - // If we exit unexpectedly, child will EPIPE or EOF early. - // So no need to keep track of it. 
- - return std::pair(to.writeSide.borrow(), from.readSide.borrow()); -} - - -static void substitute(std::pair & pipes, Path storePath, Path destPath) -{ - pipes.first << cmdDumpStorePath << storePath; - pipes.first.flush(); - restorePath(destPath, pipes.second); - std::cout << std::endl; -} - - -static void query(std::pair & pipes) -{ - for (string line; getline(std::cin, line);) { - Strings tokenized = tokenizeString(line); - string cmd = tokenized.front(); - tokenized.pop_front(); - if (cmd == "have") { - pipes.first - << cmdQueryValidPaths - << 0 // don't lock - << 0 // don't substitute - << tokenized; - pipes.first.flush(); - PathSet paths = readStrings(pipes.second); - for (auto & i : paths) - std::cout << i << std::endl; - } else if (cmd == "info") { - pipes.first << cmdQueryPathInfos << tokenized; - pipes.first.flush(); - while (1) { - Path path = readString(pipes.second); - if (path.empty()) break; - assertStorePath(path); - std::cout << path << std::endl; - string deriver = readString(pipes.second); - if (!deriver.empty()) assertStorePath(deriver); - std::cout << deriver << std::endl; - PathSet references = readStorePaths(pipes.second); - std::cout << references.size() << std::endl; - for (auto & i : references) - std::cout << i << std::endl; - std::cout << readLongLong(pipes.second) << std::endl; - std::cout << readLongLong(pipes.second) << std::endl; - } - } else - throw Error(format("unknown substituter query ‘%1%’") % cmd); - std::cout << std::endl; - } -} - - -int main(int argc, char * * argv) -{ - return handleExceptions(argv[0], [&]() { - if (argc < 2) - throw UsageError("download-via-ssh requires an argument"); - - initNix(); - - settings.update(); - - if (settings.sshSubstituterHosts.empty()) - return; - - std::cout << std::endl; - - /* Pass on the location of the daemon client's SSH - authentication socket. 
*/ - string sshAuthSock = settings.get("ssh-auth-sock", string("")); - if (sshAuthSock != "") setenv("SSH_AUTH_SOCK", sshAuthSock.c_str(), 1); - - string host = settings.sshSubstituterHosts.front(); - std::pair pipes = connect(host); - - /* Exchange the greeting */ - pipes.first << SERVE_MAGIC_1; - pipes.first.flush(); - unsigned int magic = readInt(pipes.second); - if (magic != SERVE_MAGIC_2) - throw Error("protocol mismatch"); - readInt(pipes.second); // Server version, unused for now - pipes.first << SERVE_PROTOCOL_VERSION; - pipes.first.flush(); - - string arg = argv[1]; - if (arg == "--query") - query(pipes); - else if (arg == "--substitute") { - if (argc != 4) - throw UsageError("download-via-ssh: --substitute takes exactly two arguments"); - Path storePath = argv[2]; - Path destPath = argv[3]; - printError(format("downloading ‘%1%’ via SSH from ‘%2%’...") % storePath % host); - substitute(pipes, storePath, destPath); - } - else - throw UsageError(format("download-via-ssh: unknown command ‘%1%’") % arg); - }); -} diff --git a/src/download-via-ssh/local.mk b/src/download-via-ssh/local.mk deleted file mode 100644 index 80f4c385acb..00000000000 --- a/src/download-via-ssh/local.mk +++ /dev/null @@ -1,11 +0,0 @@ -programs += download-via-ssh - -download-via-ssh_DIR := $(d) - -download-via-ssh_SOURCES := $(d)/download-via-ssh.cc - -download-via-ssh_INSTALL_DIR := $(libexecdir)/nix/substituters - -download-via-ssh_CXXFLAGS = -Isrc/nix-store - -download-via-ssh_LIBS = libmain libstore libutil libformat From 612aeb2df5e02000a02def849b94b8342385974e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:16:25 +0100 Subject: [PATCH 0090/2196] Cleanup --- src/libstore/binary-cache-store.hh | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 31878bbb247..7b228422d1b 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -138,8 +138,6 @@ public: ref getFSAccessor() override; -public: - void addSignatures(const Path & storePath, const StringSet & sigs) override { notImpl(); } From 7a58ad0ef5d32f130163276d49f962b4be92e6fd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:20:15 +0100 Subject: [PATCH 0091/2196] SSHStore: uri -> host --- src/libstore/ssh-store.cc | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index f5d0a270438..1b44af07bcc 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -7,11 +7,13 @@ namespace nix { +static std::string uriScheme = "ssh://"; + class SSHStore : public RemoteStore { public: - SSHStore(string uri, const Params & params, size_t maxConnections = std::numeric_limits::max()); + SSHStore(string host, const Params & params, size_t maxConnections = std::numeric_limits::max()); std::string getUri() override; @@ -36,17 +38,17 @@ class SSHStore : public RemoteStore Pid sshMaster; - string uri; + string host; Path key; }; -SSHStore::SSHStore(string uri, const Params & params, size_t maxConnections) +SSHStore::SSHStore(string host, const Params & params, size_t maxConnections) : Store(params) , RemoteStore(params, maxConnections) , tmpDir(createTempDir("", "nix", true, true, 0700)) , socketPath((Path) tmpDir + "/ssh.sock") - , uri(std::move(uri)) + , host(std::move(host)) , key(get(params, "ssh-key", "")) { /* open a connection and perform the handshake to verify all is well */ @@ -55,7 +57,7 @@ 
SSHStore::SSHStore(string uri, const Params & params, size_t maxConnections) string SSHStore::getUri() { - return "ssh://" + uri; + return uriScheme + host; } class ForwardSource : public Source @@ -93,9 +95,9 @@ ref SSHStore::openConnection() sshMaster = startProcess([&]() { restoreSignals(); if (key.empty()) - execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), uri.c_str(), NULL); + execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), host.c_str(), NULL); else - execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), "-i", key.c_str(), uri.c_str(), NULL); + execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), "-i", key.c_str(), host.c_str(), NULL); throw SysError("starting ssh master"); }); } @@ -109,7 +111,7 @@ ref SSHStore::openConnection() throw SysError("duping over STDIN"); if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("duping over STDOUT"); - execlp("ssh", "ssh", "-S", socketPath.c_str(), uri.c_str(), "nix-daemon", "--stdio", NULL); + execlp("ssh", "ssh", "-S", socketPath.c_str(), host.c_str(), "nix-daemon", "--stdio", NULL); throw SysError("executing nix-daemon --stdio over ssh"); }); in.readSide = -1; @@ -126,8 +128,8 @@ static RegisterStoreImplementation regStore([]( const std::string & uri, const Store::Params & params) -> std::shared_ptr { - if (std::string(uri, 0, 6) != "ssh://") return 0; - return std::make_shared(uri.substr(6), params); + if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0; + return std::make_shared(std::string(uri, uriScheme.size()), params); }); } From ce4d8e3ef83505560db787876beb30d20ca6faf2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:20:41 +0100 Subject: [PATCH 0092/2196] Remove unnecessary call to topoSortPaths() exportPaths() already does this. --- src/nix-store/nix-store.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index c1e6afef0e5..0aabe66c562 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -922,9 +922,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - Paths sorted = store->topoSortPaths(readStorePaths(*store, in)); - reverse(sorted.begin(), sorted.end()); - store->exportPaths(sorted, out); + store->exportPaths(readStorePaths(*store, in), out); break; } From ddb5577f2eb158279ee40219de409dcf1230aacc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:21:54 +0100 Subject: [PATCH 0093/2196] Move SavingSourceAdapter to serialise.hh --- src/libutil/serialise.hh | 15 +++++++++++++++ src/nix-daemon/nix-daemon.cc | 15 --------------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index f12f02543bc..5646d08c131 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -139,6 +139,21 @@ struct StringSource : Source }; +/* Adapter class of a Source that saves all data read to `s'. 
*/ +struct SavingSourceAdapter : Source +{ + Source & orig; + string s; + SavingSourceAdapter(Source & orig) : orig(orig) { } + size_t read(unsigned char * data, size_t len) + { + size_t n = orig.read(data, len); + s.append((const char *) data, n); + return n; + } +}; + + void writePadding(size_t len, Sink & sink); void writeString(const unsigned char * buf, size_t len, Sink & sink); diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 90a7301873c..f3ee0afc11e 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -168,21 +168,6 @@ struct RetrieveRegularNARSink : ParseSink }; -/* Adapter class of a Source that saves all data read to `s'. */ -struct SavingSourceAdapter : Source -{ - Source & orig; - string s; - SavingSourceAdapter(Source & orig) : orig(orig) { } - size_t read(unsigned char * data, size_t len) - { - size_t n = orig.read(data, len); - s.append((const char *) data, n); - return n; - } -}; - - static void performOp(ref store, bool trusted, unsigned int clientVersion, Source & from, Sink & to, unsigned int op) { From fa07558a069b974769e22ac944cfe21a69fb4485 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:22:48 +0100 Subject: [PATCH 0094/2196] Provide default implementations for a couple of Store methods --- src/libstore/binary-cache-store.hh | 19 ------------------- src/{nix-store => libstore}/serve-protocol.hh | 0 src/libstore/store-api.cc | 13 +++++++++++++ src/libstore/store-api.hh | 14 +++++++------- 4 files changed, 20 insertions(+), 26 deletions(-) rename src/{nix-store => libstore}/serve-protocol.hh (100%) diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 7b228422d1b..a70d50d4949 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -71,9 +71,6 @@ public: PathSet & referrers) override { notImpl(); } - PathSet queryValidDerivers(const Path & path) override - { return {}; } - PathSet queryDerivationOutputs(const Path & path) override { notImpl(); } @@ -83,13 +80,6 @@ public: Path queryPathFromHashPart(const string & hashPart) override { notImpl(); } - PathSet querySubstitutablePaths(const PathSet & paths) override - { return {}; } - - void querySubstitutablePathInfos(const PathSet & paths, - SubstitutablePathInfos & infos) override - { } - bool wantMassQuery() override { return wantMassQuery_; } void addToStore(const ValidPathInfo & info, const ref & nar, @@ -121,21 +111,12 @@ public: void addIndirectRoot(const Path & path) override { notImpl(); } - void syncWithGC() override - { } - Roots findRoots() override { notImpl(); } void collectGarbage(const GCOptions & options, GCResults & results) override { notImpl(); } - void optimiseStore() override - { } - - bool verifyStore(bool checkContents, bool repair) override - { return true; } - ref getFSAccessor() override; void addSignatures(const Path & storePath, const StringSet & sigs) override diff --git a/src/nix-store/serve-protocol.hh b/src/libstore/serve-protocol.hh similarity index 100% rename from src/nix-store/serve-protocol.hh rename to src/libstore/serve-protocol.hh diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index a42d1183405..f98ba38406d 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -285,6 +285,19 @@ bool Store::isValidPath(const Path & storePath) } +/* Default implementation for stores that only implement + queryPathInfoUncached(). 
*/ +bool Store::isValidPathUncached(const Path & path) +{ + try { + queryPathInfo(path); + return true; + } catch (InvalidPath &) { + return false; + } +} + + ref Store::queryPathInfo(const Path & storePath) { std::promise> promise; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 3fee999072f..8058daf149b 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -320,7 +320,7 @@ public: protected: - virtual bool isValidPathUncached(const Path & path) = 0; + virtual bool isValidPathUncached(const Path & path); public: @@ -360,7 +360,7 @@ public: output. (Note that the result of `queryDeriver()' is the derivation that was actually used to produce `path', which may not exist anymore.) */ - virtual PathSet queryValidDerivers(const Path & path) = 0; + virtual PathSet queryValidDerivers(const Path & path) { return {}; }; /* Query the outputs of the derivation denoted by `path'. */ virtual PathSet queryDerivationOutputs(const Path & path) = 0; @@ -373,13 +373,13 @@ public: virtual Path queryPathFromHashPart(const string & hashPart) = 0; /* Query which of the given paths have substitutes. */ - virtual PathSet querySubstitutablePaths(const PathSet & paths) = 0; + virtual PathSet querySubstitutablePaths(const PathSet & paths) { return {}; }; /* Query substitute info (i.e. references, derivers and download sizes) of a set of paths. If a path does not have substitute info, it's omitted from the resulting ‘infos’ map. */ virtual void querySubstitutablePathInfos(const PathSet & paths, - SubstitutablePathInfos & infos) = 0; + SubstitutablePathInfos & infos) { return; }; virtual bool wantMassQuery() { return false; } @@ -454,7 +454,7 @@ public: permanent root and sees our's. In either case the permanent root is seen by the collector. */ - virtual void syncWithGC() = 0; + virtual void syncWithGC() { }; /* Find the roots of the garbage collector. Each root is a pair (link, storepath) where `link' is the path of the symlink @@ -485,11 +485,11 @@ public: /* Optimise the disk space usage of the Nix store by hard-linking files with the same contents. */ - virtual void optimiseStore() = 0; + virtual void optimiseStore() { }; /* Check the integrity of the Nix store. Returns true if errors remain. */ - virtual bool verifyStore(bool checkContents, bool repair) = 0; + virtual bool verifyStore(bool checkContents, bool repair) { return false; }; /* Return an object to access files in the Nix store. */ virtual ref getFSAccessor() = 0; From f38224e924bc38ea2b94930f8d12e29c7c8df7a8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:23:16 +0100 Subject: [PATCH 0095/2196] copyStorePath(): Don't require signatures for "trusted" stores For example, SSH stores could be trusted. 
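A store opts into this by overriding isTrusted(). The sketch below uses hypothetical stand-ins (TrustedSshStore, a stripped-down Store, and copyFrom()), not the real Nix classes; it only illustrates the hook this patch wires into copyStorePath().

/* Hypothetical stand-ins for illustration only -- not the real Nix
   store classes. */
#include <iostream>

struct Store
{
    virtual ~Store() { }
    /* Default: paths from this store still need valid signatures. */
    virtual bool isTrusted() { return false; }
};

struct TrustedSshStore : Store
{
    bool isTrusted() override { return true; }
};

/* Mirrors the new copyStorePath() logic: a trusted source implies that
   signature checking may be skipped. */
static void copyFrom(Store & srcStore, bool dontCheckSigs)
{
    if (srcStore.isTrusted()) dontCheckSigs = true;
    std::cout << (dontCheckSigs ? "importing without signature check"
                                : "verifying signatures") << std::endl;
}

int main()
{
    TrustedSshStore store;
    copyFrom(store, false);
    return 0;
}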
--- src/libstore/store-api.cc | 9 +++++++++ src/libstore/store-api.hh | 4 ++++ 2 files changed, 13 insertions(+) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index f98ba38406d..11c2f4b02b2 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -529,6 +529,15 @@ void copyStorePath(ref srcStore, ref dstStore, StringSink sink; srcStore->narFromPath({storePath}, sink); + if (srcStore->isTrusted()) + dontCheckSigs = true; + + if (!info->narHash && dontCheckSigs) { + auto info2 = make_ref(*info); + info2->narHash = hashString(htSHA256, *sink.s); + info = info2; + } + dstStore->addToStore(*info, sink.s, repair, dontCheckSigs); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 8058daf149b..39132be893c 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -562,6 +562,10 @@ public: const Stats & getStats(); + /* Whether this store paths from this store can be imported even + if they lack a signature. */ + virtual bool isTrusted() { return false; } + protected: Stats stats; From caa5793b4a74049ee37dd88eb1c5b785456ce40d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 19:28:40 +0100 Subject: [PATCH 0096/2196] Add a LegacySSHStore that uses nix-store --serve This is useful for nix-copy-closure. --- src/libstore/legacy-ssh-store.cc | 247 +++++++++++++++++++++++++++++++ src/libstore/remote-store.cc | 1 + 2 files changed, 248 insertions(+) create mode 100644 src/libstore/legacy-ssh-store.cc diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc new file mode 100644 index 00000000000..5d9e5aad6e0 --- /dev/null +++ b/src/libstore/legacy-ssh-store.cc @@ -0,0 +1,247 @@ +#include "archive.hh" +#include "pool.hh" +#include "remote-store.hh" +#include "serve-protocol.hh" +#include "store-api.hh" +#include "worker-protocol.hh" + +namespace nix { + +static std::string uriScheme = "legacy-ssh://"; + +struct LegacySSHStore : public Store +{ + string host; + + struct Connection + { + Pid sshPid; + AutoCloseFD out; + AutoCloseFD in; + FdSink to; + FdSource from; + }; + + AutoDelete tmpDir; + + Path socketPath; + + Pid sshMaster; + + ref> connections; + + Path key; + + LegacySSHStore(const string & host, const Params & params, + size_t maxConnections = std::numeric_limits::max()) + : Store(params) + , host(host) + , tmpDir(createTempDir("", "nix", true, true, 0700)) + , socketPath((Path) tmpDir + "/ssh.sock") + , connections(make_ref>( + maxConnections, + [this]() { return openConnection(); }, + [](const ref & r) { return true; } + )) + , key(get(params, "ssh-key", "")) + { + } + + ref openConnection() + { + if ((pid_t) sshMaster == -1) { + sshMaster = startProcess([&]() { + restoreSignals(); + Strings args{ "ssh", "-M", "-S", socketPath, "-N", "-x", "-a", host }; + if (!key.empty()) + args.insert(args.end(), {"-i", key}); + execvp("ssh", stringsToCharPtrs(args).data()); + throw SysError("starting SSH master connection to host ‘%s’", host); + }); + } + + auto conn = make_ref(); + Pipe in, out; + in.create(); + out.create(); + conn->sshPid = startProcess([&]() { + if (dup2(in.readSide.get(), STDIN_FILENO) == -1) + throw SysError("duping over STDIN"); + if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) + throw SysError("duping over STDOUT"); + execlp("ssh", "ssh", "-S", socketPath.c_str(), host.c_str(), "nix-store", "--serve", "--write", nullptr); + throw SysError("executing ‘nix-store --serve’ on remote host ‘%s’", host); + }); + in.readSide = -1; + out.writeSide = -1; + conn->out = 
std::move(out.readSide); + conn->in = std::move(in.writeSide); + conn->to = FdSink(conn->in.get()); + conn->from = FdSource(conn->out.get()); + + int remoteVersion; + + try { + conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; + conn->to.flush(); + + unsigned int magic = readInt(conn->from); + if (magic != SERVE_MAGIC_2) + throw Error("protocol mismatch with ‘nix-store --serve’ on ‘%s’", host); + remoteVersion = readInt(conn->from); + if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200) + throw Error("unsupported ‘nix-store --serve’ protocol version on ‘%s’", host); + + } catch (EndOfFile & e) { + throw Error("cannot connect to ‘%1%’", host); + } + + return conn; + }; + + string getUri() override + { + return uriScheme + host; + } + + void queryPathInfoUncached(const Path & path, + std::function)> success, + std::function failure) override + { + sync2async>(success, failure, [&]() -> std::shared_ptr { + auto conn(connections->get()); + + debug("querying remote host ‘%s’ for info on ‘%s’", host, path); + + conn->to << cmdQueryPathInfos << PathSet{path}; + conn->to.flush(); + + auto info = std::make_shared(); + conn->from >> info->path; + if (info->path.empty()) return nullptr; + assert(path == info->path); + + PathSet references; + conn->from >> info->deriver; + info->references = readStorePaths(*this, conn->from); + readLongLong(conn->from); // download size + info->narSize = readLongLong(conn->from); + + auto s = readString(conn->from); + assert(s == ""); + + return info; + }); + } + + void addToStore(const ValidPathInfo & info, const ref & nar, + bool repair, bool dontCheckSigs, + std::shared_ptr accessor) override + { + debug("adding path ‘%s’ to remote host ‘%s’", info.path, host); + + auto conn(connections->get()); + + conn->to + << cmdImportPaths + << 1; + conn->to(*nar); + conn->to + << exportMagic + << info.path + << info.references + << info.deriver + << 0 + << 0; + conn->to.flush(); + + if (readInt(conn->from) != 1) + throw Error("failed to add path ‘%s’ to remote host ‘%s’, info.path, host"); + + } + + void narFromPath(const Path & path, Sink & sink) override + { + auto conn(connections->get()); + + conn->to << cmdDumpStorePath << path; + conn->to.flush(); + + /* FIXME: inefficient. */ + ParseSink parseSink; /* null sink; just parse the NAR */ + SavingSourceAdapter savedNAR(conn->from); + parseDump(parseSink, savedNAR); + sink(savedNAR.s); + } + + /* Unsupported methods. 
*/ + [[noreturn]] void unsupported() + { + throw Error("operation not supported on SSH stores"); + } + + PathSet queryAllValidPaths() override { unsupported(); } + + void queryReferrers(const Path & path, PathSet & referrers) override + { unsupported(); } + + PathSet queryDerivationOutputs(const Path & path) override + { unsupported(); } + + StringSet queryDerivationOutputNames(const Path & path) override + { unsupported(); } + + Path queryPathFromHashPart(const string & hashPart) override + { unsupported(); } + + Path addToStore(const string & name, const Path & srcPath, + bool recursive, HashType hashAlgo, + PathFilter & filter, bool repair) override + { unsupported(); } + + Path addTextToStore(const string & name, const string & s, + const PathSet & references, bool repair) override + { unsupported(); } + + void buildPaths(const PathSet & paths, BuildMode buildMode) override + { unsupported(); } + + BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, + BuildMode buildMode) override + { unsupported(); } + + void ensurePath(const Path & path) override + { unsupported(); } + + void addTempRoot(const Path & path) override + { unsupported(); } + + void addIndirectRoot(const Path & path) override + { unsupported(); } + + Roots findRoots() override + { unsupported(); } + + void collectGarbage(const GCOptions & options, GCResults & results) override + { unsupported(); } + + ref getFSAccessor() + { unsupported(); } + + void addSignatures(const Path & storePath, const StringSet & sigs) override + { unsupported(); } + + bool isTrusted() override + { return true; } + +}; + +static RegisterStoreImplementation regStore([]( + const std::string & uri, const Store::Params & params) + -> std::shared_ptr +{ + if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0; + return std::make_shared(std::string(uri, uriScheme.size()), params); +}); + +} diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 816d95ba607..42c09ec7e0b 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -37,6 +37,7 @@ template T readStorePaths(Store & store, Source & from) } template PathSet readStorePaths(Store & store, Source & from); +template Paths readStorePaths(Store & store, Source & from); /* TODO: Separate these store impls into different files, give them better names */ RemoteStore::RemoteStore(const Params & params, size_t maxConnections) From 4724903c78e80481fc63d627081fac6a98e4205d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 7 Feb 2017 20:55:47 +0100 Subject: [PATCH 0097/2196] nix-copy-closure: Use computeFSClosure() and LegacySSHStore --- src/nix-copy-closure/nix-copy-closure.cc | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index b7e997ca4b0..4340443b5cc 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -7,13 +7,15 @@ int main(int argc, char ** argv) { return handleExceptions(argv[0], [&]() { initNix(); + auto gzip = false; auto toMode = true; auto includeOutputs = false; auto dryRun = false; auto useSubstitutes = false; - auto sshHost = string{}; - auto storePaths = PathSet{}; + std::string sshHost; + PathSet storePaths; + parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--help") showManPage("nix-copy-closure"); @@ -41,20 +43,17 @@ int main(int argc, char ** argv) storePaths.insert(*arg); 
return true; }); + if (sshHost.empty()) throw UsageError("no host name specified"); - auto remoteUri = "ssh://" + sshHost + (gzip ? "?compress=true" : ""); + auto remoteUri = "legacy-ssh://" + sshHost + (gzip ? "?compress=true" : ""); auto to = toMode ? openStore(remoteUri) : openStore(); auto from = toMode ? openStore() : openStore(remoteUri); - if (includeOutputs) { - auto newPaths = PathSet{}; - for (const auto & p : storePaths) { - auto outputs = from->queryDerivationOutputs(p); - newPaths.insert(outputs.begin(), outputs.end()); - } - storePaths.insert(newPaths.begin(), newPaths.end()); - } - copyPaths(from, to, Paths(storePaths.begin(), storePaths.end()), useSubstitutes); + + PathSet closure; + from->computeFSClosure(storePaths, closure, false, includeOutputs); + + copyPaths(from, to, Paths(closure.begin(), closure.end()), useSubstitutes); }); } From 418a837897e597c5dc2eb4e458462bd04b2abde7 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Fri, 12 Aug 2016 06:38:08 -0400 Subject: [PATCH 0098/2196] Remove perl dependency. Fixes #341 --- Makefile | 1 - configure.ac | 52 ------------------- .../advanced-topics/distributed-builds.xml | 10 ++-- release.nix | 22 +++----- src/nix-store/nix-store.cc | 2 +- tests/optimise-store.sh | 12 ++--- 6 files changed, 19 insertions(+), 80 deletions(-) diff --git a/Makefile b/Makefile index 62a4850d854..960685b8f48 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,6 @@ makefiles = \ src/nix-channel/local.mk \ src/nix-build/local.mk \ src/build-remote/local.mk \ - perl/local.mk \ scripts/local.mk \ corepkgs/local.mk \ misc/systemd/local.mk \ diff --git a/configure.ac b/configure.ac index e6b11be2df1..5d5f1d2be79 100644 --- a/configure.ac +++ b/configure.ac @@ -121,7 +121,6 @@ AC_PATH_PROG(xmllint, xmllint, false) AC_PATH_PROG(xsltproc, xsltproc, false) AC_PATH_PROG(flex, flex, false) AC_PATH_PROG(bison, bison, false) -NEED_PROG(perl, perl) NEED_PROG(sed, sed) NEED_PROG(tar, tar) NEED_PROG(bzip2, bzip2) @@ -131,23 +130,6 @@ AC_PATH_PROG(dot, dot) AC_PATH_PROG(pv, pv, pv) -# Test that Perl has the open/fork feature (Perl 5.8.0 and beyond). -AC_MSG_CHECKING([whether Perl is recent enough]) -if ! $perl -e 'open(FOO, "-|", "true"); while () { print; }; close FOO or die;'; then - AC_MSG_RESULT(no) - AC_MSG_ERROR([Your Perl version is too old. Nix requires Perl 5.8.0 or newer.]) -fi -AC_MSG_RESULT(yes) - - -# Figure out where to install Perl modules. -AC_MSG_CHECKING([for the Perl installation prefix]) -perlversion=$($perl -e 'use Config; print $Config{version};') -perlarchname=$($perl -e 'use Config; print $Config{archname};') -AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname]) -AC_MSG_RESULT($perllibdir) - - NEED_PROG(cat, cat) NEED_PROG(tr, tr) AC_ARG_WITH(coreutils-bin, AC_HELP_STRING([--with-coreutils-bin=PATH], @@ -213,40 +195,6 @@ if test "$gc" = yes; then fi -# Check for the required Perl dependencies (DBI, DBD::SQLite). -perlFlags="-I$perllibdir" - -AC_ARG_WITH(dbi, AC_HELP_STRING([--with-dbi=PATH], - [prefix of the Perl DBI library]), - perlFlags="$perlFlags -I$withval") - -AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH], - [prefix of the Perl DBD::SQLite library]), - perlFlags="$perlFlags -I$withval") - -AC_MSG_CHECKING([whether DBD::SQLite works]) -if ! 
$perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then - AC_MSG_RESULT(no) - AC_MSG_FAILURE([The Perl modules DBI and/or DBD::SQLite are missing.]) -fi -AC_MSG_RESULT(yes) - -AC_SUBST(perlFlags) - - -# Whether to build the Perl bindings -AC_MSG_CHECKING([whether to build the Perl bindings]) -AC_ARG_ENABLE(perl-bindings, AC_HELP_STRING([--enable-perl-bindings], - [whether to build the Perl bindings (recommended) [default=yes]]), - perlbindings=$enableval, perlbindings=yes) -if test "$enable_shared" = no; then - # Perl bindings require shared libraries. - perlbindings=no -fi -AC_SUBST(perlbindings) -AC_MSG_RESULT($perlbindings) - - AC_ARG_ENABLE(init-state, AC_HELP_STRING([--disable-init-state], [do not initialise DB etc. in `make install']), init_state=$enableval, init_state=yes) diff --git a/doc/manual/advanced-topics/distributed-builds.xml b/doc/manual/advanced-topics/distributed-builds.xml index f8583700393..d5bc1c59255 100644 --- a/doc/manual/advanced-topics/distributed-builds.xml +++ b/doc/manual/advanced-topics/distributed-builds.xml @@ -42,7 +42,7 @@ purposes. It uses ssh and nix-copy-closure to copy the build inputs and outputs and perform the remote build. To use it, you should set NIX_BUILD_HOOK to -prefix/libexec/nix/build-remote.pl. +prefix/libexec/nix/build-remote. You should also define a list of available build machines and point the environment variable NIX_REMOTE_SYSTEMS to it. NIX_REMOTE_SYSTEMS must be an absolute path. An @@ -68,7 +68,7 @@ bits of information: should not have a passphrase! The maximum number of builds that - build-remote.pl will execute in parallel on the + build-remote will execute in parallel on the machine. Typically this should be equal to the number of CPU cores. For instance, the machine itchy in the example will execute up to 8 builds in parallel. @@ -80,7 +80,7 @@ bits of information: A comma-separated list of supported features. If a derivation has the requiredSystemFeatures attribute, then - build-remote.pl will only perform the + build-remote will only perform the derivation on a machine that has the specified features. For instance, the attribute @@ -106,11 +106,11 @@ requiredSystemFeatures = [ "kvm" ]; You should also set up the environment variable NIX_CURRENT_LOAD to point at a directory (e.g., /var/run/nix/current-load) that -build-remote.pl uses to remember how many builds +build-remote uses to remember how many builds it is currently executing remotely. It doesn't look at the actual load on the remote machine, so if you have multiple instances of Nix running, they should use the same NIX_CURRENT_LOAD -file. Maybe in the future build-remote.pl will +file. Maybe in the future build-remote will look at the actual remote load. diff --git a/release.nix b/release.nix index ace0f9cc89a..7bfde71a644 100644 --- a/release.nix +++ b/release.nix @@ -24,18 +24,14 @@ let inherit officialRelease; buildInputs = - [ curl bison flex perl libxml2 libxslt bzip2 xz + [ curl bison flex libxml2 libxslt bzip2 xz pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl autoconf-archive git ]; - configureFlags = '' - --with-dbi=${perlPackages.DBI}/${perl.libPrefix} - --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix} - --enable-gc - ''; + configureFlags = "--enable-gc"; postUnpack = '' # Clean up when building from a working tree. 
@@ -73,7 +69,7 @@ let src = tarball; buildInputs = - [ curl perl bzip2 xz openssl pkgconfig sqlite boehmgc ] + [ curl bzip2 xz openssl pkgconfig sqlite boehmgc ] ++ lib.optional stdenv.isLinux libsodium ++ lib.optional stdenv.isLinux (aws-sdk-cpp.override { @@ -83,8 +79,6 @@ let configureFlags = '' --disable-init-state - --with-dbi=${perlPackages.DBI}/${perl.libPrefix} - --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix} --enable-gc --sysconfdir=/etc ''; @@ -147,15 +141,13 @@ let src = tarball; buildInputs = - [ curl perl bzip2 openssl pkgconfig sqlite xz libsodium + [ curl bzip2 openssl pkgconfig sqlite xz libsodium # These are for "make check" only: graphviz libxml2 libxslt ]; configureFlags = '' --disable-init-state - --with-dbi=${perlPackages.DBI}/${perl.libPrefix} - --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix} ''; dontInstall = false; @@ -282,7 +274,7 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "perl-DBD-SQLite" "perl-devel" "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] + [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; @@ -303,14 +295,14 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "libdbd-sqlite3-perl" "libsqlite3-dev" "libbz2-dev" "libwww-curl-perl" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] + [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; debRequires = - [ "curl" "libdbd-sqlite3-perl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libwww-curl-perl" "libssl1.0.0" "liblzma5" ] + [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" ] ++ extraDebPackages; debMaintainer = "Eelco Dolstra "; doInstallCheck = true; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 0aabe66c562..7dda6d208df 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -926,7 +926,7 @@ static void opServe(Strings opFlags, Strings opArgs) break; } - case cmdBuildPaths: { /* Used by build-remote.pl. 
*/ + case cmdBuildPaths: { if (!writeAllowed) throw Error("building paths is not allowed"); PathSet paths = readStorePaths(*store, in); diff --git a/tests/optimise-store.sh b/tests/optimise-store.sh index ea4478693e7..bd88662bc37 100644 --- a/tests/optimise-store.sh +++ b/tests/optimise-store.sh @@ -5,14 +5,14 @@ clearStore outPath1=$(echo 'with import ./config.nix; mkDerivation { name = "foo1"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link --option auto-optimise-store true) outPath2=$(echo 'with import ./config.nix; mkDerivation { name = "foo2"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link --option auto-optimise-store true) -inode1="$(perl -e "print ((lstat('$outPath1/foo'))[1])")" -inode2="$(perl -e "print ((lstat('$outPath2/foo'))[1])")" +inode1="$(stat --format=%i $outPath1/foo)" +inode2="$(stat --format=%i $outPath2/foo)" if [ "$inode1" != "$inode2" ]; then echo "inodes do not match" exit 1 fi -nlink="$(perl -e "print ((lstat('$outPath1/foo'))[3])")" +nlink="$(stat --format=%h $outPath1/foo)" if [ "$nlink" != 3 ]; then echo "link count incorrect" exit 1 @@ -20,7 +20,7 @@ fi outPath3=$(echo 'with import ./config.nix; mkDerivation { name = "foo3"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link) -inode3="$(perl -e "print ((lstat('$outPath3/foo'))[1])")" +inode3="$(stat --format=%i $outPath3/foo)" if [ "$inode1" = "$inode3" ]; then echo "inodes match unexpectedly" exit 1 @@ -28,8 +28,8 @@ fi nix-store --optimise -inode1="$(perl -e "print ((lstat('$outPath1/foo'))[1])")" -inode3="$(perl -e "print ((lstat('$outPath3/foo'))[1])")" +inode1="$(stat --format=%i $outPath1/foo)" +inode3="$(stat --format=%i $outPath3/foo)" if [ "$inode1" != "$inode3" ]; then echo "inodes do not match" exit 1 From f7b7df8d1fcb41156fe92ef2ecf5b17074b88446 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Fri, 12 Aug 2016 09:44:01 -0400 Subject: [PATCH 0099/2196] Add nix-perl package for the perl bindings --- .gitignore | 1 + Makefile.config.in | 3 - corepkgs/config.nix.in | 3 + local.mk | 2 +- perl/Makefile | 14 +++++ perl/Makefile.config.in | 19 +++++++ perl/configure.ac | 117 ++++++++++++++++++++++++++++++++++++++ perl/lib/Nix/Config.pm.in | 14 ++--- perl/local.mk | 25 ++++---- release.nix | 27 +++++++++ 10 files changed, 201 insertions(+), 24 deletions(-) create mode 100644 perl/Makefile create mode 100644 perl/Makefile.config.in create mode 100644 perl/configure.ac diff --git a/.gitignore b/.gitignore index 951efb4c908..4f7e668e781 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ Makefile.config +perl/Makefile.config # / /aclocal.m4 diff --git a/Makefile.config.in b/Makefile.config.in index 15e94380477..e2277c667ec 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -24,9 +24,6 @@ libdir = @libdir@ libexecdir = @libexecdir@ localstatedir = @localstatedir@ mandir = @mandir@ -perl = @perl@ -perlbindings = @perlbindings@ -perllibdir = @perllibdir@ pkglibdir = $(libdir)/$(PACKAGE_NAME) prefix = @prefix@ storedir = @storedir@ diff --git a/corepkgs/config.nix.in b/corepkgs/config.nix.in index f0f4890a32f..32ce6b399f2 100644 --- a/corepkgs/config.nix.in +++ b/corepkgs/config.nix.in @@ -14,6 +14,9 @@ in rec { nixBinDir = fromEnv "NIX_BIN_DIR" "@bindir@"; nixPrefix = "@prefix@"; nixLibexecDir = fromEnv "NIX_LIBEXEC_DIR" "@libexecdir@"; + nixLocalstateDir = "@localstatedir@"; + nixSysconfDir = "@sysconfdir@"; + nixStoreDir = fromEnv 
"NIX_STORE_DIR" "@storedir@"; # If Nix is installed in the Nix store, then automatically add it as # a dependency to the core packages. This ensures that they work diff --git a/local.mk b/local.mk index eebd7196119..dc10e6870a8 100644 --- a/local.mk +++ b/local.mk @@ -3,7 +3,7 @@ ifeq ($(MAKECMDGOALS), dist) dist-files += $(shell git --git-dir=.git ls-files || find * -type f) endif -dist-files += configure config.h.in nix.spec +dist-files += configure config.h.in nix.spec perl/configure clean-files += Makefile.config diff --git a/perl/Makefile b/perl/Makefile new file mode 100644 index 00000000000..41a32576e9b --- /dev/null +++ b/perl/Makefile @@ -0,0 +1,14 @@ +makefiles = local.mk + +GLOBAL_CXXFLAGS += -std=c++11 -g -Wall + +-include Makefile.config + +OPTIMIZE = 1 + +ifeq ($(OPTIMIZE), 1) + GLOBAL_CFLAGS += -O3 + GLOBAL_CXXFLAGS += -O3 +endif + +include mk/lib.mk diff --git a/perl/Makefile.config.in b/perl/Makefile.config.in new file mode 100644 index 00000000000..901d1283e55 --- /dev/null +++ b/perl/Makefile.config.in @@ -0,0 +1,19 @@ +CC = @CC@ +CFLAGS = @CFLAGS@ +CXX = @CXX@ +CXXFLAGS = @CXXFLAGS@ +HAVE_SODIUM = @HAVE_SODIUM@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +SODIUM_LIBS = @SODIUM_LIBS@ +NIX_CFLAGS = @NIX_CFLAGS@ +NIX_LIBS = @NIX_LIBS@ +nixbindir = @nixbindir@ +curl = @curl@ +nixlibexecdir = @nixlibexecdir@ +nixlocalstatedir = @nixlocalstatedir@ +perl = @perl@ +perllibdir = @perllibdir@ +nixstoredir = @nixstoredir@ +nixsysconfdir = @nixsysconfdir@ +perlbindings = @perlbindings@ diff --git a/perl/configure.ac b/perl/configure.ac new file mode 100644 index 00000000000..dea2b614004 --- /dev/null +++ b/perl/configure.ac @@ -0,0 +1,117 @@ +AC_INIT(nix-perl, m4_esyscmd([bash -c "echo -n $(cat ../version)$VERSION_SUFFIX"])) +AC_CONFIG_SRCDIR(MANIFEST) +AC_CONFIG_AUX_DIR(../config) + +CFLAGS= +CXXFLAGS= +AC_PROG_CC +AC_PROG_CXX +AX_CXX_COMPILE_STDCXX_11 + +# Use 64-bit file system calls so that we can support files > 2 GiB. +AC_SYS_LARGEFILE + +AC_DEFUN([NEED_PROG], +[ +AC_PATH_PROG($1, $2) +if test -z "$$1"; then + AC_MSG_ERROR([$2 is required]) +fi +]) + +NEED_PROG(perl, perl) +NEED_PROG(curl, curl) +NEED_PROG(bzip2, bzip2) +NEED_PROG(xz, xz) + +# Test that Perl has the open/fork feature (Perl 5.8.0 and beyond). +AC_MSG_CHECKING([whether Perl is recent enough]) +if ! $perl -e 'open(FOO, "-|", "true"); while () { print; }; close FOO or die;'; then + AC_MSG_RESULT(no) + AC_MSG_ERROR([Your Perl version is too old. Nix requires Perl 5.8.0 or newer.]) +fi +AC_MSG_RESULT(yes) + + +# Figure out where to install Perl modules. +AC_MSG_CHECKING([for the Perl installation prefix]) +perlversion=$($perl -e 'use Config; print $Config{version};') +perlarchname=$($perl -e 'use Config; print $Config{archname};') +AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname]) +AC_MSG_RESULT($perllibdir) + +AC_ARG_WITH(store-dir, AC_HELP_STRING([--with-store-dir=PATH], + [path of the Nix store (defaults to /nix/store)]), + storedir=$withval, storedir='/nix/store') +AC_SUBST(storedir) + +# Look for libsodium, an optional dependency. +PKG_CHECK_MODULES([SODIUM], [libsodium], + [AC_DEFINE([HAVE_SODIUM], [1], [Whether to use libsodium for cryptography.]) + CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS" + have_sodium=1], [have_sodium=]) +AC_SUBST(HAVE_SODIUM, [$have_sodium]) + +# Check for the required Perl dependencies (DBI, DBD::SQLite and WWW::Curl). 
+perlFlags="-I$perllibdir" + +AC_ARG_WITH(dbi, AC_HELP_STRING([--with-dbi=PATH], + [prefix of the Perl DBI library]), + perlFlags="$perlFlags -I$withval") + +AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH], + [prefix of the Perl DBD::SQLite library]), + perlFlags="$perlFlags -I$withval") + +AC_ARG_WITH(www-curl, AC_HELP_STRING([--with-www-curl=PATH], + [prefix of the Perl WWW::Curl library]), + perlFlags="$perlFlags -I$withval") + +AC_MSG_CHECKING([whether DBD::SQLite works]) +if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then + AC_MSG_RESULT(no) + AC_MSG_FAILURE([The Perl modules DBI and/or DBD::SQLite are missing.]) +fi +AC_MSG_RESULT(yes) + +AC_MSG_CHECKING([whether WWW::Curl works]) +if ! $perl $perlFlags -e 'use WWW::Curl;' 2>&5; then + AC_MSG_RESULT(no) + AC_MSG_FAILURE([The Perl module WWW::Curl is missing.]) +fi +AC_MSG_RESULT(yes) + +AC_SUBST(perlFlags) + +PKG_CHECK_MODULES([NIX], [nix-store]) + +NEED_PROG([NIX_INSTANTIATE_PROGRAM], [nix-instantiate]) + +# Get nix configure values +nixbindir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixBinDir | tr -d \") +nixlibexecdir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixLibexecDir | tr -d \") +nixlocalstatedir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixLocalstateDir | tr -d \") +nixsysconfdir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixSysconfDir | tr -d \") +nixstoredir=$("$NIX_INSTANTIATE_PROGRAM" --eval '' -A nixStoreDir | tr -d \") +AC_SUBST(nixbindir) +AC_SUBST(nixlibexecdir) +AC_SUBST(nixlocalstatedir) +AC_SUBST(nixsysconfdir) +AC_SUBST(nixstoredir) + +AC_SUBST(perlbindings, "yes") + +# Expand all variables in config.status. +test "$prefix" = NONE && prefix=$ac_default_prefix +test "$exec_prefix" = NONE && exec_prefix='${prefix}' +for name in $ac_subst_vars; do + declare $name="$(eval echo "${!name}")" + declare $name="$(eval echo "${!name}")" + declare $name="$(eval echo "${!name}")" +done + +rm -f Makefile.config +ln -s ../mk mk + +AC_CONFIG_FILES([]) +AC_OUTPUT diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in index 3575d99cb67..4f1dd967491 100644 --- a/perl/lib/Nix/Config.pm.in +++ b/perl/lib/Nix/Config.pm.in @@ -4,18 +4,18 @@ use MIME::Base64; $version = "@PACKAGE_VERSION@"; -$binDir = $ENV{"NIX_BIN_DIR"} || "@bindir@"; -$libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@libexecdir@"; -$stateDir = $ENV{"NIX_STATE_DIR"} || "@localstatedir@/nix"; -$logDir = $ENV{"NIX_LOG_DIR"} || "@localstatedir@/log/nix"; -$confDir = $ENV{"NIX_CONF_DIR"} || "@sysconfdir@/nix"; -$storeDir = $ENV{"NIX_STORE_DIR"} || "@storedir@"; +$binDir = $ENV{"NIX_BIN_DIR"} || "@nixbindir@"; +$libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@nixlibexecdir@"; +$stateDir = $ENV{"NIX_STATE_DIR"} || "@nixlocalstatedir@/nix"; +$logDir = $ENV{"NIX_LOG_DIR"} || "@nixlocalstatedir@/log/nix"; +$confDir = $ENV{"NIX_CONF_DIR"} || "@nixsysconfdir@/nix"; +$storeDir = $ENV{"NIX_STORE_DIR"} || "@nixstoredir@"; $bzip2 = "@bzip2@"; $xz = "@xz@"; $curl = "@curl@"; -$useBindings = "@perlbindings@" eq "yes"; +$useBindings = 1; %config = (); diff --git a/perl/local.mk b/perl/local.mk index 5b43c4b717f..1793ececfd6 100644 --- a/perl/local.mk +++ b/perl/local.mk @@ -1,10 +1,10 @@ nix_perl_sources := \ - $(d)/lib/Nix/Store.pm \ - $(d)/lib/Nix/Manifest.pm \ - $(d)/lib/Nix/SSH.pm \ - $(d)/lib/Nix/CopyClosure.pm \ - $(d)/lib/Nix/Config.pm.in \ - $(d)/lib/Nix/Utils.pm + lib/Nix/Store.pm \ + lib/Nix/Manifest.pm \ + lib/Nix/SSH.pm \ + lib/Nix/CopyClosure.pm \ + lib/Nix/Config.pm.in \ + lib/Nix/Utils.pm nix_perl_modules := $(nix_perl_sources:.in=) @@ -12,12 
+12,12 @@ $(foreach x, $(nix_perl_modules), $(eval $(call install-data-in, $(x), $(perllib ifeq ($(perlbindings), yes) - $(d)/lib/Nix/Store.cc: $(d)/lib/Nix/Store.xs + lib/Nix/Store.cc: lib/Nix/Store.xs $(trace-gen) xsubpp $^ -output $@ libraries += Store - Store_DIR := $(d)/lib/Nix + Store_DIR := lib/Nix Store_SOURCES := $(Store_DIR)/Store.cc @@ -25,11 +25,10 @@ ifeq ($(perlbindings), yes) -I$(shell $(perl) -e 'use Config; print $$Config{archlibexp};')/CORE \ -D_FILE_OFFSET_BITS=64 \ -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \ - -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion + -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion \ + $(NIX_CFLAGS) - Store_LIBS = libstore libutil - - Store_LDFLAGS := $(SODIUM_LIBS) + Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS) ifeq (CYGWIN,$(findstring CYGWIN,$(OS))) archlib = $(shell perl -E 'use Config; print $$Config{archlib};') @@ -45,4 +44,4 @@ ifeq ($(perlbindings), yes) endif -clean-files += $(d)/lib/Nix/Config.pm $(d)/lib/Nix/Store.cc +clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config diff --git a/release.nix b/release.nix index 7bfde71a644..b93b64ea923 100644 --- a/release.nix +++ b/release.nix @@ -41,6 +41,7 @@ let ''; preConfigure = '' + (cd perl ; autoreconf --install --force --verbose) # TeX needs a writable font cache. export VARTEXFONTS=$TMPDIR/texfonts ''; @@ -96,6 +97,32 @@ let }); + perl = pkgs.lib.genAttrs systems (system: + + let pkgs = import { inherit system; }; in with pkgs; + + releaseTools.nixBuild { + name = "nix-perl"; + src = tarball; + + buildInputs = + [ (builtins.getAttr system jobs.build) curl bzip2 xz pkgconfig pkgs.perl ] + ++ lib.optional stdenv.isLinux libsodium; + + configureFlags = '' + --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix} + --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix} + --with-www-curl=${perlPackages.WWWCurl}/${pkgs.perl.libPrefix} + ''; + + enableParallelBuilding = true; + + postUnpack = "sourceRoot=$sourceRoot/perl"; + + preBuild = "unset NIX_INDENT_MAKE"; + }); + + binaryTarball = pkgs.lib.genAttrs systems (system: # FIXME: temporarily use a different branch for the Darwin build. From 2b4c24f46a9a5ab5b0eb6d714efa73de201344f2 Mon Sep 17 00:00:00 2001 From: Janus Troelsen Date: Mon, 22 Aug 2016 15:01:58 +0200 Subject: [PATCH 0100/2196] Remove Perl dependency listing in doc --- doc/manual/installation/prerequisites-source.xml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/doc/manual/installation/prerequisites-source.xml b/doc/manual/installation/prerequisites-source.xml index cd6d61356ba..7311e4885e7 100644 --- a/doc/manual/installation/prerequisites-source.xml +++ b/doc/manual/installation/prerequisites-source.xml @@ -12,8 +12,6 @@ A version of GCC or Clang that supports C++11. - Perl 5.8 or higher. - pkg-config to locate dependencies. If your distribution does not provide it, you can get it from . - The Perl DBI and DBD::SQLite libraries, which are - available from CPAN if your - distribution does not provide them. 
- The Boehm garbage collector to reduce the evaluator’s memory From 81c53fe8e56f4a4ce10088fe2d7b6a524a6dc126 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Wed, 8 Feb 2017 20:29:37 +0200 Subject: [PATCH 0101/2196] configure.ac: We require C++14 now At least in the main Makefile we have: GLOBAL_CXXFLAGS += -std=c++14 -g -Wall --- configure.ac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index e6b11be2df1..bfe81840c3b 100644 --- a/configure.ac +++ b/configure.ac @@ -61,7 +61,7 @@ CFLAGS= CXXFLAGS= AC_PROG_CC AC_PROG_CXX -AX_CXX_COMPILE_STDCXX_11 +AX_CXX_COMPILE_STDCXX_14 # Use 64-bit file system calls so that we can support files > 2 GiB. From 2cd468874fe512387820bd47d23fa6351d069da2 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Wed, 8 Feb 2017 20:35:06 +0200 Subject: [PATCH 0102/2196] Include config.h implicitly with '-include config.h' in CFLAGS Because config.h can #define things like _FILE_OFFSET_BITS=64 and not every compilation unit includes config.h, we currently compile half of Nix with _FILE_OFFSET_BITS=64 and other half with _FILE_OFFSET_BITS unset. This causes major havoc with the Settings class on e.g. 32-bit ARM, where different compilation units disagree with the struct layout. E.g.: diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc @@ -166,6 +166,8 @@ void Settings::update() _get(useSubstitutes, "build-use-substitutes"); + fprintf(stderr, "at Settings::update(): &useSubstitutes = %p\n", &nix::settings.useSubstitutes); _get(buildUsersGroup, "build-users-group"); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -138,6 +138,8 @@ void RemoteStore::initConnection(Connection & conn) void RemoteStore::setOptions(Connection & conn) { + fprintf(stderr, "at RemoteStore::setOptions(): &useSubstitutes = %p\n", &nix::settings.useSubstitutes); conn.to << wopSetOptions Gave me: at Settings::update(): &useSubstitutes = 0xb6e5c5cb at RemoteStore::setOptions(): &useSubstitutes = 0xb6e5c5c7 That was not a fun one to debug! 
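The mismatch is easy to reproduce in isolation: on a 32-bit system, a struct with a field whose width depends on _FILE_OFFSET_BITS gets two different layouts depending on whether the translation unit saw the define. The sketch below uses a made-up ExampleSettings struct with an off_t member; the actual Settings class may hold different fields.

/* Build this twice on a 32-bit system, once with and once without
   -D_FILE_OFFSET_BITS=64, and compare the output: off_t grows from 4 to
   8 bytes, so the two translation units disagree about field offsets.
   ExampleSettings is a made-up struct for illustration only. */
#include <sys/types.h>
#include <cstdio>

struct ExampleSettings
{
    bool useSubstitutes;
    off_t maxLogSize;
};

int main()
{
    std::printf("sizeof(off_t) = %zu, sizeof(ExampleSettings) = %zu\n",
        sizeof(off_t), sizeof(ExampleSettings));
    return 0;
}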
--- Makefile | 2 +- src/libexpr/json-to-value.cc | 1 - src/libexpr/symbol-table.hh | 2 -- src/libexpr/value.hh | 1 - src/libmain/shared.cc | 2 -- src/libmain/stack.cc | 2 -- src/libstore/build.cc | 2 -- src/libstore/globals.cc | 2 -- src/libstore/local-store.cc | 1 - src/libstore/optimise-store.cc | 2 -- src/libstore/s3-binary-cache-store.cc | 2 -- src/libutil/archive.cc | 2 -- src/libutil/hash.cc | 2 -- src/libutil/types.hh | 1 - src/libutil/util.cc | 2 -- 15 files changed, 1 insertion(+), 25 deletions(-) diff --git a/Makefile b/Makefile index 62a4850d854..d26cf8d99d2 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ makefiles = \ doc/manual/local.mk \ tests/local.mk -GLOBAL_CXXFLAGS += -std=c++14 -g -Wall +GLOBAL_CXXFLAGS += -std=c++14 -g -Wall -include config.h -include Makefile.config diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index f671802bcc2..c189cdef35e 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -1,4 +1,3 @@ -#include "config.h" #include "json-to-value.hh" #include diff --git a/src/libexpr/symbol-table.hh b/src/libexpr/symbol-table.hh index 2fdf820211c..c2ee49dd32f 100644 --- a/src/libexpr/symbol-table.hh +++ b/src/libexpr/symbol-table.hh @@ -1,7 +1,5 @@ #pragma once -#include "config.h" - #include #include diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 271e6a1b24a..81f918d48de 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -1,6 +1,5 @@ #pragma once -#include "config.h" #include "symbol-table.hh" #if HAVE_BOEHMGC diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 52cb2312826..56aa3db0015 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include "common-args.hh" #include "globals.hh" #include "shared.hh" diff --git a/src/libmain/stack.cc b/src/libmain/stack.cc index abf59dc4baa..57b6a197c0f 100644 --- a/src/libmain/stack.cc +++ b/src/libmain/stack.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include "types.hh" #include diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 5d6fff4e349..1aee150fda3 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include "references.hh" #include "pathlocks.hh" #include "globals.hh" diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 00b46889252..90f83a5bbd9 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include "globals.hh" #include "util.hh" #include "archive.hh" diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 612efde7bb8..4c161cfb341 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1,4 +1,3 @@ -#include "config.h" #include "local-store.hh" #include "globals.hh" #include "archive.hh" diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index b71c7e905ff..cf234e35d37 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include "util.hh" #include "local-store.hh" #include "globals.hh" diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index ccb71f1eefe..cc1b3310420 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -1,5 +1,3 @@ -#include "config.h" - #if ENABLE_S3 #if __linux__ diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index fbba7f853f9..e0e6f5dfd73 100644 --- a/src/libutil/archive.cc +++ 
b/src/libutil/archive.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include #include #include diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index aa50fceb9e3..f447c80c5d8 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include #include diff --git a/src/libutil/types.hh b/src/libutil/types.hh index b9a93d27d2a..97d79af9b5d 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -1,6 +1,5 @@ #pragma once -#include "config.h" #include "ref.hh" diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6c4c5c969d8..33659936800 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1,5 +1,3 @@ -#include "config.h" - #include "util.hh" #include "affinity.hh" #include "sync.hh" From e2257d4eeb3f75bac57d9cb77c9ce06b702de050 Mon Sep 17 00:00:00 2001 From: Renzo Carbonara Date: Thu, 9 Feb 2017 18:16:09 +0100 Subject: [PATCH 0103/2196] Documentation. --- src/libstore/download.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 8f2387b6325..b2adc154818 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -230,8 +230,11 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); } + /* If no file exist in the specified path, curl continues to work + * anyway as if netrc support was disabled. */ Path netrcFile = settings.get("netrc-file", (format("%1%/%2%") % settings.nixConfDir % "netrc").str()); + /* Curl copies the given C string, so the following call is safe. */ curl_easy_setopt(req, CURLOPT_NETRC_FILE, netrcFile.c_str()); curl_easy_setopt(req, CURLOPT_NETRC, CURL_NETRC_OPTIONAL); From 649a81bcd6445d3b00f400cd6017d184bf0aaa25 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 13 Feb 2017 15:06:46 +0200 Subject: [PATCH 0104/2196] nix-daemon: Don't splice with len=SIZE_MAX Currently, 'nix-daemon --stdio' is always failing for me, due to the splice call always failing with (on a 32-bit host): splice(0, NULL, 3, NULL, 4294967295, SPLICE_F_MOVE) = -1 EINVAL (Invalid argument) With a bit of ftracing (and luck) the problem seems to be that splice() always fails with EINVAL if the len cast as ssize_t is negative: http://lxr.free-electrons.com/source/fs/read_write.c?v=4.4#L384 So use SSIZE_MAX instead of SIZE_MAX. 
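The relaying pattern, reduced to a hedged sketch (the descriptor names and error handling are made up; this is not the daemon code): capping the request at SSIZE_MAX keeps the length non-negative when the kernel reinterprets it as ssize_t, while still asking for "as much as possible".

    #include <sys/types.h>   // ssize_t
    #include <fcntl.h>       // splice(), SPLICE_F_MOVE (glibc exposes these under
                             // _GNU_SOURCE, which g++ defines by default)
    #include <climits>       // SSIZE_MAX
    #include <stdexcept>

    // Move whatever is pending from 'from_fd' to 'to_fd'. At least one of the
    // two descriptors must be a pipe for splice() to be applicable.
    // Returns false once the source reaches end-of-file.
    bool relay(int from_fd, int to_fd)
    {
        ssize_t res = splice(from_fd, nullptr, to_fd, nullptr,
                             SSIZE_MAX, SPLICE_F_MOVE);
        if (res == -1) throw std::runtime_error("splice failed");
        return res != 0;
    }

The two calls changed below follow the same shape, with res == 0 additionally reported as an unexpected EOF.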
--- src/nix-daemon/nix-daemon.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index f3ee0afc11e..3b43ddfa16d 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -23,6 +23,7 @@ #include #include #include +#include #if __APPLE__ || __FreeBSD__ #include @@ -967,14 +968,14 @@ int main(int argc, char * * argv) if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1) throw SysError("waiting for data from client or server"); if (FD_ISSET(s, &fds)) { - auto res = splice(s, nullptr, STDOUT_FILENO, nullptr, SIZE_MAX, SPLICE_F_MOVE); + auto res = splice(s, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE); if (res == -1) throw SysError("splicing data from daemon socket to stdout"); else if (res == 0) throw EndOfFile("unexpected EOF from daemon socket"); } if (FD_ISSET(STDIN_FILENO, &fds)) { - auto res = splice(STDIN_FILENO, nullptr, s, nullptr, SIZE_MAX, SPLICE_F_MOVE); + auto res = splice(STDIN_FILENO, nullptr, s, nullptr, SSIZE_MAX, SPLICE_F_MOVE); if (res == -1) throw SysError("splicing data from stdin to daemon socket"); else if (res == 0) From 9ff9c3f2f80ba4108e9c945bbfda2c64735f987b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 14 Feb 2017 14:20:00 +0100 Subject: [PATCH 0105/2196] Add support for s3:// URIs This adds support for s3:// URIs in all places where Nix allows URIs, e.g. in builtins.fetchurl, builtins.fetchTarball, and NIX_PATH. It allows fetching resources from private S3 buckets, using credentials obtained from the standard places (i.e. AWS_* environment variables, ~/.aws/credentials and the EC2 metadata server). This may not be super-useful in general, but since we already depend on aws-sdk-cpp, it's a cheap feature to add. --- src/libstore/download.cc | 28 ++++- src/libstore/download.hh | 2 +- src/libstore/s3-binary-cache-store.cc | 141 +++++++++++++++----------- src/libstore/s3.hh | 33 ++++++ src/libutil/logging.hh | 1 + 5 files changed, 142 insertions(+), 63 deletions(-) create mode 100644 src/libstore/s3.hh diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 074e0ca6642..85215439a0f 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -4,6 +4,7 @@ #include "hash.hh" #include "store-api.hh" #include "archive.hh" +#include "s3.hh" #include #include @@ -480,6 +481,31 @@ struct CurlDownloader : public Downloader std::function success, std::function failure) override { + /* Ugly hack to support s3:// URIs. 
*/ + if (hasPrefix(request.uri, "s3://")) { + // FIXME: do this on a worker thread + sync2async(success, failure, [&]() { +#ifdef ENABLE_S3 + S3Helper s3Helper; + auto slash = request.uri.find('/', 5); + if (slash == std::string::npos) + throw nix::Error("bad S3 URI ‘%s’", request.uri); + std::string bucketName(request.uri, 5, slash - 5); + std::string key(request.uri, slash + 1); + // FIXME: implement ETag + auto s3Res = s3Helper.getObject(bucketName, key); + DownloadResult res; + if (!s3Res.data) + throw DownloadError(NotFound, fmt("S3 object ‘%s’ does not exist", request.uri)); + res.data = s3Res.data; + return res; +#else + throw nix::Error("cannot download ‘%s’ because Nix is not built with S3 support", request.uri); +#endif + }); + return; + } + auto item = std::make_shared(*this, request); item->success = success; item->failure = failure; @@ -629,7 +655,7 @@ bool isUri(const string & s) size_t pos = s.find("://"); if (pos == string::npos) return false; string scheme(s, 0, pos); - return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git"; + return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3"; } diff --git a/src/libstore/download.hh b/src/libstore/download.hh index 82b5d641fde..bdb5011e783 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -23,7 +23,7 @@ struct DownloadRequest struct DownloadResult { - bool cached; + bool cached = false; std::string etag; std::string effectiveUrl; std::shared_ptr data; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index cc1b3310420..ac083410b35 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -1,6 +1,6 @@ #if ENABLE_S3 -#if __linux__ +#include "s3.hh" #include "s3-binary-cache-store.hh" #include "nar-info.hh" #include "nar-info-disk-cache.hh" @@ -18,15 +18,6 @@ namespace nix { -struct istringstream_nocopy : public std::stringstream -{ - istringstream_nocopy(const std::string & s) - { - rdbuf()->pubsetbuf( - (char *) s.data(), s.size()); - } -}; - struct S3Error : public Error { Aws::S3::S3Errors err; @@ -60,21 +51,81 @@ static void initAWS() }); } +S3Helper::S3Helper() + : config(makeConfig()) + , client(make_ref(*config)) +{ +} + +ref S3Helper::makeConfig() +{ + initAWS(); + auto res = make_ref(); + res->region = Aws::Region::US_EAST_1; // FIXME: make configurable + res->requestTimeoutMs = 600 * 1000; + return res; +} + +S3Helper::DownloadResult S3Helper::getObject( + const std::string & bucketName, const std::string & key) +{ + debug("fetching ‘s3://%s/%s’...", bucketName, key); + + auto request = + Aws::S3::Model::GetObjectRequest() + .WithBucket(bucketName) + .WithKey(key); + + request.SetResponseStreamFactory([&]() { + return Aws::New("STRINGSTREAM"); + }); + + DownloadResult res; + + auto now1 = std::chrono::steady_clock::now(); + + try { + + auto result = checkAws(fmt("AWS error fetching ‘%s’", key), + client->GetObject(request)); + + res.data = std::make_shared( + dynamic_cast(result.GetBody()).str()); + + } catch (S3Error & e) { + if (e.err != Aws::S3::S3Errors::NO_SUCH_KEY) throw; + } + + auto now2 = std::chrono::steady_clock::now(); + + res.durationMs = std::chrono::duration_cast(now2 - now1).count(); + + return res; +} + +#if __linux__ + +struct istringstream_nocopy : public std::stringstream +{ + istringstream_nocopy(const std::string & s) + { + rdbuf()->pubsetbuf( + (char *) s.data(), s.size()); + } +}; + 
struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { std::string bucketName; - ref config; - ref client; - Stats stats; + S3Helper s3Helper; + S3BinaryCacheStoreImpl( const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) - , config(makeConfig()) - , client(make_ref(*config)) { diskCache = getNarInfoDiskCache(); } @@ -84,15 +135,6 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore return "s3://" + bucketName; } - ref makeConfig() - { - initAWS(); - auto res = make_ref(); - res->region = Aws::Region::US_EAST_1; // FIXME: make configurable - res->requestTimeoutMs = 600 * 1000; - return res; - } - void init() override { if (!diskCache->cacheExists(getUri(), wantMassQuery_, priority)) { @@ -100,7 +142,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore /* Create the bucket if it doesn't already exists. */ // FIXME: HeadBucket would be more appropriate, but doesn't return // an easily parsed 404 message. - auto res = client->GetBucketLocation( + auto res = s3Helper.client->GetBucketLocation( Aws::S3::Model::GetBucketLocationRequest().WithBucket(bucketName)); if (!res.IsSuccess()) { @@ -108,7 +150,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore throw Error(format("AWS error checking bucket ‘%s’: %s") % bucketName % res.GetError().GetMessage()); checkAws(format("AWS error creating bucket ‘%s’") % bucketName, - client->CreateBucket( + s3Helper.client->CreateBucket( Aws::S3::Model::CreateBucketRequest() .WithBucket(bucketName) .WithCreateBucketConfiguration( @@ -146,7 +188,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { stats.head++; - auto res = client->HeadObject( + auto res = s3Helper.client->HeadObject( Aws::S3::Model::HeadObjectRequest() .WithBucket(bucketName) .WithKey(path)); @@ -179,7 +221,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore auto now1 = std::chrono::steady_clock::now(); auto result = checkAws(format("AWS error uploading ‘%s’") % path, - client->PutObject(request)); + s3Helper.client->PutObject(request)); auto now2 = std::chrono::steady_clock::now(); @@ -198,42 +240,18 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore sync2async>(success, failure, [&]() { debug(format("fetching ‘s3://%1%/%2%’...") % bucketName % path); - auto request = - Aws::S3::Model::GetObjectRequest() - .WithBucket(bucketName) - .WithKey(path); - - request.SetResponseStreamFactory([&]() { - return Aws::New("STRINGSTREAM"); - }); - stats.get++; - try { - - auto now1 = std::chrono::steady_clock::now(); - - auto result = checkAws(format("AWS error fetching ‘%s’") % path, - client->GetObject(request)); - - auto now2 = std::chrono::steady_clock::now(); + auto res = s3Helper.getObject(bucketName, path); - auto res = dynamic_cast(result.GetBody()).str(); + stats.getBytes += res.data ? 
res.data->size() : 0; + stats.getTimeMs += res.durationMs; - auto duration = std::chrono::duration_cast(now2 - now1).count(); + if (res.data) + printTalkative("downloaded ‘s3://%s/%s’ (%d bytes) in %d ms", + bucketName, path, res.data->size(), res.durationMs); - printMsg(lvlTalkative, format("downloaded ‘s3://%1%/%2%’ (%3% bytes) in %4% ms") - % bucketName % path % res.size() % duration); - - stats.getBytes += res.size(); - stats.getTimeMs += duration; - - return std::make_shared(res); - - } catch (S3Error & e) { - if (e.err == Aws::S3::S3Errors::NO_SUCH_KEY) return std::shared_ptr(); - throw; - } + return res.data; }); } @@ -246,7 +264,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore debug(format("listing bucket ‘s3://%s’ from key ‘%s’...") % bucketName % marker); auto res = checkAws(format("AWS error listing bucket ‘%s’") % bucketName, - client->ListObjects( + s3Helper.client->ListObjects( Aws::S3::Model::ListObjectsRequest() .WithBucket(bucketName) .WithDelimiter("/") @@ -281,7 +299,8 @@ static RegisterStoreImplementation regStore([]( return store; }); +#endif + } #endif -#endif diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh new file mode 100644 index 00000000000..5d5d3475c44 --- /dev/null +++ b/src/libstore/s3.hh @@ -0,0 +1,33 @@ +#pragma once + +#if ENABLE_S3 + +#include "ref.hh" + +namespace Aws { namespace Client { class ClientConfiguration; } } +namespace Aws { namespace S3 { class S3Client; } } + +namespace nix { + +struct S3Helper +{ + ref config; + ref client; + + S3Helper(); + + ref makeConfig(); + + struct DownloadResult + { + std::shared_ptr data; + unsigned int durationMs; + }; + + DownloadResult getObject( + const std::string & bucketName, const std::string & key); +}; + +} + +#endif diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 3e6c4b54853..3f83664794f 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -78,6 +78,7 @@ extern Verbosity verbosity; /* suppress msgs > this */ #define printError(args...) printMsg(lvlError, args) #define printInfo(args...) printMsg(lvlInfo, args) +#define printTalkative(args...) printMsg(lvlTalkative, args) #define debug(args...) printMsg(lvlDebug, args) #define vomit(args...) printMsg(lvlVomit, args) From 40f0e3b366458a6f1bc09a366a218abf13a1af1e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Feb 2017 13:55:43 +0100 Subject: [PATCH 0106/2196] Include "curl" in the User-Agent header Some sites (e.g. BitBucket) give a helpful 401 error when trying to download a private archive if the User-Agent contains "curl", but give a redirect to a login page otherwise (so for instance "nix-prefetch-url" will succeed but produce useless output). 
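For reference, the same setting written as a free-standing helper (the function name is invented for this sketch; the real change is the one-line diff below). libcurl has copied string options since 7.17.0, so handing it the c_str() of a temporary is safe.

    #include <curl/curl.h>
    #include <string>

    // Make the User-Agent look like "curl/<libcurl version> Nix/<nix version>",
    // so servers that special-case "curl" (such as the BitBucket behaviour
    // described above) answer with a plain 401 instead of a login page.
    void setNixUserAgent(CURL * req, const std::string & nixVersion)
    {
        const std::string ua = "curl/" LIBCURL_VERSION " Nix/" + nixVersion;
        curl_easy_setopt(req, CURLOPT_USERAGENT, ua.c_str());
    }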
--- src/libstore/download.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index f93fb1e968a..d65ac7b873d 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -201,7 +201,7 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str()); curl_easy_setopt(req, CURLOPT_FOLLOWLOCATION, 1L); curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1); - curl_easy_setopt(req, CURLOPT_USERAGENT, ("Nix/" + nixVersion).c_str()); + curl_easy_setopt(req, CURLOPT_USERAGENT, ("curl/" LIBCURL_VERSION " Nix/" + nixVersion).c_str()); #if LIBCURL_VERSION_NUM >= 0x072b00 curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1); #endif From b63f79175e128f8ae394d2be7d65999263eebe27 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Feb 2017 14:06:47 +0100 Subject: [PATCH 0107/2196] : Remove unnecessary assertion --- corepkgs/fetchurl.nix | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix index 042705b1abb..62359433971 100644 --- a/corepkgs/fetchurl.nix +++ b/corepkgs/fetchurl.nix @@ -1,24 +1,20 @@ { system ? builtins.currentSystem , url -, outputHash ? "" -, outputHashAlgo ? "" , md5 ? "", sha1 ? "", sha256 ? "" +, outputHash ? + if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 +, outputHashAlgo ? + if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" , executable ? false , unpack ? false , name ? baseNameOf (toString url) }: -assert (outputHash != "" && outputHashAlgo != "") - || md5 != "" || sha1 != "" || sha256 != ""; - derivation { builder = "builtin:fetchurl"; # New-style output content requirements. - outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else - if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5"; - outputHash = if outputHash != "" then outputHash else - if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5; + inherit outputHashAlgo outputHash; outputHashMode = if unpack || executable then "recursive" else "flat"; inherit name system url executable unpack; From bd5388e7b22daa2d22c21578ef5735be8b8353a6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Feb 2017 14:23:30 +0100 Subject: [PATCH 0108/2196] Tweak netrc docs --- doc/manual/command-ref/conf-file.xml | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index a7d60538c21..0f7a2deba04 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -437,10 +437,18 @@ flag, e.g. --option gc-keep-outputs false. trying to download from a remote host through HTTP or HTTPS. Defaults to $NIX_CONF_DIR/netrc. - The netrc file consists of zero or more lines - like: machine my-machine login - my-username password - my-password. + The netrc file consists of a list of + accounts in the following format: + + +machine my-machine +login my-username +password my-password + + + For the exact syntax, see the + curl documentation. From cde4b609192d11dc299ea3c27d7f92735f161db1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Feb 2017 14:46:36 +0100 Subject: [PATCH 0109/2196] Move netrcFile to Settings Also get rid of Settings::processEnvironment(), it appears to be useless. 
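The pattern in miniature, as a hedged stand-alone sketch (MiniSettings and the "/etc/nix" default are invented; the real default comes from NIX_CONF_DIR): defaults that depend on other settings are computed once in the constructor, and callers then read the typed field instead of doing a string lookup with an inline fallback at every call site.

    #include <cstdio>
    #include <string>

    struct MiniSettings
    {
        std::string nixConfDir = "/etc/nix";
        std::string netrcFile;
        MiniSettings() { netrcFile = nixConfDir + "/netrc"; }
    };

    int main()
    {
        MiniSettings settings;
        // Consumers such as the downloader just read the member directly,
        // e.g. settings.netrcFile.c_str().
        std::printf("%s\n", settings.netrcFile.c_str());
        return 0;
    }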
--- perl/lib/Nix/Store.xs | 1 - src/libmain/shared.cc | 1 - src/libstore/download.cc | 7 ++----- src/libstore/globals.cc | 36 +++++++++++++++++------------------- src/libstore/globals.hh | 6 ++++-- 5 files changed, 23 insertions(+), 28 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 6b137a13c41..f613e3df329 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -26,7 +26,6 @@ static ref store() if (!_store) { try { logger = makeDefaultLogger(); - settings.processEnvironment(); settings.loadConfFile(); settings.update(); settings.lockCPU = false; diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 56aa3db0015..53fa83fe0de 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -112,7 +112,6 @@ void initNix() opensslLocks = std::vector(CRYPTO_num_locks()); CRYPTO_set_locking_callback(opensslLockCallback); - settings.processEnvironment(); settings.loadConfFile(); startSignalHandlerThread(); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index d65ac7b873d..d301d4409b0 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -232,11 +232,8 @@ struct CurlDownloader : public Downloader } /* If no file exist in the specified path, curl continues to work - * anyway as if netrc support was disabled. */ - Path netrcFile = settings.get("netrc-file", - (format("%1%/%2%") % settings.nixConfDir % "netrc").str()); - /* Curl copies the given C string, so the following call is safe. */ - curl_easy_setopt(req, CURLOPT_NETRC_FILE, netrcFile.c_str()); + anyway as if netrc support was disabled. */ + curl_easy_setopt(req, CURLOPT_NETRC_FILE, settings.netrcFile.c_str()); curl_easy_setopt(req, CURLOPT_NETRC, CURL_NETRC_OPTIONAL); result.data = std::make_shared(); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 90f83a5bbd9..474288b7812 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -23,6 +23,21 @@ Settings settings; Settings::Settings() { + nixPrefix = NIX_PREFIX; + nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR))); + nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR)); + nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR)); + nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR)); + nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR)); + nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR)); + nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR)); + nixDaemonSocketFile = canonPath(nixStateDir + DEFAULT_SOCKET_PATH); + + // should be set with the other config options, but depends on nixLibexecDir +#ifdef __APPLE__ + preBuildHook = nixLibexecDir + "/nix/resolve-system-dependencies"; +#endif + keepFailed = false; keepGoing = false; tryFallback = false; @@ -57,25 +72,7 @@ Settings::Settings() lockCPU = getEnv("NIX_AFFINITY_HACK", "1") == "1"; showTrace = false; enableImportNative = false; -} - - -void Settings::processEnvironment() -{ - nixPrefix = NIX_PREFIX; - nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR))); - nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR)); - nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR)); - nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR)); - nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR)); - nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR)); - nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR)); - nixDaemonSocketFile = canonPath(nixStateDir + DEFAULT_SOCKET_PATH); - - 
// should be set with the other config options, but depends on nixLibexecDir -#ifdef __APPLE__ - preBuildHook = nixLibexecDir + "/nix/resolve-system-dependencies"; -#endif + netrcFile = fmt("%s/%s", nixConfDir, "netrc"); } @@ -183,6 +180,7 @@ void Settings::update() _get(preBuildHook, "pre-build-hook"); _get(keepGoing, "keep-going"); _get(keepFailed, "keep-failed"); + _get(netrcFile, "netrc-file"); } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index a423b4e5c0f..0ff18f8b16e 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -16,8 +16,6 @@ struct Settings { Settings(); - void processEnvironment(); - void loadConfFile(); void set(const string & name, const string & value); @@ -193,6 +191,10 @@ struct Settings { build settings */ Path preBuildHook; + /* Path to the netrc file used to obtain usernames/passwords for + downloads. */ + Path netrcFile; + private: SettingsMap settings, overrides; From 302386f775eea309679654e5ea7c972fb6e7b9af Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Feb 2017 15:42:49 +0100 Subject: [PATCH 0110/2196] Support netrc in This allows to fetch private Git/Mercurial repositories, e.g. import { url = https://edolstra@bitbucket.org/edolstra/my-private-repo/get/80a14018daed.tar.bz2; sha256 = "1mgqzn7biqkq3hf2697b0jc4wabkqhmzq2srdymjfa6sb9zb6qs7"; } where /etc/nix/netrc contains: machine bitbucket.org login edolstra password blabla... This works even when sandboxing is enabled. To do: add unpacking support (i.e. fetchzip functionality). --- src/libstore/build.cc | 14 ++++++++++++-- src/libstore/builtins.cc | 10 +++++++++- src/libstore/builtins.hh | 2 +- src/libutil/util.cc | 4 ++-- src/libutil/util.hh | 2 +- 5 files changed, 25 insertions(+), 7 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1aee150fda3..1ce23135fc3 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2307,6 +2307,14 @@ void DerivationGoal::runChild() bool setUser = true; + /* Make the contents of netrc available to builtin:fetchurl + (which may run under a different uid and/or in a sandbox). */ + std::string netrcData; + try { + if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") + netrcData = readFile(settings.netrcFile); + } catch (SysError &) { } + #if __linux__ if (useChroot) { @@ -2675,7 +2683,7 @@ void DerivationGoal::runChild() if (drv->isBuiltin()) { try { if (drv->builder == "builtin:fetchurl") - builtinFetchurl(*drv); + builtinFetchurl(*drv, netrcData); else throw Error(format("unsupported builtin function ‘%1%’") % string(drv->builder, 8)); _exit(0); @@ -3072,7 +3080,9 @@ void DerivationGoal::closeLogFile() void DerivationGoal::deleteTmpDir(bool force) { if (tmpDir != "") { - if (settings.keepFailed && !force) { + /* Don't keep temporary directories for builtins because they + might have privileged stuff (like a copy of netrc). */ + if (settings.keepFailed && !force && !drv->isBuiltin()) { printError( format("note: keeping build directory ‘%2%’") % drvPath % tmpDir); diff --git a/src/libstore/builtins.cc b/src/libstore/builtins.cc index a30f30906f0..c5dbd57f8bc 100644 --- a/src/libstore/builtins.cc +++ b/src/libstore/builtins.cc @@ -6,8 +6,16 @@ namespace nix { -void builtinFetchurl(const BasicDerivation & drv) +void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) { + /* Make the host's netrc data available. Too bad curl requires + this to be stored in a file. It would be nice if we could just + pass a pointer to the data. 
*/ + if (netrcData != "") { + settings.netrcFile = "netrc"; + writeFile(settings.netrcFile, netrcData, 0600); + } + auto getAttr = [&](const string & name) { auto i = drv.env.find(name); if (i == drv.env.end()) throw Error(format("attribute ‘%s’ missing") % name); diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index 4b2431aa08c..0cc6ba31f65 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -4,6 +4,6 @@ namespace nix { -void builtinFetchurl(const BasicDerivation & drv); +void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData); } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 33659936800..0a5f796e4ea 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -288,9 +288,9 @@ string readFile(const Path & path, bool drain) } -void writeFile(const Path & path, const string & s) +void writeFile(const Path & path, const string & s, mode_t mode) { - AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, 0666); + AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode); if (!fd) throw SysError(format("opening file ‘%1%’") % path); writeFull(fd.get(), s); diff --git a/src/libutil/util.hh b/src/libutil/util.hh index cfaaf1486e9..2950f7daa5e 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -89,7 +89,7 @@ string readFile(int fd); string readFile(const Path & path, bool drain = false); /* Write a string to a file. */ -void writeFile(const Path & path, const string & s); +void writeFile(const Path & path, const string & s, mode_t mode = 0666); /* Read a line from a file descriptor. */ string readLine(int fd); From b8564987a3d9455ee779ca1dd63b1dc9f8ee72d9 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Sun, 19 Feb 2017 18:54:18 +0000 Subject: [PATCH 0111/2196] Document toString better --- doc/manual/expressions/builtins.xml | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 063bc04be48..6c38941c01b 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -1023,10 +1023,18 @@ in foo Convert the expression e to a string. - e can be a string (in which case - toString is a no-op), a path (e.g., - toString /foo/bar yields - "/foo/bar" or a set containing { __toString = self: ...; }. + e can be: + + a string (in which case the string is returned unmodified) + a path (e.g., toString /foo/bar yields "/foo/bar" + a set containing { __toString = self: ...; } + an integer + a list, in which case the string representations of its elements are joined with spaces + a boolean (false yields "", true yields "1" + null, which yields the empty string. + + + From df66d346dfc3c1d1136256ea58d0419d12599a50 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 11:42:38 +0100 Subject: [PATCH 0112/2196] Log AWS retries --- src/libstore/s3-binary-cache-store.cc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index ac083410b35..041c68c6816 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -8,6 +8,7 @@ #include #include +#include #include #include #include @@ -57,12 +58,25 @@ S3Helper::S3Helper() { } +/* Log AWS retries. 
*/ +class RetryStrategy : public Aws::Client::DefaultRetryStrategy +{ + long CalculateDelayBeforeNextRetry(const Aws::Client::AWSError& error, long attemptedRetries) const override + { + auto res = Aws::Client::DefaultRetryStrategy::CalculateDelayBeforeNextRetry(error, attemptedRetries); + printError("AWS error '%s' (%s), will retry in %d ms", + error.GetExceptionName(), error.GetMessage(), res); + return res; + } +}; + ref S3Helper::makeConfig() { initAWS(); auto res = make_ref(); res->region = Aws::Region::US_EAST_1; // FIXME: make configurable res->requestTimeoutMs = 600 * 1000; + res->retryStrategy = std::make_shared(); return res; } From 79f4583f8a3facdd6422ab8c8530a4f5295e1045 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 13:04:31 +0100 Subject: [PATCH 0113/2196] Fix XML validity --- doc/manual/expressions/builtins.xml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 6c38941c01b..e9baff65961 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -1023,17 +1023,16 @@ in foo Convert the expression e to a string. - e can be: + e can be: - a string (in which case the string is returned unmodified) - a path (e.g., toString /foo/bar yields "/foo/bar" - a set containing { __toString = self: ...; } - an integer - a list, in which case the string representations of its elements are joined with spaces - a boolean (false yields "", true yields "1" - null, which yields the empty string. + A string (in which case the string is returned unmodified). + A path (e.g., toString /foo/bar yields "/foo/bar". + A set containing { __toString = self: ...; }. + An integer. + A list, in which case the string representations of its elements are joined with spaces. + A Boolean (false yields "", true yields "1". + null, which yields the empty string. - From 99bbddedb1e70fc885e05cb398d8bfb438b039ed Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 13:15:07 +0100 Subject: [PATCH 0114/2196] Fix building without S3 support http://hydra.nixos.org/build/49031196/nixlog/2/raw --- src/libstore/download.cc | 2 +- src/libstore/legacy-ssh-store.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index d301d4409b0..0ca41b15bf7 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -489,7 +489,7 @@ struct CurlDownloader : public Downloader /* Ugly hack to support s3:// URIs. 
*/ if (hasPrefix(request.uri, "s3://")) { // FIXME: do this on a worker thread - sync2async(success, failure, [&]() { + sync2async(success, failure, [&]() -> DownloadResult { #ifdef ENABLE_S3 S3Helper s3Helper; auto slash = request.uri.find('/', 5); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 5d9e5aad6e0..b20ff185f9b 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -225,7 +225,7 @@ struct LegacySSHStore : public Store void collectGarbage(const GCOptions & options, GCResults & results) override { unsupported(); } - ref getFSAccessor() + ref getFSAccessor() override { unsupported(); } void addSignatures(const Path & storePath, const StringSet & sigs) override From bb6656b8a259f2ad0f90536ee32bc3080e466f1a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 14:26:23 +0100 Subject: [PATCH 0115/2196] Build RPMs for Fedora 25 Disabled hardened build because it makes the linker fail with messages like relocation R_X86_64_PC32 against undefined symbol `BZ2_bzWriteOpen' can not be used when making a shared object; recompile with -fPIC See https://fedoraproject.org/wiki/Changes/Harden_All_Packages. --- nix.spec.in | 2 ++ release.nix | 8 ++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/nix.spec.in b/nix.spec.in index 401a2dc8a1f..2447f13a8f8 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -1,3 +1,5 @@ +%undefine _hardened_build + %global nixbld_user "nix-builder-" %global nixbld_group "nixbld" diff --git a/release.nix b/release.nix index ace0f9cc89a..8d71cc841c7 100644 --- a/release.nix +++ b/release.nix @@ -171,8 +171,8 @@ let }; - rpm_fedora21i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora21i386) [ "libsodium-devel" ]; - rpm_fedora21x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora21x86_64) [ "libsodium-devel" ]; + rpm_fedora25i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora25i386) [ "libsodium-devel" ]; + rpm_fedora25x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora25x86_64) [ "libsodium-devel" ]; deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" ]; @@ -256,8 +256,8 @@ let deb_debian8x86_64 deb_ubuntu1504i386 deb_ubuntu1504x86_64 - rpm_fedora21i386 - rpm_fedora21x86_64 + rpm_fedora25i386 + rpm_fedora25x86_64 tests.remoteBuilds tests.nix-copy-closure tests.binaryTarball From e4dd7dadf4f52a2afef0fcd826d8c85ec13cd904 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 14:52:36 +0100 Subject: [PATCH 0116/2196] RPM build: Use parallel make --- nix.spec.in | 2 +- release.nix | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nix.spec.in b/nix.spec.in index 2447f13a8f8..0c9b9ab2013 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -107,7 +107,7 @@ extraFlags= %configure --localstatedir=/nix/var \ --docdir=%{_defaultdocdir}/%{name}-doc-%{version} \ $extraFlags -make %{?_smp_flags} +make -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES %{_emacs_bytecompile} misc/emacs/nix-mode.el diff --git a/release.nix b/release.nix index 8d71cc841c7..0b5b5579b26 100644 --- a/release.nix +++ b/release.nix @@ -284,9 +284,10 @@ let { extraPackages = [ "perl-DBD-SQLite" "perl-devel" "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] ++ extraPackages; }; - memSize = 1024; + memSize = 8192; meta.schedulingPriority = 50; postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; + enableParallelBuilding = true; }; From 
b95ce3194dc431be2da9d79aecd5a7865fb5d486 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 14:56:50 +0100 Subject: [PATCH 0117/2196] Debian build: Use parallel make and add Ubuntu 16.10 --- release.nix | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 0b5b5579b26..91573276129 100644 --- a/release.nix +++ b/release.nix @@ -186,6 +186,8 @@ let deb_ubuntu1510x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1510x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; deb_ubuntu1604i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1604i386) [ "libsodium-dev" ] [ "libsodium18" ]; deb_ubuntu1604x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1604x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; + deb_ubuntu1610i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1610i386) [ "libsodium-dev" ] [ "libsodium18" ]; + deb_ubuntu1610x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1610x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; # System tests. @@ -306,7 +308,7 @@ let { extraPackages = [ "libdbd-sqlite3-perl" "libsqlite3-dev" "libbz2-dev" "libwww-curl-perl" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] ++ extraPackages; }; - memSize = 1024; + memSize = 8192; meta.schedulingPriority = 50; postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; @@ -315,6 +317,7 @@ let ++ extraDebPackages; debMaintainer = "Eelco Dolstra "; doInstallCheck = true; + enableParallelBuilding = true; }; From c0a133876ec58d902cf0dd74e550af50eb5d1832 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 15:01:42 +0100 Subject: [PATCH 0118/2196] Revert "configure.ac: We require C++14 now" This reverts commit 81c53fe8e56f4a4ce10088fe2d7b6a524a6dc126. This check appears to be stricter than we need (it broke a bunch of platforms that previously did build: http://hydra.nixos.org/eval/1331921#tabs-now-fail). --- configure.ac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index bfe81840c3b..e6b11be2df1 100644 --- a/configure.ac +++ b/configure.ac @@ -61,7 +61,7 @@ CFLAGS= CXXFLAGS= AC_PROG_CC AC_PROG_CXX -AX_CXX_COMPILE_STDCXX_14 +AX_CXX_COMPILE_STDCXX_11 # Use 64-bit file system calls so that we can support files > 2 GiB. 
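For context, the reverted macro requires the compiler to accept a battery of C++14 constructs roughly like the following (an illustrative sample, not the actual autoconf probe), which is apparently stricter than what the -std=c++14 code in this tree needs on the platforms that stopped building.

    // A few of the language features a full C++14 conformance check exercises.
    auto square(int x) { return x * x; }            // return type deduction

    int main()
    {
        auto twice = [](auto x) { return x + x; };  // generic lambda
        int n = 0b1010'0001;                        // binary literal with a
                                                    // digit separator
        return twice(square(2)) + n == 169 ? 0 : 1; // exits 0 on success
    }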
From 1a57f499b06b6b37cdf98787d38614f1449d1dd7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 15:19:54 +0100 Subject: [PATCH 0119/2196] Drop some Ubuntu releases --- release.nix | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/release.nix b/release.nix index 91573276129..dc49b20ccd6 100644 --- a/release.nix +++ b/release.nix @@ -7,7 +7,7 @@ let pkgs = import {}; - systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" /* "x86_64-freebsd" "i686-freebsd" */ ]; + systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" ]; jobs = rec { @@ -180,10 +180,6 @@ let deb_ubuntu1410i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1410i386) [] []; deb_ubuntu1410x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1410x86_64) [] []; - deb_ubuntu1504i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1504i386) [ "libsodium-dev" ] [ "libsodium13" ]; - deb_ubuntu1504x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1504x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; - deb_ubuntu1510i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1510i386) [ "libsodium-dev" ] [ "libsodium13"]; - deb_ubuntu1510x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1510x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; deb_ubuntu1604i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1604i386) [ "libsodium-dev" ] [ "libsodium18" ]; deb_ubuntu1604x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1604x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; deb_ubuntu1610i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1610i386) [ "libsodium-dev" ] [ "libsodium18" ]; @@ -244,20 +240,16 @@ let meta.description = "Release-critical builds"; constituents = [ tarball - #build.i686-freebsd build.i686-linux build.x86_64-darwin - #build.x86_64-freebsd build.x86_64-linux - #binaryTarball.i686-freebsd binaryTarball.i686-linux binaryTarball.x86_64-darwin - #binaryTarball.x86_64-freebsd binaryTarball.x86_64-linux deb_debian8i386 deb_debian8x86_64 - deb_ubuntu1504i386 - deb_ubuntu1504x86_64 + deb_ubuntu1604i386 + deb_ubuntu1604x86_64 rpm_fedora25i386 rpm_fedora25x86_64 tests.remoteBuilds From 8d7c6644c592b7d7f31e23d8915f0b634c191dd3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 15:21:56 +0100 Subject: [PATCH 0120/2196] useChroot -> useSandbox --- tests/remote-builds.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index d14d6ff7f05..63aaa4d88f5 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -14,7 +14,7 @@ let { services.openssh.enable = true; virtualisation.writableStore = true; nix.package = nix; - nix.useChroot = true; + nix.useSandbox = true; }; # Trivial Nix expression to build remotely. 
From 8b1b5f9a12d4b5196be791118c58ae253ba02d96 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Feb 2017 16:04:47 +0100 Subject: [PATCH 0121/2196] Handle CURLE_RECV_ERROR as a transient error MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fixes unable to download ‘https://cache.nixos.org/nar/077h8ji74y9b0qx7rjk71xd80vjqp6q5gy137r553jlvdlxdcdlk.nar.xz’: HTTP error 200 (curl error: Failure when receiving data from the peer) --- src/libstore/download.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 0ca41b15bf7..25ccd7d0b52 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -273,7 +273,7 @@ struct CurlDownloader : public Downloader httpStatus == 403 ? Forbidden : (httpStatus == 408 || httpStatus == 500 || httpStatus == 503 || httpStatus == 504 || httpStatus == 522 || httpStatus == 524 - || code == CURLE_COULDNT_RESOLVE_HOST) ? Transient : + || code == CURLE_COULDNT_RESOLVE_HOST || code == CURLE_RECV_ERROR) ? Transient : Misc; attempt++; From b8ce649a352e915a653850767b519b9ee7a6ebc2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Feb 2017 13:54:11 +0100 Subject: [PATCH 0122/2196] Fix 32-bit RPM/Deb builds http://hydra.nixos.org/build/49130529 --- release.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/release.nix b/release.nix index dc49b20ccd6..854650bb905 100644 --- a/release.nix +++ b/release.nix @@ -278,10 +278,10 @@ let { extraPackages = [ "perl-DBD-SQLite" "perl-devel" "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] ++ extraPackages; }; - memSize = 8192; + #memSize = 8192; meta.schedulingPriority = 50; postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; - enableParallelBuilding = true; + #enableParallelBuilding = true; }; @@ -300,7 +300,7 @@ let { extraPackages = [ "libdbd-sqlite3-perl" "libsqlite3-dev" "libbz2-dev" "libwww-curl-perl" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] ++ extraPackages; }; - memSize = 8192; + #memSize = 8192; meta.schedulingPriority = 50; postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; @@ -309,7 +309,7 @@ let ++ extraDebPackages; debMaintainer = "Eelco Dolstra "; doInstallCheck = true; - enableParallelBuilding = true; + #enableParallelBuilding = true; }; From fe2db1dae59f379eda793da952425a8331139c65 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Feb 2017 15:39:17 +0100 Subject: [PATCH 0123/2196] Doh --- release.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/release.nix b/release.nix index 854650bb905..78f96196399 100644 --- a/release.nix +++ b/release.nix @@ -278,7 +278,7 @@ let { extraPackages = [ "perl-DBD-SQLite" "perl-devel" "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] ++ extraPackages; }; - #memSize = 8192; + memSize = 1024; meta.schedulingPriority = 50; postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; #enableParallelBuilding = true; @@ -300,7 +300,7 @@ let { extraPackages = [ "libdbd-sqlite3-perl" "libsqlite3-dev" "libbz2-dev" "libwww-curl-perl" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] ++ extraPackages; }; - #memSize = 8192; + memSize = 1024; meta.schedulingPriority = 50; postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; From f023f64f40809afa8a8e3a5e7081209cbcfd2e7e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 
Feb 2017 16:31:04 +0100 Subject: [PATCH 0124/2196] RemoteStore::addToStore(): Pass content-addressability assertion MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ... and use this in Downloader::downloadCached(). This fixes $ nix-build https://nixos.org/channels/nixos-16.09-small/nixexprs.tar.xz -A hello error: cannot import path ‘/nix/store/csfbp1s60dkgmk9f8g0zk0mwb7hzgabd-nixexprs.tar.xz’ because it lacks a valid signature --- src/libstore/download.cc | 1 + src/libstore/local-store.cc | 2 +- src/libstore/remote-store.cc | 2 +- src/libstore/store-api.cc | 6 ++++++ src/libstore/store-api.hh | 7 ++++++- src/nix-daemon/nix-daemon.cc | 1 + 6 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 25ccd7d0b52..a56fd6922a5 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -612,6 +612,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data); info.path = store->makeFixedOutputPath(false, hash, name); info.narHash = hashString(htSHA256, *sink.s); + info.ca = makeFixedOutputCA(false, hash); store->addToStore(info, sink.s, false, true); storePath = info.path; } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 4c161cfb341..9b775e16a38 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1002,7 +1002,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name, info.narHash = hash.first; info.narSize = hash.second; info.ultimate = true; - info.ca = "fixed:" + (recursive ? (std::string) "r:" : "") + h.to_string(); + info.ca = makeFixedOutputCA(recursive, h); registerValidPath(info); } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 42c09ec7e0b..7f398685a2c 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -380,7 +380,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, const ref conn->to << wopAddToStoreNar << info.path << info.deriver << printHash(info.narHash) << info.references << info.registrationTime << info.narSize - << info.ultimate << info.sigs << *nar << repair << dontCheckSigs; + << info.ultimate << info.sigs << info.ca << *nar << repair << dontCheckSigs; // FIXME: don't send nar as a string conn->processStderr(); } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b5934a0d123..401b001b2d8 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -676,6 +676,12 @@ Strings ValidPathInfo::shortRefs() const } +std::string makeFixedOutputCA(bool recursive, const Hash & hash) +{ + return "fixed:" + (recursive ? (std::string) "r:" : "") + hash.to_string(); +} + + } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index d03e70849f9..c344b9d66ed 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -128,7 +128,7 @@ struct ValidPathInfo of an output path of a derivation were actually produced by that derivation. In the intensional model, we have to trust that a particular output path was produced by a derivation; the - path name then implies the contents.) + path then implies the contents.) 
Ideally, the content-addressability assertion would just be a Boolean, and the store path would be computed from @@ -687,6 +687,11 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven = false); +/* Compute the content-addressability assertion (ValidPathInfo::ca) + for paths created by makeFixedOutputPath() / addToStore(). */ +std::string makeFixedOutputCA(bool recursive, const Hash & hash); + + MakeError(SubstError, Error) MakeError(BuildError, Error) /* denotes a permanent build failure */ MakeError(InvalidPath, Error) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 3b43ddfa16d..9fbc43b82d7 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -582,6 +582,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe info.narSize = readLongLong(from); info.ultimate = readLongLong(from); info.sigs = readStrings(from); + info.ca = readString(from); auto nar = make_ref(readString(from)); auto repair = readInt(from) ? true : false; auto dontCheckSigs = readInt(from) ? true : false; From e7cb2847ab1cec48eac6a86c56885b3f0df76275 Mon Sep 17 00:00:00 2001 From: Dan Peebles Date: Tue, 21 Feb 2017 22:50:18 -0500 Subject: [PATCH 0125/2196] Explicitly model all settings and fail on unrecognized ones Previously, the Settings class allowed other code to query for string properties, which led to a proliferation of code all over the place making up new options without any sort of central registry of valid options. This commit pulls all those options back into the central Settings class and removes the public get() methods, to discourage future abuses like that. Furthermore, because we know the full set of options ahead of time, we now fail loudly if someone enters an unrecognized option, thus preventing subtle typos. With some template fun, we could probably also dump the full set of options (with documentation, defaults, etc.) to the command line, but I'm not doing that yet here. --- src/libexpr/eval.cc | 2 +- src/libstore/build.cc | 43 +++------- src/libstore/crypto.cc | 4 +- src/libstore/download.cc | 6 +- src/libstore/globals.cc | 148 +++++++++++++++++++++++++++-------- src/libstore/globals.hh | 86 ++++++++++++++++++-- src/libstore/local-store.cc | 4 +- src/libstore/store-api.cc | 6 +- src/nix-daemon/nix-daemon.cc | 4 +- src/nix-store/nix-store.cc | 2 +- 10 files changed, 218 insertions(+), 87 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index d418ab4e43a..f6cdedb3797 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -299,7 +299,7 @@ EvalState::EvalState(const Strings & _searchPath, ref store) { countCalls = getEnv("NIX_COUNT_CALLS", "0") != "0"; - restricted = settings.get("restrict-eval", false); + restricted = settings.restrictEval; assert(gcInitialised); diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1ce23135fc3..4a7e1a62b50 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -34,13 +34,6 @@ #include #include -/* chroot-like behavior from Apple's sandbox */ -#if __APPLE__ - #define DEFAULT_ALLOWED_IMPURE_PREFIXES "/System/Library /usr/lib /dev /bin/sh" -#else - #define DEFAULT_ALLOWED_IMPURE_PREFIXES "" -#endif - /* Includes required for chroot support. */ #if __linux__ #include @@ -1279,7 +1272,7 @@ void DerivationGoal::inputsRealised() /* Don't repeat fixed-output derivations since they're already verified by their output hash.*/ - nrRounds = fixedOutput ? 1 : settings.get("build-repeat", 0) + 1; + nrRounds = fixedOutput ? 
1 : settings.buildRepeat + 1; /* Okay, try to build. Note that here we don't wait for a build slot to become available, since we don't need one if there is a @@ -1685,9 +1678,7 @@ void DerivationGoal::startBuilder() /* Are we doing a chroot build? */ { - string x = settings.get("build-use-sandbox", - /* deprecated alias */ - settings.get("build-use-chroot", string("false"))); + string x = settings.useSandbox; if (x != "true" && x != "false" && x != "relaxed") throw Error("option ‘build-use-sandbox’ must be set to one of ‘true’, ‘false’ or ‘relaxed’"); if (x == "true") { @@ -1744,21 +1735,10 @@ void DerivationGoal::startBuilder() if (useChroot) { - string defaultChrootDirs; -#if __linux__ - if (worker.store.isInStore(BASH_PATH)) - defaultChrootDirs = "/bin/sh=" BASH_PATH; -#endif - /* Allow a user-configurable set of directories from the host file system. */ - PathSet dirs = tokenizeString( - settings.get("build-sandbox-paths", - /* deprecated alias with lower priority */ - settings.get("build-chroot-dirs", defaultChrootDirs))); - PathSet dirs2 = tokenizeString( - settings.get("build-extra-chroot-dirs", - settings.get("build-extra-sandbox-paths", string("")))); + PathSet dirs = settings.sandboxPaths; + PathSet dirs2 = settings.extraSandboxPaths; dirs.insert(dirs2.begin(), dirs2.end()); dirsInChroot.clear(); @@ -1790,8 +1770,7 @@ void DerivationGoal::startBuilder() for (auto & i : closure) dirsInChroot[i] = i; - string allowed = settings.get("allowed-impure-host-deps", string(DEFAULT_ALLOWED_IMPURE_PREFIXES)); - PathSet allowedPaths = tokenizeString(allowed); + PathSet allowedPaths = settings.allowedImpureHostPrefixes; /* This works like the above, except on a per-derivation level */ Strings impurePaths = tokenizeString(get(drv->env, "__impureHostDeps")); @@ -1811,7 +1790,7 @@ void DerivationGoal::startBuilder() } } if (!found) - throw Error(format("derivation ‘%1%’ requested impure path ‘%2%’, but it was not in allowed-impure-host-deps (‘%3%’)") % drvPath % i % allowed); + throw Error(format("derivation ‘%1%’ requested impure path ‘%2%’, but it was not in allowed-impure-host-deps") % drvPath % i); dirsInChroot[i] = i; } @@ -2433,7 +2412,7 @@ void DerivationGoal::runChild() /* Mount a new tmpfs on /dev/shm to ensure that whatever the builder puts in /dev/shm is cleaned up automatically. */ if (pathExists("/dev/shm") && mount("none", (chrootRootDir + "/dev/shm").c_str(), "tmpfs", 0, - fmt("size=%s", settings.get("sandbox-dev-shm-size", std::string("50%"))).c_str()) == -1) + fmt("size=%s", settings.sandboxShmSize).c_str()) == -1) throw SysError("mounting /dev/shm"); #if 0 @@ -2596,7 +2575,7 @@ void DerivationGoal::runChild() sandboxProfile += "(version 1)\n"; /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ - if (settings.get("darwin-log-sandbox-violations", false)) { + if (settings.darwinLogSandboxViolations) { sandboxProfile += "(deny default)\n"; } else { sandboxProfile += "(deny default (with no-log))\n"; @@ -2743,7 +2722,7 @@ void DerivationGoal::registerOutputs() InodesSeen inodesSeen; Path checkSuffix = ".check"; - bool runDiffHook = settings.get("run-diff-hook", false); + bool runDiffHook = settings.runDiffHook; bool keepPreviousRound = settings.keepFailed || runDiffHook; /* Check whether the output paths were created, and grep each @@ -2981,7 +2960,7 @@ void DerivationGoal::registerOutputs() ? 
fmt("output ‘%1%’ of ‘%2%’ differs from ‘%3%’ from previous round", i->path, drvPath, prev) : fmt("output ‘%1%’ of ‘%2%’ differs from previous round", i->path, drvPath); - auto diffHook = settings.get("diff-hook", std::string("")); + auto diffHook = settings.diffHook; if (prevExists && diffHook != "" && runDiffHook) { try { auto diff = runProgram(diffHook, true, {prev, i->path}); @@ -2992,7 +2971,7 @@ void DerivationGoal::registerOutputs() } } - if (settings.get("enforce-determinism", true)) + if (settings.enforceDeterminism) throw NotDeterministic(msg); printError(msg); diff --git a/src/libstore/crypto.cc b/src/libstore/crypto.cc index 747483afb30..9692dd83b4e 100644 --- a/src/libstore/crypto.cc +++ b/src/libstore/crypto.cc @@ -105,12 +105,12 @@ PublicKeys getDefaultPublicKeys() // FIXME: filter duplicates - for (auto s : settings.get("binary-cache-public-keys", Strings())) { + for (auto s : settings.binaryCachePublicKeys) { PublicKey key(s); publicKeys.emplace(key.name, key); } - for (auto secretKeyFile : settings.get("secret-key-files", Strings())) { + for (auto secretKeyFile : settings.secretKeyFiles) { try { SecretKey secretKey(readFile(secretKeyFile)); publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 25ccd7d0b52..661ee2ed54b 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -331,9 +331,9 @@ struct CurlDownloader : public Downloader curl_multi_setopt(curlm, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX); #endif curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS, - settings.get("binary-caches-parallel-connections", 25)); + settings.binaryCachesParallelConnections); - enableHttp2 = settings.get("enable-http2", true); + enableHttp2 = settings.enableHttp2; wakeupPipe.create(); fcntl(wakeupPipe.readSide.get(), F_SETFL, O_NONBLOCK); @@ -573,7 +573,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa string expectedETag; - int ttl = settings.get("tarball-ttl", 60 * 60); + int ttl = settings.tarballTtl; bool skip = false; if (pathExists(fileLink) && pathExists(dataFile)) { diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 474288b7812..62ed0376d71 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -17,12 +17,23 @@ namespace nix { must be deleted and recreated on startup.) 
*/ #define DEFAULT_SOCKET_PATH "/daemon-socket/socket" +/* chroot-like behavior from Apple's sandbox */ +#if __APPLE__ + #define DEFAULT_ALLOWED_IMPURE_PREFIXES "/System/Library /usr/lib /dev /bin/sh" +#else + #define DEFAULT_ALLOWED_IMPURE_PREFIXES "" +#endif Settings settings; Settings::Settings() { + deprecatedOptions = StringSet({ + "build-use-chroot", "build-chroot-dirs", "build-extra-chroot-dirs", + "this-option-never-existed-but-who-will-know" + }); + nixPrefix = NIX_PREFIX; nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR))); nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR)); @@ -73,6 +84,32 @@ Settings::Settings() showTrace = false; enableImportNative = false; netrcFile = fmt("%s/%s", nixConfDir, "netrc"); + useSandbox = "false"; // TODO: make into an enum + +#if __linux__ + sandboxPaths = tokenizeString("/bin/sh=" BASH_PATH); +#endif + + restrictEval = false; + buildRepeat = 0; + allowedImpureHostPrefixes = tokenizeString(DEFAULT_ALLOWED_IMPURE_PREFIXES); + sandboxShmSize = "50%"; + darwinLogSandboxViolations = false; + runDiffHook = false; + diffHook = ""; + enforceDeterminism = true; + binaryCachePublicKeys = Strings(); + secretKeyFiles = Strings(); + binaryCachesParallelConnections = 25; + enableHttp2 = true; + tarballTtl = 60 * 60; + signedBinaryCaches = ""; + substituters = Strings(); + binaryCaches = Strings(); + extraBinaryCaches = Strings(); + trustedUsers = Strings({"root"}); + allowedUsers = Strings({"*"}); + printMissing = true; } @@ -115,39 +152,6 @@ void Settings::set(const string & name, const string & value) overrides[name] = value; } - -string Settings::get(const string & name, const string & def) -{ - auto i = settings.find(name); - if (i == settings.end()) return def; - return i->second; -} - - -Strings Settings::get(const string & name, const Strings & def) -{ - auto i = settings.find(name); - if (i == settings.end()) return def; - return tokenizeString(i->second); -} - - -bool Settings::get(const string & name, bool def) -{ - bool res = def; - _get(res, name); - return res; -} - - -int Settings::get(const string & name, int def) -{ - int res = def; - _get(res, name); - return res; -} - - void Settings::update() { _get(tryFallback, "build-fallback"); @@ -181,13 +185,71 @@ void Settings::update() _get(keepGoing, "keep-going"); _get(keepFailed, "keep-failed"); _get(netrcFile, "netrc-file"); + _get(useSandbox, "build-use-sandbox", "build-use-chroot"); + _get(sandboxPaths, "build-sandbox-paths", "build-chroot-dirs"); + _get(extraSandboxPaths, "build-extra-sandbox-paths", "build-extra-chroot-dirs"); + _get(restrictEval, "restrict-eval"); + _get(buildRepeat, "build-repeat"); + _get(allowedImpureHostPrefixes, "allowed-impure-host-deps"); + _get(sandboxShmSize, "sandbox-dev-shm-size"); + _get(darwinLogSandboxViolations, "darwin-log-sandbox-violations"); + _get(runDiffHook, "run-diff-hook"); + _get(diffHook, "diff-hook"); + _get(enforceDeterminism, "enforce-determinism"); + _get(binaryCachePublicKeys, "binary-cache-public-keys"); + _get(secretKeyFiles, "secret-key-files"); + _get(binaryCachesParallelConnections, "binary-caches-parallel-connections"); + _get(enableHttp2, "enable-http2"); + _get(tarballTtl, "tarball-ttl"); + _get(signedBinaryCaches, "signed-binary-caches"); + _get(substituters, "substituters"); + _get(binaryCaches, "binary-caches"); + _get(extraBinaryCaches, "extra-binary-caches"); + _get(trustedUsers, "trusted-users"); + _get(allowedUsers, "allowed-users"); + _get(printMissing, "print-missing"); + + /* Clear out 
any deprecated options that might be left, so users know we recognize the option + but aren't processing it anymore */ + for (auto &i : deprecatedOptions) { + if (settings.find(i) != settings.end()) { + printError(format("warning: deprecated option '%1%' is no longer supported and will be ignored") % i); + settings.erase(i); + } + } + + if (settings.size() != 0) { + string bad; + for (auto &i : settings) + bad += "'" + i.first + "', "; + bad.pop_back(); + bad.pop_back(); + throw Error(format("unrecognized options: %s") % bad); + } } +void Settings::checkDeprecated(const string & name) +{ + if (deprecatedOptions.find(name) != deprecatedOptions.end()) + printError(format("warning: deprecated option '%1%' will soon be unsupported") % name); +} void Settings::_get(string & res, const string & name) { SettingsMap::iterator i = settings.find(name); if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); + res = i->second; +} + +void Settings::_get(string & res, const string & name1, const string & name2) +{ + SettingsMap::iterator i = settings.find(name1); + if (i == settings.end()) i = settings.find(name2); + if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); res = i->second; } @@ -196,6 +258,8 @@ void Settings::_get(bool & res, const string & name) { SettingsMap::iterator i = settings.find(name); if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); if (i->second == "true") res = true; else if (i->second == "false") res = false; else throw Error(format("configuration option ‘%1%’ should be either ‘true’ or ‘false’, not ‘%2%’") @@ -207,6 +271,20 @@ void Settings::_get(StringSet & res, const string & name) { SettingsMap::iterator i = settings.find(name); if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); + res.clear(); + Strings ss = tokenizeString(i->second); + res.insert(ss.begin(), ss.end()); +} + +void Settings::_get(StringSet & res, const string & name1, const string & name2) +{ + SettingsMap::iterator i = settings.find(name1); + if (i == settings.end()) i = settings.find(name2); + if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); res.clear(); Strings ss = tokenizeString(i->second); res.insert(ss.begin(), ss.end()); @@ -216,6 +294,8 @@ void Settings::_get(Strings & res, const string & name) { SettingsMap::iterator i = settings.find(name); if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); res = tokenizeString(i->second); } @@ -224,6 +304,8 @@ template void Settings::_get(N & res, const string & name) { SettingsMap::iterator i = settings.find(name); if (i == settings.end()) return; + checkDeprecated(i->first); + settings.erase(i); if (!string2Int(i->second, res)) throw Error(format("configuration setting ‘%1%’ should have an integer value") % name); } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 0ff18f8b16e..d74488a41b3 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -20,14 +20,6 @@ struct Settings { void set(const string & name, const string & value); - string get(const string & name, const string & def); - - Strings get(const string & name, const Strings & def); - - bool get(const string & name, bool def); - - int get(const string & name, int def); - void update(); string pack(); @@ -36,6 +28,10 @@ struct Settings { SettingsMap getOverrides(); + /* TODO: the comments below should be strings and exposed via a nice command-line UI or similar. 
+ We should probably replace it with some sort of magic template or macro to minimize the amount + of duplication and pain here. */ + /* The directory where we store sources and derived files. */ Path nixStore; @@ -187,6 +183,75 @@ struct Settings { /* Whether the importNative primop should be enabled */ bool enableImportNative; + /* Whether to enable sandboxed builds (string until we get an enum for true/false/relaxed) */ + string useSandbox; + + /* The basic set of paths to expose in a sandbox */ + PathSet sandboxPaths; + + /* Any extra sandbox paths to expose */ + PathSet extraSandboxPaths; + + /* Whether to allow certain questionable operations (like fetching) during evaluation */ + bool restrictEval; + + /* The number of times to repeat a build to check for determinism */ + int buildRepeat; + + /* Which prefixes to allow derivations to ask for access to (primarily for Darwin) */ + PathSet allowedImpureHostPrefixes; + + /* The size of /dev/shm in the build sandbox (for Linux) */ + string sandboxShmSize; + + /* Whether to log Darwin sandbox access violations to the system log */ + bool darwinLogSandboxViolations; + + /* ??? */ + bool runDiffHook; + + /* ??? */ + string diffHook; + + /* Whether to fail if repeated builds produce different output */ + bool enforceDeterminism; + + /* The known public keys for a binary cache */ + Strings binaryCachePublicKeys; + + /* Secret keys to use for build output signing */ + Strings secretKeyFiles; + + /* Number of parallel connections to hit a binary cache with when finding out if it contains hashes */ + int binaryCachesParallelConnections; + + /* Whether to enable HTTP2 */ + bool enableHttp2; + + /* How soon to expire tarballs like builtins.fetchTarball and (ugh, bad name) builtins.fetchurl */ + int tarballTtl; + + /* ??? */ + string signedBinaryCaches; + + /* ??? */ + Strings substituters; + + /* ??? */ + Strings binaryCaches; + + /* ??? */ + Strings extraBinaryCaches; + + /* Who we trust to ask the daemon to do unsafe things */ + Strings trustedUsers; + + /* ?Who we trust to use the daemon in safe ways */ + Strings allowedUsers; + + /* ??? */ + bool printMissing; + /* The hook to run just before a build to set derivation-specific build settings */ Path preBuildHook; @@ -196,11 +261,16 @@ struct Settings { Path netrcFile; private: + StringSet deprecatedOptions; SettingsMap settings, overrides; + void checkDeprecated(const string & name); + void _get(string & res, const string & name); + void _get(string & res, const string & name1, const string & name2); void _get(bool & res, const string & name); void _get(StringSet & res, const string & name); + void _get(StringSet & res, const string & name1, const string & name2); void _get(Strings & res, const string & name); template void _get(N & res, const string & name); }; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 4c161cfb341..afcda6e2be7 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -44,7 +44,7 @@ LocalStore::LocalStore(const Params & params) , reservedPath(dbDir + "/reserved") , schemaPath(dbDir + "/schema") , trashDir(realStoreDir + "/trash") - , requireSigs(trim(settings.get("signed-binary-caches", std::string(""))) != "") // FIXME: rename option + , requireSigs(trim(settings.signedBinaryCaches) != "") // FIXME: rename option , publicKeys(getDefaultPublicKeys()) { auto state(_state.lock()); @@ -1330,7 +1330,7 @@ void LocalStore::signPathInfo(ValidPathInfo & info) { // FIXME: keep secret keys in memory. 
- auto secretKeyFiles = settings.get("secret-key-files", Strings()); + auto secretKeyFiles = settings.secretKeyFiles; for (auto & secretKeyFile : secretKeyFiles) { SecretKey secretKey(readFile(secretKeyFile)); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b5934a0d123..60342427751 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -766,13 +766,13 @@ std::list> getDefaultSubstituters() state->stores.push_back(openStore(uri)); }; - for (auto uri : settings.get("substituters", Strings())) + for (auto uri : settings.substituters) addStore(uri); - for (auto uri : settings.get("binary-caches", Strings())) + for (auto uri : settings.binaryCaches) addStore(uri); - for (auto uri : settings.get("extra-binary-caches", Strings())) + for (auto uri : settings.extraBinaryCaches) addStore(uri); state->done = true; diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 3b43ddfa16d..2fcb5b565a9 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -865,8 +865,8 @@ static void daemonLoop(char * * argv) struct group * gr = peer.gidKnown ? getgrgid(peer.gid) : 0; string group = gr ? gr->gr_name : std::to_string(peer.gid); - Strings trustedUsers = settings.get("trusted-users", Strings({"root"})); - Strings allowedUsers = settings.get("allowed-users", Strings({"*"})); + Strings trustedUsers = settings.trustedUsers; + Strings allowedUsers = settings.allowedUsers; if (matchUser(user, group, trustedUsers)) trusted = true; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 0aabe66c562..bb3b430c9ec 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -146,7 +146,7 @@ static void opRealise(Strings opFlags, Strings opArgs) unknown = PathSet(); } - if (settings.get("print-missing", true)) + if (settings.printMissing) printMissing(ref(store), willBuild, willSubstitute, unknown, downloadSize, narSize); if (dryRun) return; From 668fef2e4f1c7758e2a55e355b4826014d5b7ba9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Feb 2017 13:31:46 +0100 Subject: [PATCH 0126/2196] nix-shell: Overwrite environment variables Need to remember that std::map::insert() and emplace() don't overwrite existing entries... This fixes a regression relative to 1.11 that in particular triggers in nested nix-shells. Before: $ nativeBuildInputs=/foo nix-shell -p hello --run 'hello' build input /foo does not exist After: $ nativeBuildInputs=/foo nix-shell -p hello --run 'hello' Hello, world! 
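The underlying C++ behaviour is easy to miss: std::map::insert() and emplace() leave an existing entry untouched, whereas operator[] assigns unconditionally. A minimal standalone sketch of the difference (plain C++, independent of the Nix sources):

    #include <cassert>
    #include <map>
    #include <string>

    int main()
    {
        std::map<std::string, std::string> env;
        env["nativeBuildInputs"] = "/foo";            // stale value inherited from the caller

        // insert()/emplace() are no-ops when the key already exists,
        // so the stale value survives.
        env.emplace("nativeBuildInputs", "hello");
        assert(env["nativeBuildInputs"] == "/foo");

        // operator[] overwrites, which is what the derivation's variables need.
        env["nativeBuildInputs"] = "hello";
        assert(env["nativeBuildInputs"] == "hello");

        return 0;
    }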
--- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index ee030c57b6b..f4484ba572d 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -408,7 +408,7 @@ int main(int argc, char ** argv) env["NIX_STORE"] = store->storeDir; for (auto & var : drv.env) - env.emplace(var); + env[var.first] = var.second; restoreAffinity(); From 1c718f80d3e91c6dd8f1337f81b37e4837384c75 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Feb 2017 16:38:14 +0100 Subject: [PATCH 0127/2196] Verify content-addressability assertions at registration time --- src/libstore/local-store.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 9b775e16a38..bc63955af0e 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -519,6 +519,8 @@ void LocalStore::checkDerivationOutputs(const Path & drvPath, const Derivation & uint64_t LocalStore::addValidPath(State & state, const ValidPathInfo & info, bool checkOutputs) { + assert(info.ca == "" || info.isContentAddressed(*this)); + state.stmtRegisterValidPath.use() (info.path) ("sha256:" + printHash(info.narHash)) From e76df9bd5285272f025025d97fe8993156726eef Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Feb 2017 16:39:53 +0100 Subject: [PATCH 0128/2196] Register content-addressability assertion for fixed outputs --- src/libstore/build.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1ce23135fc3..eef9549669b 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2753,6 +2753,8 @@ void DerivationGoal::registerOutputs() Path path = i.second.path; if (missingPaths.find(path) == missingPaths.end()) continue; + ValidPathInfo info; + Path actualPath = path; if (useChroot) { actualPath = chrootRootDir + path; @@ -2855,6 +2857,8 @@ void DerivationGoal::registerOutputs() format("output path ‘%1%’ has %2% hash ‘%3%’ when ‘%4%’ was expected") % path % i.second.hashAlgo % printHash16or32(h2) % printHash16or32(h)); } + + info.ca = makeFixedOutputCA(recursive, h2); } /* Get rid of all weird permissions. 
This also checks that @@ -2954,7 +2958,6 @@ void DerivationGoal::registerOutputs() worker.markContentsGood(path); } - ValidPathInfo info; info.path = path; info.narHash = hash.first; info.narSize = hash.second; From d3e1aad421a88a7a075cac25142c49224b3ecca2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Feb 2017 17:25:00 +0100 Subject: [PATCH 0129/2196] nix-shell: Better error message when the shell can't be started --- src/nix-build/nix-build.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index f4484ba572d..b81c98868e6 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -448,15 +448,17 @@ int main(int argc, char ** argv) auto envPtrs = stringsToCharPtrs(envStrs); + auto shell = getEnv("NIX_BUILD_SHELL", "bash"); + environ = envPtrs.data(); auto argPtrs = stringsToCharPtrs(args); restoreSignals(); - execvp(getEnv("NIX_BUILD_SHELL", "bash").c_str(), argPtrs.data()); + execvp(shell.c_str(), argPtrs.data()); - throw SysError("executing shell"); + throw SysError("executing shell ‘%s’", shell); } // Ugly hackery to make "nix-build -A foo.all" produce symlinks From 89ffe1eff946639aa2752177fcc5159b6926af70 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 24 Feb 2017 17:25:22 +0100 Subject: [PATCH 0130/2196] Fix nix-shell tests The nix-shell fix in 668fef2e4f1c7758e2a55e355b4826014d5b7ba9 revealed that we had some --pure tests that incorrectly depended on PATH from config.nix's mkDerivation being overwritten by the caller's PATH. http://hydra.nixos.org/build/49242478 --- tests/nix-shell.sh | 1 + tests/shell.nix | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh index 26cc521bbcb..f0f34a5f870 100644 --- a/tests/nix-shell.sh +++ b/tests/nix-shell.sh @@ -4,6 +4,7 @@ clearStore # Test nix-shell -A export IMPURE_VAR=foo +export NIX_BUILD_SHELL=$SHELL output=$(nix-shell --pure shell.nix -A shellDrv --run \ 'echo "$IMPURE_VAR - $VAR_FROM_STDENV_SETUP - $VAR_FROM_NIX"') diff --git a/tests/shell.nix b/tests/shell.nix index ed4d6fbaaa0..1a092913b3b 100644 --- a/tests/shell.nix +++ b/tests/shell.nix @@ -34,6 +34,7 @@ rec { mkdir -p $out/bin echo 'echo foo' > $out/bin/foo chmod a+rx $out/bin/foo + ln -s ${shell} $out/bin/bash ''; bar = runCommand "bar" {} '' From f72206b736e4fa1be010a05aa12fa57afea2019c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Forsman?= Date: Mon, 27 Feb 2017 10:09:48 +0100 Subject: [PATCH 0131/2196] unpack-channel.nix: fix 'mv' corner case unpack-channel.nix fails if the tarball contains a directory named the same as the channel: mv: cannot move 'nixpkgs' to a subdirectory of itself, '.../nixpkgs' This commit fixes that by not moving the directory if it already has the correct name. 
--- corepkgs/unpack-channel.nix | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/corepkgs/unpack-channel.nix b/corepkgs/unpack-channel.nix index 9445532ded0..a654db40e62 100644 --- a/corepkgs/unpack-channel.nix +++ b/corepkgs/unpack-channel.nix @@ -15,7 +15,9 @@ let else ${bzip2} -d < $src | ${tar} xf - ${tarFlags} fi - mv * $out/$channelName + if [ * != $channelName ]; then + mv * $out/$channelName + fi if [ -n "$binaryCacheURL" ]; then mkdir $out/binary-caches echo -n "$binaryCacheURL" > $out/binary-caches/$channelName From 3cb0387d3f0932c9ce6f0aca2663860da7a65fb0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Feb 2017 14:36:09 +0100 Subject: [PATCH 0132/2196] Retry downloads on HTTP/2 stream errors Issue #1254. --- src/libstore/download.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index a56fd6922a5..c0cec658cbb 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -273,7 +273,9 @@ struct CurlDownloader : public Downloader httpStatus == 403 ? Forbidden : (httpStatus == 408 || httpStatus == 500 || httpStatus == 503 || httpStatus == 504 || httpStatus == 522 || httpStatus == 524 - || code == CURLE_COULDNT_RESOLVE_HOST || code == CURLE_RECV_ERROR) ? Transient : + || code == CURLE_COULDNT_RESOLVE_HOST + || code == CURLE_RECV_ERROR + || code == CURLE_HTTP2_STREAM) ? Transient : Misc; attempt++; From 4f3fb34844eef457d910fbbb78fe676f696eeac5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Feb 2017 15:58:24 +0100 Subject: [PATCH 0133/2196] Fix 1.8 release note about build-max-jobs We set build-cores automatically, not build-max-jobs. (The commit message for de4cdd0d47adc70a4db12397a42c18ee50b4e662 also got this wrong.) --- doc/manual/release-notes/rl-1.8.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/release-notes/rl-1.8.xml b/doc/manual/release-notes/rl-1.8.xml index 48caac2c6b6..c854c5c5f85 100644 --- a/doc/manual/release-notes/rl-1.8.xml +++ b/doc/manual/release-notes/rl-1.8.xml @@ -83,8 +83,8 @@ $ nix-store -l $(which xterm) caches). The configuration option - now defaults to the number of - available CPU cores. + now defaults to the number of available + CPU cores. Build users are now used by default when Nix is invoked as root. 
This prevents builds from accidentally running as From 3fab1f04a7d9a5d8ca0f7faf071d5767f93c7e22 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Feb 2017 16:01:54 +0100 Subject: [PATCH 0134/2196] _SC_NPROCESSORS_ONLN -> std::thread::hardware_concurrency() --- src/libstore/globals.cc | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 474288b7812..12e2a3cf43d 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -2,9 +2,9 @@ #include "util.hh" #include "archive.hh" -#include #include -#include +#include +#include namespace nix { @@ -42,11 +42,7 @@ Settings::Settings() keepGoing = false; tryFallback = false; maxBuildJobs = 1; - buildCores = 1; -#ifdef _SC_NPROCESSORS_ONLN - long res = sysconf(_SC_NPROCESSORS_ONLN); - if (res > 0) buildCores = res; -#endif + buildCores = std::max(1U, std::thread::hardware_concurrency()); readOnlyMode = false; thisSystem = SYSTEM; maxSilentTime = 0; From 7251d048fa812d2551b7003bc9f13a8f5d4c95a5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Feb 2017 12:54:50 +0100 Subject: [PATCH 0135/2196] Support auto-configuration of build-max-jobs "build-max-jobs" and the "-j" option can now be set to "auto" to use the number of CPUs in the system. (Unlike build-cores, it doesn't use 0 to imply auto-configuration, because a) magic values are a bad idea in general; b) 0 is a legitimate value used to disable local building.) Fixes #1198. --- doc/manual/command-ref/conf-file.xml | 6 +++--- doc/manual/command-ref/opt-common.xml | 5 +++-- src/libmain/shared.cc | 5 ++++- src/libstore/globals.cc | 9 ++++++++- 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 0f7a2deba04..96f8a4b608a 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -101,9 +101,9 @@ flag, e.g. --option gc-keep-outputs false. This option defines the maximum number of jobs that Nix will try to build in parallel. The default is - 1. You should generally set it to the number - of CPUs in your system (e.g., 2 on an Athlon 64 - X2). It can be overridden using the () command line switch. diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml index 2a076877a1b..2aa41c4d438 100644 --- a/doc/manual/command-ref/opt-common.xml +++ b/doc/manual/command-ref/opt-common.xml @@ -93,8 +93,9 @@ Sets the maximum number of build jobs that Nix will - perform in parallel to the specified number. The default is - specified by the auto to use the number of CPUs in the system. + The default is specified by the build-max-jobs configuration setting, which itself defaults to 1. 
A higher value is useful on SMP systems or to diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 53fa83fe0de..326202d295f 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -167,6 +167,10 @@ struct LegacyArgs : public MixCommonArgs settings.set("build-fallback", "true"); }); + mkFlag1('j', "max-jobs", "jobs", "maximum number of parallel builds", [=](std::string s) { + settings.set("build-max-jobs", s); + }); + auto intSettingAlias = [&](char shortName, const std::string & longName, const std::string & description, const std::string & dest) { mkFlag(shortName, longName, description, [=](unsigned int n) { @@ -174,7 +178,6 @@ struct LegacyArgs : public MixCommonArgs }); }; - intSettingAlias('j', "max-jobs", "maximum number of parallel builds", "build-max-jobs"); intSettingAlias(0, "cores", "maximum number of CPU cores to use inside a build", "build-cores"); intSettingAlias(0, "max-silent-time", "number of seconds of silence before a build is killed", "build-max-silent-time"); intSettingAlias(0, "timeout", "number of seconds before a build is killed", "build-timeout"); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 12e2a3cf43d..07af629260a 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -147,7 +147,14 @@ int Settings::get(const string & name, int def) void Settings::update() { _get(tryFallback, "build-fallback"); - _get(maxBuildJobs, "build-max-jobs"); + + auto s = get("build-max-jobs", std::string("1")); + if (s == "auto") + maxBuildJobs = std::max(1U, std::thread::hardware_concurrency()); + else + if (!string2Int(s, maxBuildJobs)) + throw Error("configuration setting ‘build-max-jobs’ should be ‘auto’ or an integer"); + _get(buildCores, "build-cores"); _get(thisSystem, "system"); _get(maxSilentTime, "build-max-silent-time"); From 80027144ae765544aa96d9c38dc2dd345bcf703d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Feb 2017 13:20:12 +0100 Subject: [PATCH 0136/2196] In SQLite errors, include the database path This is necessary because we have multiple SQLite databases (e.g. the binary cache cache). 
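SQLite can report which file backs a connection, which is what makes this possible. A rough sketch of the pattern using only the public SQLite C API (reportSQLiteError is an illustrative name, not the function used in the real code, and the message layout there differs):

    #include <sqlite3.h>
    #include <cstdio>

    // Include the file backing the connection in the error message, so that
    // errors from different databases (e.g. the binary cache cache) can be
    // told apart.
    void reportSQLiteError(sqlite3 * db, const char * doing)
    {
        const char * path = sqlite3_db_filename(db, "main");
        if (!path || !*path) path = "(in-memory)";    // NULL/empty for temp and in-memory DBs
        std::fprintf(stderr, "%s: %s (in '%s')\n", doing, sqlite3_errmsg(db), path);
    }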
--- src/libstore/sqlite.cc | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 0197b091cd1..7d656121249 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -8,13 +8,17 @@ namespace nix { [[noreturn]] void throwSQLiteError(sqlite3 * db, const format & f) { int err = sqlite3_errcode(db); + + auto path = sqlite3_db_filename(db, nullptr); + if (!path) path = "(in-memory)"; + if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { if (err == SQLITE_PROTOCOL) - printError("warning: SQLite database is busy (SQLITE_PROTOCOL)"); + printError("warning: SQLite database ‘%s’ is busy (SQLITE_PROTOCOL)", path); else { static bool warned = false; if (!warned) { - printError("warning: SQLite database is busy"); + printError("warning: SQLite database ‘%s’ is busy", path); warned = true; } } @@ -29,10 +33,10 @@ namespace nix { #else sleep(1); #endif - throw SQLiteBusy(format("%1%: %2%") % f.str() % sqlite3_errmsg(db)); + throw SQLiteBusy("%s: %s (in ‘%s’)", f.str(), sqlite3_errstr(err), path); } else - throw SQLiteError(format("%1%: %2%") % f.str() % sqlite3_errmsg(db)); + throw SQLiteError("%s: %s (in ‘%s’)", f.str(), sqlite3_errstr(err), path); } SQLite::SQLite(const Path & path) From 34b12bad597a5d9f67408bebefc1bcb65c27bc4a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Feb 2017 13:44:11 +0100 Subject: [PATCH 0137/2196] NarInfoDiskCache: Handle SQLite busy errors --- src/libstore/nar-info-disk-cache.cc | 219 +++++++++++++++------------- 1 file changed, 115 insertions(+), 104 deletions(-) diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 13b67b81f35..180a936edb8 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -106,25 +106,27 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache "select * from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))"); /* Periodically purge expired entries from the database. */ - auto now = time(0); - - SQLiteStmt queryLastPurge(state->db, "select value from LastPurge"); - auto queryLastPurge_(queryLastPurge.use()); - - if (!queryLastPurge_.next() || queryLastPurge_.getInt(0) < now - purgeInterval) { - SQLiteStmt(state->db, - "delete from NARs where ((present = 0 and timestamp < ?) or (present = 1 and timestamp < ?))") - .use() - (now - ttlNegative) - (now - ttlPositive) - .exec(); - - debug("deleted %d entries from the NAR info disk cache", sqlite3_changes(state->db)); - - SQLiteStmt(state->db, - "insert or replace into LastPurge(dummy, value) values ('', ?)") - .use()(now).exec(); - } + retrySQLite([&]() { + auto now = time(0); + + SQLiteStmt queryLastPurge(state->db, "select value from LastPurge"); + auto queryLastPurge_(queryLastPurge.use()); + + if (!queryLastPurge_.next() || queryLastPurge_.getInt(0) < now - purgeInterval) { + SQLiteStmt(state->db, + "delete from NARs where ((present = 0 and timestamp < ?) 
or (present = 1 and timestamp < ?))") + .use() + (now - ttlNegative) + (now - ttlPositive) + .exec(); + + debug("deleted %d entries from the NAR info disk cache", sqlite3_changes(state->db)); + + SQLiteStmt(state->db, + "insert or replace into LastPurge(dummy, value) values ('', ?)") + .use()(now).exec(); + } + }); } Cache & getCache(State & state, const std::string & uri) @@ -136,114 +138,123 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache void createCache(const std::string & uri, const Path & storeDir, bool wantMassQuery, int priority) override { - auto state(_state.lock()); + retrySQLite([&]() { + auto state(_state.lock()); - // FIXME: race + // FIXME: race - state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority).exec(); - assert(sqlite3_changes(state->db) == 1); - state->caches[uri] = Cache{(int) sqlite3_last_insert_rowid(state->db), storeDir, wantMassQuery, priority}; + state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority).exec(); + assert(sqlite3_changes(state->db) == 1); + state->caches[uri] = Cache{(int) sqlite3_last_insert_rowid(state->db), storeDir, wantMassQuery, priority}; + }); } bool cacheExists(const std::string & uri, bool & wantMassQuery, int & priority) override { - auto state(_state.lock()); + return retrySQLite([&]() { + auto state(_state.lock()); - auto i = state->caches.find(uri); - if (i == state->caches.end()) { - auto queryCache(state->queryCache.use()(uri)); - if (!queryCache.next()) return false; - state->caches.emplace(uri, - Cache{(int) queryCache.getInt(0), queryCache.getStr(1), queryCache.getInt(2) != 0, (int) queryCache.getInt(3)}); - } + auto i = state->caches.find(uri); + if (i == state->caches.end()) { + auto queryCache(state->queryCache.use()(uri)); + if (!queryCache.next()) return false; + state->caches.emplace(uri, + Cache{(int) queryCache.getInt(0), queryCache.getStr(1), queryCache.getInt(2) != 0, (int) queryCache.getInt(3)}); + } - auto & cache(getCache(*state, uri)); + auto & cache(getCache(*state, uri)); - wantMassQuery = cache.wantMassQuery; - priority = cache.priority; + wantMassQuery = cache.wantMassQuery; + priority = cache.priority; - return true; + return true; + }); } std::pair> lookupNarInfo( const std::string & uri, const std::string & hashPart) override { - auto state(_state.lock()); + return retrySQLite>>( + [&]() -> std::pair> { + auto state(_state.lock()); + + auto & cache(getCache(*state, uri)); - auto & cache(getCache(*state, uri)); - - auto now = time(0); - - auto queryNAR(state->queryNAR.use() - (cache.id) - (hashPart) - (now - ttlNegative) - (now - ttlPositive)); - - if (!queryNAR.next()) - return {oUnknown, 0}; - - if (!queryNAR.getInt(13)) - return {oInvalid, 0}; - - auto narInfo = make_ref(); - - auto namePart = queryNAR.getStr(2); - narInfo->path = cache.storeDir + "/" + - hashPart + (namePart.empty() ? 
"" : "-" + namePart); - narInfo->url = queryNAR.getStr(3); - narInfo->compression = queryNAR.getStr(4); - if (!queryNAR.isNull(5)) - narInfo->fileHash = parseHash(queryNAR.getStr(5)); - narInfo->fileSize = queryNAR.getInt(6); - narInfo->narHash = parseHash(queryNAR.getStr(7)); - narInfo->narSize = queryNAR.getInt(8); - for (auto & r : tokenizeString(queryNAR.getStr(9), " ")) - narInfo->references.insert(cache.storeDir + "/" + r); - if (!queryNAR.isNull(10)) - narInfo->deriver = cache.storeDir + "/" + queryNAR.getStr(10); - for (auto & sig : tokenizeString(queryNAR.getStr(11), " ")) - narInfo->sigs.insert(sig); - - return {oValid, narInfo}; + auto now = time(0); + + auto queryNAR(state->queryNAR.use() + (cache.id) + (hashPart) + (now - ttlNegative) + (now - ttlPositive)); + + if (!queryNAR.next()) + return {oUnknown, 0}; + + if (!queryNAR.getInt(13)) + return {oInvalid, 0}; + + auto narInfo = make_ref(); + + auto namePart = queryNAR.getStr(2); + narInfo->path = cache.storeDir + "/" + + hashPart + (namePart.empty() ? "" : "-" + namePart); + narInfo->url = queryNAR.getStr(3); + narInfo->compression = queryNAR.getStr(4); + if (!queryNAR.isNull(5)) + narInfo->fileHash = parseHash(queryNAR.getStr(5)); + narInfo->fileSize = queryNAR.getInt(6); + narInfo->narHash = parseHash(queryNAR.getStr(7)); + narInfo->narSize = queryNAR.getInt(8); + for (auto & r : tokenizeString(queryNAR.getStr(9), " ")) + narInfo->references.insert(cache.storeDir + "/" + r); + if (!queryNAR.isNull(10)) + narInfo->deriver = cache.storeDir + "/" + queryNAR.getStr(10); + for (auto & sig : tokenizeString(queryNAR.getStr(11), " ")) + narInfo->sigs.insert(sig); + + return {oValid, narInfo}; + }); } void upsertNarInfo( const std::string & uri, const std::string & hashPart, std::shared_ptr info) override { - auto state(_state.lock()); - - auto & cache(getCache(*state, uri)); - - if (info) { - - auto narInfo = std::dynamic_pointer_cast(info); - - assert(hashPart == storePathToHash(info->path)); - - state->insertNAR.use() - (cache.id) - (hashPart) - (storePathToName(info->path)) - (narInfo ? narInfo->url : "", narInfo != 0) - (narInfo ? narInfo->compression : "", narInfo != 0) - (narInfo && narInfo->fileHash ? narInfo->fileHash.to_string() : "", narInfo && narInfo->fileHash) - (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) - (info->narHash.to_string()) - (info->narSize) - (concatStringsSep(" ", info->shortRefs())) - (info->deriver != "" ? baseNameOf(info->deriver) : "", info->deriver != "") - (concatStringsSep(" ", info->sigs)) - (time(0)).exec(); - - } else { - state->insertMissingNAR.use() - (cache.id) - (hashPart) - (time(0)).exec(); - } + retrySQLite([&]() { + auto state(_state.lock()); + + auto & cache(getCache(*state, uri)); + + if (info) { + + auto narInfo = std::dynamic_pointer_cast(info); + + assert(hashPart == storePathToHash(info->path)); + + state->insertNAR.use() + (cache.id) + (hashPart) + (storePathToName(info->path)) + (narInfo ? narInfo->url : "", narInfo != 0) + (narInfo ? narInfo->compression : "", narInfo != 0) + (narInfo && narInfo->fileHash ? narInfo->fileHash.to_string() : "", narInfo && narInfo->fileHash) + (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) + (info->narHash.to_string()) + (info->narSize) + (concatStringsSep(" ", info->shortRefs())) + (info->deriver != "" ? 
baseNameOf(info->deriver) : "", info->deriver != "") + (concatStringsSep(" ", info->sigs)) + (time(0)).exec(); + + } else { + state->insertMissingNAR.use() + (cache.id) + (hashPart) + (time(0)).exec(); + } + }); } }; From fd86dd93dd44a826235feb0fd82aabcdaa79a65b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Feb 2017 13:59:11 +0100 Subject: [PATCH 0138/2196] Improve SQLite busy handling --- configure.ac | 2 +- src/libstore/sqlite.cc | 66 +++++++++++++++++++++++------------------- src/libstore/sqlite.hh | 6 +++- 3 files changed, 43 insertions(+), 31 deletions(-) diff --git a/configure.ac b/configure.ac index e6b11be2df1..46b0ac0651b 100644 --- a/configure.ac +++ b/configure.ac @@ -265,7 +265,7 @@ AC_CHECK_FUNCS([setresuid setreuid lchown]) # Nice to have, but not essential. -AC_CHECK_FUNCS([strsignal posix_fallocate nanosleep sysconf]) +AC_CHECK_FUNCS([strsignal posix_fallocate sysconf]) # This is needed if bzip2 is a static library, and the Nix libraries diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 7d656121249..a81e62dbd6e 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -3,6 +3,8 @@ #include +#include + namespace nix { [[noreturn]] void throwSQLiteError(sqlite3 * db, const format & f) @@ -13,27 +15,10 @@ namespace nix { if (!path) path = "(in-memory)"; if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { - if (err == SQLITE_PROTOCOL) - printError("warning: SQLite database ‘%s’ is busy (SQLITE_PROTOCOL)", path); - else { - static bool warned = false; - if (!warned) { - printError("warning: SQLite database ‘%s’ is busy", path); - warned = true; - } - } - /* Sleep for a while since retrying the transaction right away - is likely to fail again. */ - checkInterrupt(); -#if HAVE_NANOSLEEP - struct timespec t; - t.tv_sec = 0; - t.tv_nsec = (random() % 100) * 1000 * 1000; /* <= 0.1s */ - nanosleep(&t, 0); -#else - sleep(1); -#endif - throw SQLiteBusy("%s: %s (in ‘%s’)", f.str(), sqlite3_errstr(err), path); + throw SQLiteBusy( + err == SQLITE_PROTOCOL + ? fmt("SQLite database ‘%s’ is busy (SQLITE_PROTOCOL)", path) + : fmt("SQLite database ‘%s’ is busy", path)); } else throw SQLiteError("%s: %s (in ‘%s’)", f.str(), sqlite3_errstr(err), path); @@ -58,24 +43,27 @@ SQLite::~SQLite() void SQLite::exec(const std::string & stmt) { - if (sqlite3_exec(db, stmt.c_str(), 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, format("executing SQLite statement ‘%s’") % stmt); + retrySQLite([&]() { + if (sqlite3_exec(db, stmt.c_str(), 0, 0, 0) != SQLITE_OK) + throwSQLiteError(db, format("executing SQLite statement ‘%s’") % stmt); + }); } -void SQLiteStmt::create(sqlite3 * db, const string & s) +void SQLiteStmt::create(sqlite3 * db, const string & sql) { checkInterrupt(); assert(!stmt); - if (sqlite3_prepare_v2(db, s.c_str(), -1, &stmt, 0) != SQLITE_OK) - throwSQLiteError(db, "creating statement"); + if (sqlite3_prepare_v2(db, sql.c_str(), -1, &stmt, 0) != SQLITE_OK) + throwSQLiteError(db, fmt("creating statement ‘%s’", sql)); this->db = db; + this->sql = sql; } SQLiteStmt::~SQLiteStmt() { try { if (stmt && sqlite3_finalize(stmt) != SQLITE_OK) - throwSQLiteError(db, "finalizing statement"); + throwSQLiteError(db, fmt("finalizing statement ‘%s’", sql)); } catch (...) 
{ ignoreException(); } @@ -132,14 +120,14 @@ void SQLiteStmt::Use::exec() int r = step(); assert(r != SQLITE_ROW); if (r != SQLITE_DONE) - throwSQLiteError(stmt.db, "executing SQLite statement"); + throwSQLiteError(stmt.db, fmt("executing SQLite statement ‘%s’", stmt.sql)); } bool SQLiteStmt::Use::next() { int r = step(); if (r != SQLITE_DONE && r != SQLITE_ROW) - throwSQLiteError(stmt.db, "executing SQLite query"); + throwSQLiteError(stmt.db, fmt("executing SQLite query ‘%s’", stmt.sql)); return r == SQLITE_ROW; } @@ -186,4 +174,24 @@ SQLiteTxn::~SQLiteTxn() } } +void handleSQLiteBusy(const SQLiteBusy & e) +{ + static std::atomic lastWarned{0}; + + time_t now = time(0); + + if (now > lastWarned + 10) { + lastWarned = now; + printError("warning: %s", e.what()); + } + + /* Sleep for a while since retrying the transaction right away + is likely to fail again. */ + checkInterrupt(); + struct timespec t; + t.tv_sec = 0; + t.tv_nsec = (random() % 100) * 1000 * 1000; /* <= 0.1s */ + nanosleep(&t, 0); +} + } diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 4d347a2e56a..14a7a0dd899 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -30,8 +30,9 @@ struct SQLiteStmt { sqlite3 * db = 0; sqlite3_stmt * stmt = 0; + std::string sql; SQLiteStmt() { } - SQLiteStmt(sqlite3 * db, const std::string & s) { create(db, s); } + SQLiteStmt(sqlite3 * db, const std::string & sql) { create(db, sql); } void create(sqlite3 * db, const std::string & s); ~SQLiteStmt(); operator sqlite3_stmt * () { return stmt; } @@ -94,6 +95,8 @@ MakeError(SQLiteBusy, SQLiteError); [[noreturn]] void throwSQLiteError(sqlite3 * db, const format & f); +void handleSQLiteBusy(const SQLiteBusy & e); + /* Convenience function for retrying a SQLite transaction when the database is busy. */ template @@ -103,6 +106,7 @@ T retrySQLite(std::function fun) try { return fun(); } catch (SQLiteBusy & e) { + handleSQLiteBusy(e); } } } From 07808052461e9534dc42f7f98e83a7b58565fd13 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Feb 2017 14:03:43 +0100 Subject: [PATCH 0139/2196] Fix building against older curl versions http://hydra.nixos.org/build/49490928 --- src/libstore/download.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index c0cec658cbb..75c00d85d34 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -275,7 +275,10 @@ struct CurlDownloader : public Downloader || httpStatus == 504 || httpStatus == 522 || httpStatus == 524 || code == CURLE_COULDNT_RESOLVE_HOST || code == CURLE_RECV_ERROR - || code == CURLE_HTTP2_STREAM) ? Transient : +#if LIBCURL_VERSION_NUM >= 0x073200 + || code == CURLE_HTTP2_STREAM +#endif + ) ? Transient : Misc; attempt++; From c4a40949d945b4a3be85ad68b8cfb449843f34a6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 13:52:54 +0100 Subject: [PATCH 0140/2196] Handle importing NARs containing files greater than 4 GiB Also templatize readInt() to work for various integer types. 
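The old readInt() read eight bytes off the wire but rejected anything that did not fit in 32 bits, so a file of 4 GiB or more inside a NAR could not be deserialised. The gist of the templated replacement, sketched standalone on a raw byte buffer (the version in the patch reads from a Source and uses Nix's own error type):

    #include <cstdint>
    #include <limits>
    #include <stdexcept>

    // Decode a little-endian 64-bit integer and check that it fits the
    // caller's type, instead of failing outright on values wider than 32 bits.
    template<typename T>
    T readNum(const unsigned char buf[8])
    {
        uint64_t n = 0;
        for (int i = 0; i < 8; i++)
            n |= (uint64_t) buf[i] << (8 * i);
        if (n > std::numeric_limits<T>::max())
            throw std::runtime_error("serialised integer is too large for the requested type");
        return (T) n;
    }

readNum<uint64_t>() then accepts sizes past 4 GiB, while readNum<unsigned int>() keeps the old range check for callers that genuinely want a small integer.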
--- src/libstore/derivations.cc | 8 +++--- src/libstore/export-import.cc | 2 +- src/libstore/remote-store.cc | 24 +++++++--------- src/libutil/serialise.cc | 43 ++-------------------------- src/libutil/serialise.hh | 54 ++++++++++++++++++++++++++++++++--- src/nix-daemon/nix-daemon.cc | 32 +++++++++------------ src/nix-store/nix-store.cc | 2 +- 7 files changed, 84 insertions(+), 81 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 79526c594f7..ce1ac7d3322 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -397,8 +397,8 @@ PathSet BasicDerivation::outputPaths() const Source & readDerivation(Source & in, Store & store, BasicDerivation & drv) { drv.outputs.clear(); - auto nr = readInt(in); - for (unsigned int n = 0; n < nr; n++) { + auto nr = readNum(in); + for (size_t n = 0; n < nr; n++) { auto name = readString(in); DerivationOutput o; in >> o.path >> o.hashAlgo >> o.hash; @@ -410,8 +410,8 @@ Source & readDerivation(Source & in, Store & store, BasicDerivation & drv) in >> drv.platform >> drv.builder; drv.args = readStrings(in); - nr = readInt(in); - for (unsigned int n = 0; n < nr; n++) { + nr = readNum(in); + for (size_t n = 0; n < nr; n++) { auto key = readString(in); auto value = readString(in); drv.env[key] = value; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index c5618c826c5..e584ae538da 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -86,7 +86,7 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, { Paths res; while (true) { - unsigned long long n = readLongLong(source); + auto n = readNum(source); if (n == 0) break; if (n != 1) throw Error("input doesn't look like something created by ‘nix-store --export’"); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 7f398685a2c..6428259144a 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -108,7 +108,7 @@ void RemoteStore::initConnection(Connection & conn) unsigned int magic = readInt(conn.from); if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch"); - conn.daemonVersion = readInt(conn.from); + conn.from >> conn.daemonVersion; if (GET_PROTOCOL_MAJOR(conn.daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION)) throw Error("Nix daemon protocol version not supported"); if (GET_PROTOCOL_MINOR(conn.daemonVersion) < 10) @@ -170,8 +170,7 @@ bool RemoteStore::isValidPathUncached(const Path & path) auto conn(connections->get()); conn->to << wopIsValidPath << path; conn->processStderr(); - unsigned int reply = readInt(conn->from); - return reply != 0; + return readInt(conn->from); } @@ -246,8 +245,8 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths, conn->to << wopQuerySubstitutablePathInfos << paths; conn->processStderr(); - unsigned int count = readInt(conn->from); - for (unsigned int n = 0; n < count; n++) { + size_t count = readNum(conn->from); + for (size_t n = 0; n < count; n++) { Path path = readStorePath(*this, conn->from); SubstitutablePathInfo & info(infos[path]); info.deriver = readString(conn->from); @@ -277,7 +276,7 @@ void RemoteStore::queryPathInfoUncached(const Path & path, throw; } if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) { - bool valid = readInt(conn->from) != 0; + bool valid; conn->from >> valid; if (!valid) throw InvalidPath(format("path ‘%s’ is not valid") % path); } auto info = std::make_shared(); @@ -286,12 +285,11 @@ void RemoteStore::queryPathInfoUncached(const Path & path, if 
(info->deriver != "") assertStorePath(info->deriver); info->narHash = parseHash(htSHA256, readString(conn->from)); info->references = readStorePaths(*this, conn->from); - info->registrationTime = readInt(conn->from); - info->narSize = readLongLong(conn->from); + conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { - info->ultimate = readInt(conn->from) != 0; + conn->from >> info->ultimate; info->sigs = readStrings(conn->from); - info->ca = readString(conn->from); + conn->from >> info->ca; } return info; }); @@ -515,7 +513,7 @@ Roots RemoteStore::findRoots() auto conn(connections->get()); conn->to << wopFindRoots; conn->processStderr(); - unsigned int count = readInt(conn->from); + size_t count = readNum(conn->from); Roots result; while (count--) { Path link = readString(conn->from); @@ -563,7 +561,7 @@ bool RemoteStore::verifyStore(bool checkContents, bool repair) auto conn(connections->get()); conn->to << wopVerifyStore << checkContents << repair; conn->processStderr(); - return readInt(conn->from) != 0; + return readInt(conn->from); } @@ -599,7 +597,7 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source) } else if (msg == STDERR_READ) { if (!source) throw Error("no source"); - size_t len = readInt(from); + size_t len = readNum(from); auto buf = std::make_unique(len); writeString(buf.get(), source->read(buf.get(), len), to); to.flush(); diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index a68f7a0fa8e..6064e15f5e6 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -194,39 +194,9 @@ void readPadding(size_t len, Source & source) } -unsigned int readInt(Source & source) -{ - unsigned char buf[8]; - source(buf, sizeof(buf)); - if (buf[4] || buf[5] || buf[6] || buf[7]) - throw SerialisationError("implementation cannot deal with > 32-bit integers"); - return - buf[0] | - (buf[1] << 8) | - (buf[2] << 16) | - (buf[3] << 24); -} - - -unsigned long long readLongLong(Source & source) -{ - unsigned char buf[8]; - source(buf, sizeof(buf)); - return - ((unsigned long long) buf[0]) | - ((unsigned long long) buf[1] << 8) | - ((unsigned long long) buf[2] << 16) | - ((unsigned long long) buf[3] << 24) | - ((unsigned long long) buf[4] << 32) | - ((unsigned long long) buf[5] << 40) | - ((unsigned long long) buf[6] << 48) | - ((unsigned long long) buf[7] << 56); -} - - size_t readString(unsigned char * buf, size_t max, Source & source) { - size_t len = readInt(source); + auto len = readNum(source); if (len > max) throw Error("string is too long"); source(buf, len); readPadding(len, source); @@ -236,7 +206,7 @@ size_t readString(unsigned char * buf, size_t max, Source & source) string readString(Source & source) { - size_t len = readInt(source); + auto len = readNum(source); auto buf = std::make_unique(len); source(buf.get(), len); readPadding(len, source); @@ -250,16 +220,9 @@ Source & operator >> (Source & in, string & s) } -Source & operator >> (Source & in, unsigned int & n) -{ - n = readInt(in); - return in; -} - - template T readStrings(Source & source) { - unsigned int count = readInt(source); + auto count = readNum(source); T ss; while (count--) ss.insert(ss.end(), readString(source)); diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 5646d08c131..3072f422ea9 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -177,18 +177,64 @@ Sink & operator << (Sink & sink, const Strings & s); Sink & operator << (Sink & sink, const StringSet & s); 
+MakeError(SerialisationError, Error) + + +template +T readNum(Source & source) +{ + unsigned char buf[8]; + source(buf, sizeof(buf)); + + uint64_t n = + ((unsigned long long) buf[0]) | + ((unsigned long long) buf[1] << 8) | + ((unsigned long long) buf[2] << 16) | + ((unsigned long long) buf[3] << 24) | + ((unsigned long long) buf[4] << 32) | + ((unsigned long long) buf[5] << 40) | + ((unsigned long long) buf[6] << 48) | + ((unsigned long long) buf[7] << 56); + + if (n > std::numeric_limits::max()) + throw SerialisationError("serialised integer %d is too large for type ‘%s’", n, typeid(T).name()); + + return n; +} + + +inline unsigned int readInt(Source & source) +{ + return readNum(source); +} + + +inline uint64_t readLongLong(Source & source) +{ + return readNum(source); +} + + void readPadding(size_t len, Source & source); -unsigned int readInt(Source & source); -unsigned long long readLongLong(Source & source); size_t readString(unsigned char * buf, size_t max, Source & source); string readString(Source & source); template T readStrings(Source & source); Source & operator >> (Source & in, string & s); -Source & operator >> (Source & in, unsigned int & n); +template +Source & operator >> (Source & in, T & n) +{ + n = readNum(in); + return in; +} -MakeError(SerialisationError, Error) +template +Source & operator >> (Source & in, bool & b) +{ + b = readNum(in); + return in; +} } diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 9fbc43b82d7..a1b3f2f6fbc 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -273,10 +273,9 @@ static void performOp(ref store, bool trusted, unsigned int clientVe } case wopAddToStore: { - string baseName = readString(from); - bool fixed = readInt(from) == 1; /* obsolete */ - bool recursive = readInt(from) == 1; - string s = readString(from); + bool fixed, recursive; + std::string s, baseName; + from >> baseName >> fixed /* obsolete */ >> recursive >> s; /* Compatibility hack. */ if (!fixed) { s = "sha256"; @@ -340,7 +339,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe PathSet drvs = readStorePaths(*store, from); BuildMode mode = bmNormal; if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { - mode = (BuildMode)readInt(from); + mode = (BuildMode) readInt(from); /* Repairing is not atomic, so disallowed for "untrusted" clients. */ @@ -417,8 +416,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe GCOptions options; options.action = (GCOptions::GCAction) readInt(from); options.pathsToDelete = readStorePaths(*store, from); - options.ignoreLiveness = readInt(from); - options.maxFreed = readLongLong(from); + from >> options.ignoreLiveness >> options.maxFreed; // obsolete fields readInt(from); readInt(from); @@ -438,8 +436,8 @@ static void performOp(ref store, bool trusted, unsigned int clientVe } case wopSetOptions: { - settings.keepFailed = readInt(from) != 0; - settings.keepGoing = readInt(from) != 0; + from >> settings.keepFailed; + from >> settings.keepGoing; settings.set("build-fallback", readInt(from) ? 
"true" : "false"); verbosity = (Verbosity) readInt(from); settings.set("build-max-jobs", std::to_string(readInt(from))); @@ -539,8 +537,8 @@ static void performOp(ref store, bool trusted, unsigned int clientVe break; case wopVerifyStore: { - bool checkContents = readInt(from) != 0; - bool repair = readInt(from) != 0; + bool checkContents, repair; + from >> checkContents >> repair; startWork(); if (repair && !trusted) throw Error("you are not privileged to repair paths"); @@ -573,19 +571,17 @@ static void performOp(ref store, bool trusted, unsigned int clientVe case wopAddToStoreNar: { ValidPathInfo info; info.path = readStorePath(*store, from); - info.deriver = readString(from); + from >> info.deriver; if (!info.deriver.empty()) store->assertStorePath(info.deriver); info.narHash = parseHash(htSHA256, readString(from)); info.references = readStorePaths(*store, from); - info.registrationTime = readInt(from); - info.narSize = readLongLong(from); - info.ultimate = readLongLong(from); + from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); - info.ca = readString(from); + from >> info.ca; auto nar = make_ref(readString(from)); - auto repair = readInt(from) ? true : false; - auto dontCheckSigs = readInt(from) ? true : false; + bool repair, dontCheckSigs; + from >> repair >> dontCheckSigs; if (!trusted && dontCheckSigs) dontCheckSigs = false; startWork(); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 0aabe66c562..bd889c8515d 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -839,7 +839,7 @@ static void opServe(Strings opFlags, Strings opArgs) settings.maxSilentTime = readInt(in); settings.buildTimeout = readInt(in); if (GET_PROTOCOL_MINOR(clientVersion) >= 2) - settings.maxLogSize = readInt(in); + in >> settings.maxLogSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 3) { settings.set("build-repeat", std::to_string(readInt(in))); settings.set("enforce-determinism", readInt(in) != 0 ? "true" : "false"); From e321551d540df340fadaa1b088d9e5225193c87a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 14:41:54 +0100 Subject: [PATCH 0141/2196] Fix assertion failure in nix-store --export Fixes #1173. 
--- src/nix-store/nix-store.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index bd889c8515d..868ec2f5a2d 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -708,6 +708,9 @@ static void opExport(Strings opFlags, Strings opArgs) for (auto & i : opFlags) throw UsageError(format("unknown flag ‘%1%’") % i); + for (auto & i : opArgs) + i = store->followLinksToStorePath(i); + FdSink sink(STDOUT_FILENO); store->exportPaths(opArgs, sink); } From 56e19d970db33de6b7aef088cde57817b5a61d61 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 14:47:52 +0100 Subject: [PATCH 0142/2196] nix-store --import: Fix importing unsigned paths --- src/nix-store/nix-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 868ec2f5a2d..950c2a7c977 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -724,7 +724,7 @@ static void opImport(Strings opFlags, Strings opArgs) if (!opArgs.empty()) throw UsageError("no arguments expected"); FdSource source(STDIN_FILENO); - Paths paths = store->importPaths(source, 0); + Paths paths = store->importPaths(source, nullptr, true); for (auto & i : paths) cout << format("%1%\n") % i << std::flush; From 07a0b8ca671669794f51f9d886397bf038a8881e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 14:52:16 +0100 Subject: [PATCH 0143/2196] Tweak message --- src/libstore/local-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index bc63955af0e..63f069c2ff1 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -920,7 +920,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & info.path % info.narHash.to_string() % h.to_string()); if (requireSigs && !dontCheckSigs && !info.checkSignatures(*this, publicKeys)) - throw Error(format("cannot import path ‘%s’ because it lacks a valid signature") % info.path); + throw Error("cannot add path ‘%s’ because it lacks a valid signature", info.path); addTempRoot(info.path); From 374908726b87f6cd137ea7d097fdcda57003594e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 14:54:11 +0100 Subject: [PATCH 0144/2196] readString(): Read directly into std::string When reading a huge string, this halves memory consumption. (Strictly speaking, this appears only valid in C++17, but who cares...) --- src/libutil/serialise.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 6064e15f5e6..950e6362a24 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -207,10 +207,10 @@ size_t readString(unsigned char * buf, size_t max, Source & source) string readString(Source & source) { auto len = readNum(source); - auto buf = std::make_unique(len); - source(buf.get(), len); + std::string res(len, 0); + source((unsigned char*) res.data(), len); readPadding(len, source); - return string((char *) buf.get(), len); + return res; } Source & operator >> (Source & in, string & s) From f61f67ddee12a976a0a6a20652e7c545b49fa46c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 16:07:15 +0100 Subject: [PATCH 0145/2196] RemoteStore::addToStore(): Send NAR rather than string containing NAR This allows the NAR to be streamed in the future (though we're not doing that yet). 
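Because the NAR is no longer wrapped in a length-prefixed protocol string, the receiver has to discover where it ends by parsing the NAR structure itself, while keeping a copy of the raw bytes it consumed. A stripped-down sketch of that tee idea (Reader and Tee are illustrative stand-ins; the real adapter is the TeeSource moved into serialise.hh in the diff below):

    #include <cstddef>
    #include <string>

    // Minimal stand-in for a byte source.
    struct Reader
    {
        virtual std::size_t read(unsigned char * data, std::size_t len) = 0;
        virtual ~Reader() { }
    };

    // Forward every read to the wrapped reader and remember the bytes seen,
    // so a parser can walk the NAR while the raw data is still captured.
    struct Tee : Reader
    {
        Reader & inner;
        std::string seen;
        Tee(Reader & inner) : inner(inner) { }
        std::size_t read(unsigned char * data, std::size_t len) override
        {
            std::size_t n = inner.read(data, len);
            seen.append((const char *) data, n);
            return n;
        }
    };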
--- src/libstore/export-import.cc | 23 +---------------------- src/libstore/legacy-ssh-store.cc | 4 ++-- src/libstore/remote-store.cc | 5 +++-- src/libutil/serialise.hh | 9 +++++---- src/nix-daemon/nix-daemon.cc | 17 ++++++++++------- 5 files changed, 21 insertions(+), 37 deletions(-) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index e584ae538da..531f010d93a 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -61,27 +61,6 @@ void Store::exportPath(const Path & path, Sink & sink) hashAndWriteSink << exportMagic << path << info->references << info->deriver << 0; } -struct TeeSource : Source -{ - Source & readSource; - ref data; - TeeSource(Source & readSource) - : readSource(readSource) - , data(make_ref()) - { - } - size_t read(unsigned char * data, size_t len) - { - size_t n = readSource.read(data, len); - this->data->append((char *) data, n); - return n; - } -}; - -struct NopSink : ParseSink -{ -}; - Paths Store::importPaths(Source & source, std::shared_ptr accessor, bool dontCheckSigs) { Paths res; @@ -92,7 +71,7 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, /* Extract the NAR from the source. */ TeeSource tee(source); - NopSink sink; + ParseSink sink; parseDump(sink, tee); uint32_t magic = readInt(source); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index b20ff185f9b..031fcac95e5 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -169,9 +169,9 @@ struct LegacySSHStore : public Store /* FIXME: inefficient. */ ParseSink parseSink; /* null sink; just parse the NAR */ - SavingSourceAdapter savedNAR(conn->from); + TeeSource savedNAR(conn->from); parseDump(parseSink, savedNAR); - sink(savedNAR.s); + sink(*savedNAR.data); } /* Unsupported methods. */ diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 6428259144a..47413d573b7 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -378,8 +378,9 @@ void RemoteStore::addToStore(const ValidPathInfo & info, const ref conn->to << wopAddToStoreNar << info.path << info.deriver << printHash(info.narHash) << info.references << info.registrationTime << info.narSize - << info.ultimate << info.sigs << info.ca << *nar << repair << dontCheckSigs; - // FIXME: don't send nar as a string + << info.ultimate << info.sigs << info.ca + << repair << dontCheckSigs; + conn->to(*nar); conn->processStderr(); } } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 3072f422ea9..2bdee70807b 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -140,15 +140,16 @@ struct StringSource : Source /* Adapter class of a Source that saves all data read to `s'. 
*/ -struct SavingSourceAdapter : Source +struct TeeSource : Source { Source & orig; - string s; - SavingSourceAdapter(Source & orig) : orig(orig) { } + ref data; + TeeSource(Source & orig) + : orig(orig), data(make_ref()) { } size_t read(unsigned char * data, size_t len) { size_t n = orig.read(data, len); - s.append((const char *) data, n); + this->data->append((const char *) data, n); return n; } }; diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index a1b3f2f6fbc..17482188444 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -283,7 +283,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe } HashType hashAlgo = parseHashType(s); - SavingSourceAdapter savedNAR(from); + TeeSource savedNAR(from); RetrieveRegularNARSink savedRegular; if (recursive) { @@ -297,7 +297,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe startWork(); if (!savedRegular.regular) throw Error("regular file expected"); - Path path = store->addToStoreFromDump(recursive ? savedNAR.s : savedRegular.s, baseName, recursive, hashAlgo); + Path path = store->addToStoreFromDump(recursive ? *savedNAR.data : savedRegular.s, baseName, recursive, hashAlgo); stopWork(); to << path; @@ -569,6 +569,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe } case wopAddToStoreNar: { + bool repair, dontCheckSigs; ValidPathInfo info; info.path = readStorePath(*store, from); from >> info.deriver; @@ -578,14 +579,16 @@ static void performOp(ref store, bool trusted, unsigned int clientVe info.references = readStorePaths(*store, from); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); - from >> info.ca; - auto nar = make_ref(readString(from)); - bool repair, dontCheckSigs; - from >> repair >> dontCheckSigs; + from >> info.ca >> repair >> dontCheckSigs; if (!trusted && dontCheckSigs) dontCheckSigs = false; + + TeeSource tee(from); + ParseSink sink; + parseDump(sink, tee); + startWork(); - store->addToStore(info, nar, repair, dontCheckSigs, nullptr); + store->addToStore(info, tee.data, repair, dontCheckSigs, nullptr); stopWork(); break; } From fa125b9b28bea25a4eeb4d39a71a481563127cb9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 1 Mar 2017 16:16:04 +0100 Subject: [PATCH 0146/2196] TeeSink: Pre-reserve string space When receiving a very large file, this can prevent the string from having to be copied, which temporarily doubles memory consumption. --- src/libstore/export-import.cc | 11 +++++------ src/libutil/archive.hh | 13 +++++++++++++ src/nix-daemon/nix-daemon.cc | 7 +++---- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 531f010d93a..2b8ab063e18 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -70,9 +70,8 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, if (n != 1) throw Error("input doesn't look like something created by ‘nix-store --export’"); /* Extract the NAR from the source.
*/ - TeeSource tee(source); - ParseSink sink; - parseDump(sink, tee); + TeeSink tee(source); + parseDump(tee, tee.source); uint32_t magic = readInt(source); if (magic != exportMagic) @@ -89,14 +88,14 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, info.deriver = readString(source); if (info.deriver != "") assertStorePath(info.deriver); - info.narHash = hashString(htSHA256, *tee.data); - info.narSize = tee.data->size(); + info.narHash = hashString(htSHA256, *tee.source.data); + info.narSize = tee.source.data->size(); // Ignore optional legacy signature. if (readInt(source) == 1) readString(source); - addToStore(info, tee.data, false, dontCheckSigs, accessor); + addToStore(info, tee.source.data, false, dontCheckSigs, accessor); res.push_back(info.path); } diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index d58b91df046..c067cd2ad24 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -70,6 +70,19 @@ struct ParseSink virtual void createSymlink(const Path & path, const string & target) { }; }; +struct TeeSink : ParseSink +{ + TeeSource source; + + TeeSink(Source & source) : source(source) { } + + void preallocateContents(unsigned long long size) override + { + source.data->reserve(source.data->size() + size + 1024); + }; + +}; + void parseDump(ParseSink & sink, Source & source); void restorePath(const Path & path, Source & source); diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 17482188444..ab5826b0d1a 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -583,12 +583,11 @@ static void performOp(ref store, bool trusted, unsigned int clientVe if (!trusted && dontCheckSigs) dontCheckSigs = false; - TeeSource tee(from); - ParseSink sink; - parseDump(sink, tee); + TeeSink tee(from); + parseDump(tee, tee.source); startWork(); - store->addToStore(info, tee.data, repair, dontCheckSigs, nullptr); + store->addToStore(info, tee.source.data, repair, dontCheckSigs, nullptr); stopWork(); break; } From fbf17f1ad7259f08ab53dc8319ae0f452321f081 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 2 Mar 2017 11:40:11 +0100 Subject: [PATCH 0147/2196] builtins.fetchgit: Fix bad format string --- src/libexpr/primops/fetchgit.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index bd440c8c62a..6a5b5ac36e6 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -63,7 +63,7 @@ static void prim_fetchgit(EvalState & state, const Pos & pos, Value * * args, Va else if (name == "rev") rev = state.forceStringNoCtx(*attr.value, *attr.pos); else - throw EvalError(format("unsupported argument ‘%1%’ to ‘fetchgit’, at %3%") % attr.name % attr.pos); + throw EvalError("unsupported argument ‘%s’ to ‘fetchgit’, at %s", attr.name, *attr.pos); } if (url.empty()) From ffcf9d24a68fd166f5c60ebae721a9badc469b0e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 2 Mar 2017 11:46:28 +0100 Subject: [PATCH 0148/2196] builtins.fetchgit: Support paths --- src/libexpr/primops/fetchgit.cc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index 6a5b5ac36e6..09e2c077bab 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -58,9 +58,11 @@ static void prim_fetchgit(EvalState & state, const Pos & pos, Value * * args, Va for (auto & attr : *args[0]->attrs) { string name(attr.name); - if (name == 
"url") - url = state.forceStringNoCtx(*attr.value, *attr.pos); - else if (name == "rev") + if (name == "url") { + PathSet context; + url = state.coerceToString(*attr.pos, *attr.value, context, false, false); + if (hasPrefix(url, "/")) url = "file://" + url; + } else if (name == "rev") rev = state.forceStringNoCtx(*attr.value, *attr.pos); else throw EvalError("unsupported argument ‘%s’ to ‘fetchgit’, at %s", attr.name, *attr.pos); From 2f992692e2badef2d9084335bd7290940a031671 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 15:40:06 +0100 Subject: [PATCH 0149/2196] Fix fatal "broken pipe" error when $NIX_BUILD_HOOK is missing --- src/libstore/build.cc | 66 +++++++++++++++++++++++++------------------ 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index eef9549669b..fd1f5dc3a4d 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1580,36 +1580,48 @@ HookReply DerivationGoal::tryBuildHook() if (!worker.hook) worker.hook = std::make_unique(); - /* Tell the hook about system features (beyond the system type) - required from the build machine. (The hook could parse the - drv file itself, but this is easier.) */ - Strings features = tokenizeString(get(drv->env, "requiredSystemFeatures")); - for (auto & i : features) checkStoreName(i); /* !!! abuse */ - - /* Send the request to the hook. */ - writeLine(worker.hook->toHook.writeSide.get(), (format("%1% %2% %3% %4%") - % (worker.getNrLocalBuilds() < settings.maxBuildJobs ? "1" : "0") - % drv->platform % drvPath % concatStringsSep(",", features)).str()); - - /* Read the first line of input, which should be a word indicating - whether the hook wishes to perform the build. */ - string reply; - while (true) { - string s = readLine(worker.hook->fromHook.readSide.get()); - if (string(s, 0, 2) == "# ") { - reply = string(s, 2); - break; + try { + + /* Tell the hook about system features (beyond the system type) + required from the build machine. (The hook could parse the + drv file itself, but this is easier.) */ + Strings features = tokenizeString(get(drv->env, "requiredSystemFeatures")); + for (auto & i : features) checkStoreName(i); /* !!! abuse */ + + /* Send the request to the hook. */ + writeLine(worker.hook->toHook.writeSide.get(), (format("%1% %2% %3% %4%") + % (worker.getNrLocalBuilds() < settings.maxBuildJobs ? "1" : "0") + % drv->platform % drvPath % concatStringsSep(",", features)).str()); + + /* Read the first line of input, which should be a word indicating + whether the hook wishes to perform the build. */ + string reply; + while (true) { + string s = readLine(worker.hook->fromHook.readSide.get()); + if (string(s, 0, 2) == "# ") { + reply = string(s, 2); + break; + } + s += "\n"; + writeToStderr(s); } - s += "\n"; - writeToStderr(s); - } - debug(format("hook reply is ‘%1%’") % reply); + debug(format("hook reply is ‘%1%’") % reply); + + if (reply == "decline" || reply == "postpone") + return reply == "decline" ? rpDecline : rpPostpone; + else if (reply != "accept") + throw Error(format("bad hook reply ‘%1%’") % reply); - if (reply == "decline" || reply == "postpone") - return reply == "decline" ? 
rpDecline : rpPostpone; - else if (reply != "accept") - throw Error(format("bad hook reply ‘%1%’") % reply); + } catch (SysError & e) { + if (e.errNo == EPIPE) { + printError("build hook died unexpectedly: %s", + chomp(drainFD(worker.hook->fromHook.readSide.get()))); + worker.hook = 0; + return rpDecline; + } else + throw; + } printMsg(lvlTalkative, format("using hook to build path(s) %1%") % showPaths(missingPaths)); From 5a1fb03b8fc32fd93708f5030537dd580910a35a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 16:18:49 +0100 Subject: [PATCH 0150/2196] build-remote: Misc cleanup --- src/build-remote/build-remote.cc | 89 +++++++++++++++----------------- 1 file changed, 41 insertions(+), 48 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 2ce20882da1..fe68b4cd8c4 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -17,13 +17,12 @@ #include "derivations.hh" using namespace nix; -using std::cerr; using std::cin; -static void handle_alarm(int sig) { +static void handleAlarm(int sig) { } -class machine { +class Machine { const std::set supportedFeatures; const std::set mandatoryFeatures; @@ -31,8 +30,8 @@ class machine { const string hostName; const std::vector systemTypes; const string sshKey; - const unsigned long long maxJobs; - const unsigned long long speedFactor; + const unsigned int maxJobs; + const unsigned int speedFactor; bool enabled; bool allSupported(const std::set & features) const { @@ -50,28 +49,29 @@ class machine { }); } - machine(decltype(hostName) hostName, + Machine(decltype(hostName) hostName, decltype(systemTypes) systemTypes, decltype(sshKey) sshKey, decltype(maxJobs) maxJobs, decltype(speedFactor) speedFactor, decltype(supportedFeatures) supportedFeatures, decltype(mandatoryFeatures) mandatoryFeatures) : - supportedFeatures{std::move(supportedFeatures)}, - mandatoryFeatures{std::move(mandatoryFeatures)}, - hostName{std::move(hostName)}, - systemTypes{std::move(systemTypes)}, - sshKey{std::move(sshKey)}, - maxJobs{std::move(maxJobs)}, - speedFactor{speedFactor == 0 ? 
1 : std::move(speedFactor)}, - enabled{true} {}; + supportedFeatures(supportedFeatures), + mandatoryFeatures(mandatoryFeatures), + hostName(hostName), + systemTypes(systemTypes), + sshKey(sshKey), + maxJobs(maxJobs), + speedFactor(std::max(1U, speedFactor)), + enabled(true) + {}; };; -static std::vector read_conf() +static std::vector readConf() { auto conf = getEnv("NIX_REMOTE_SYSTEMS", SYSCONFDIR "/nix/machines"); - auto machines = std::vector{}; + auto machines = std::vector{}; auto lines = std::vector{}; try { lines = tokenizeString>(readFile(conf), "\n"); @@ -87,10 +87,8 @@ static std::vector read_conf() } auto tokens = tokenizeString>(line); auto sz = tokens.size(); - if (sz < 4) { - throw new FormatError(format("Bad machines.conf file %1%") - % conf); - } + if (sz < 4) + throw FormatError("bad machines.conf file ‘%1%’", conf); machines.emplace_back(tokens[0], tokenizeString>(tokens[1], ","), tokens[2], @@ -108,7 +106,7 @@ static std::vector read_conf() static string currentLoad; -static AutoCloseFD openSlotLock(const machine & m, unsigned long long slot) +static AutoCloseFD openSlotLock(const Machine & m, unsigned long long slot) { std::ostringstream fn_stream(currentLoad, std::ios_base::ate | std::ios_base::out); fn_stream << "/"; @@ -126,15 +124,14 @@ int main (int argc, char * * argv) { return handleExceptions(argv[0], [&]() { initNix(); + /* Ensure we don't get any SSH passphrase or host key popups. */ if (putenv(display_env) == -1 || - putenv(ssh_env) == -1) { - throw SysError("Setting SSH env vars"); - } + putenv(ssh_env) == -1) + throw SysError("setting SSH env vars"); - if (argc != 4) { + if (argc != 4) throw UsageError("called without required arguments"); - } auto store = openStore(); @@ -147,15 +144,14 @@ int main (int argc, char * * argv) std::shared_ptr sshStore; AutoCloseFD bestSlotLock; - auto machines = read_conf(); + auto machines = readConf(); string drvPath; string hostName; for (string line; getline(cin, line);) { auto tokens = tokenizeString>(line); auto sz = tokens.size(); - if (sz != 3 && sz != 4) { - throw Error(format("invalid build hook line %1%") % line); - } + if (sz != 3 && sz != 4) + throw Error("invalid build hook line ‘%1%’", line); auto amWilling = tokens[0] == "1"; auto neededSystem = tokens[1]; drvPath = tokens[2]; @@ -174,7 +170,7 @@ int main (int argc, char * * argv) bool rightType = false; - machine * bestMachine = nullptr; + Machine * bestMachine = nullptr; unsigned long long bestLoad = 0; for (auto & m : machines) { if (m.enabled && std::find(m.systemTypes.begin(), @@ -221,11 +217,10 @@ int main (int argc, char * * argv) } if (!bestSlotLock) { - if (rightType && !canBuildLocally) { - cerr << "# postpone\n"; - } else { - cerr << "# decline\n"; - } + if (rightType && !canBuildLocally) + std::cerr << "# postpone\n"; + else + std::cerr << "# decline\n"; break; } @@ -241,37 +236,35 @@ int main (int argc, char * * argv) sshStore = openStore("ssh://" + bestMachine->hostName + "?key=" + bestMachine->sshKey); hostName = bestMachine->hostName; } catch (std::exception & e) { - cerr << e.what() << '\n'; - cerr << "unable to open SSH connection to ‘" << bestMachine->hostName << "’, trying other available machines...\n"; + printError("unable to open SSH connection to ‘%s’: %s; trying other available machines...", + bestMachine->hostName, e.what()); bestMachine->enabled = false; continue; } goto connected; } } + connected: - cerr << "# accept\n"; + std::cerr << "# accept\n"; string line; - if (!getline(cin, line)) { + if (!getline(cin, line)) throw 
Error("hook caller didn't send inputs"); - } auto inputs = tokenizeString>(line); - if (!getline(cin, line)) { + if (!getline(cin, line)) throw Error("hook caller didn't send outputs"); - } auto outputs = tokenizeString(line); AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + hostName + ".upload-lock", true); - auto old = signal(SIGALRM, handle_alarm); + auto old = signal(SIGALRM, handleAlarm); alarm(15 * 60); - if (!lockFile(uploadLock.get(), ltWrite, true)) { - cerr << "somebody is hogging the upload lock for " << hostName << ", continuing...\n"; - } + if (!lockFile(uploadLock.get(), ltWrite, true)) + printError("somebody is hogging the upload lock for ‘%s’, continuing..."); alarm(0); signal(SIGALRM, old); copyPaths(store, ref(sshStore), inputs); uploadLock = -1; - cerr << "building ‘" << drvPath << "’ on ‘" << hostName << "’\n"; + printError("building ‘%s’ on ‘%s’", drvPath, hostName); sshStore->buildDerivation(drvPath, readDerivation(drvPath)); std::remove_if(outputs.begin(), outputs.end(), [=](const Path & path) { return store->isValidPath(path); }); From 7f62be1bcd2a228076a6c39eb435ad1931bb66e4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 16:33:18 +0100 Subject: [PATCH 0151/2196] build-remote: Fix passing SSH key --- src/build-remote/build-remote.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index fe68b4cd8c4..fd710c2b6b7 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -233,7 +233,7 @@ int main (int argc, char * * argv) lock = -1; try { - sshStore = openStore("ssh://" + bestMachine->hostName + "?key=" + bestMachine->sshKey); + sshStore = openStore("ssh://" + bestMachine->hostName + "?ssh-key=" + bestMachine->sshKey); hostName = bestMachine->hostName; } catch (std::exception & e) { printError("unable to open SSH connection to ‘%s’: %s; trying other available machines...", From 577ebeaefb71020f0d6b79488602fd56ba2c1863 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 19:05:50 +0100 Subject: [PATCH 0152/2196] Improve SSH handling * Unify SSH code in SSHStore and LegacySSHStore. * Fix a race starting the SSH master. We now wait synchronously for the SSH master to finish starting. This prevents the SSH clients from starting their own connections. * Don't use a master if max-connections == 1. * Add a "max-connections" store parameter. * Add a "compress" store parameter. 
--- src/libstore/legacy-ssh-store.cc | 60 ++++++--------------- src/libstore/remote-store.cc | 10 ++-- src/libstore/remote-store.hh | 4 +- src/libstore/ssh-store.cc | 83 ++++++++-------------------- src/libstore/ssh.cc | 93 ++++++++++++++++++++++++++++++++ src/libstore/ssh.hh | 41 ++++++++++++++ src/libutil/pool.hh | 7 ++- 7 files changed, 185 insertions(+), 113 deletions(-) create mode 100644 src/libstore/ssh.cc create mode 100644 src/libstore/ssh.hh diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 031fcac95e5..4a2ac42f31c 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -4,6 +4,7 @@ #include "serve-protocol.hh" #include "store-api.hh" #include "worker-protocol.hh" +#include "ssh.hh" namespace nix { @@ -11,73 +12,42 @@ static std::string uriScheme = "legacy-ssh://"; struct LegacySSHStore : public Store { - string host; - struct Connection { - Pid sshPid; - AutoCloseFD out; - AutoCloseFD in; + std::unique_ptr sshConn; FdSink to; FdSource from; }; - AutoDelete tmpDir; - - Path socketPath; - - Pid sshMaster; + std::string host; ref> connections; - Path key; + SSHMaster master; - LegacySSHStore(const string & host, const Params & params, - size_t maxConnections = std::numeric_limits::max()) + LegacySSHStore(const string & host, const Params & params) : Store(params) , host(host) - , tmpDir(createTempDir("", "nix", true, true, 0700)) - , socketPath((Path) tmpDir + "/ssh.sock") , connections(make_ref>( - maxConnections, + std::max(1, std::stoi(get(params, "max-connections", "1"))), [this]() { return openConnection(); }, [](const ref & r) { return true; } )) - , key(get(params, "ssh-key", "")) + , master( + host, + get(params, "ssh-key", ""), + // Use SSH master only if using more than 1 connection. 
+ connections->capacity() > 1, + get(params, "compress", "") == "true") { } ref openConnection() { - if ((pid_t) sshMaster == -1) { - sshMaster = startProcess([&]() { - restoreSignals(); - Strings args{ "ssh", "-M", "-S", socketPath, "-N", "-x", "-a", host }; - if (!key.empty()) - args.insert(args.end(), {"-i", key}); - execvp("ssh", stringsToCharPtrs(args).data()); - throw SysError("starting SSH master connection to host ‘%s’", host); - }); - } - auto conn = make_ref(); - Pipe in, out; - in.create(); - out.create(); - conn->sshPid = startProcess([&]() { - if (dup2(in.readSide.get(), STDIN_FILENO) == -1) - throw SysError("duping over STDIN"); - if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) - throw SysError("duping over STDOUT"); - execlp("ssh", "ssh", "-S", socketPath.c_str(), host.c_str(), "nix-store", "--serve", "--write", nullptr); - throw SysError("executing ‘nix-store --serve’ on remote host ‘%s’", host); - }); - in.readSide = -1; - out.writeSide = -1; - conn->out = std::move(out.readSide); - conn->in = std::move(in.writeSide); - conn->to = FdSink(conn->in.get()); - conn->from = FdSource(conn->out.get()); + conn->sshConn = master.startCommand("nix-store --serve"); + conn->to = FdSink(conn->sshConn->in.get()); + conn->from = FdSource(conn->sshConn->out.get()); int remoteVersion; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 47413d573b7..5e62bd3d5ac 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -40,10 +40,10 @@ template PathSet readStorePaths(Store & store, Source & from); template Paths readStorePaths(Store & store, Source & from); /* TODO: Separate these store impls into different files, give them better names */ -RemoteStore::RemoteStore(const Params & params, size_t maxConnections) +RemoteStore::RemoteStore(const Params & params) : Store(params) , connections(make_ref>( - maxConnections, + std::max(1, std::stoi(get(params, "max-connections", "1"))), [this]() { return openConnection(); }, [](const ref & r) { return r->to.good() && r->from.good(); } )) @@ -51,10 +51,10 @@ RemoteStore::RemoteStore(const Params & params, size_t maxConnections) } -UDSRemoteStore::UDSRemoteStore(const Params & params, size_t maxConnections) +UDSRemoteStore::UDSRemoteStore(const Params & params) : Store(params) , LocalFSStore(params) - , RemoteStore(params, maxConnections) + , RemoteStore(params) { } @@ -129,7 +129,7 @@ void RemoteStore::initConnection(Connection & conn) conn.processStderr(); } catch (Error & e) { - throw Error(format("cannot start daemon worker: %1%") % e.msg()); + throw Error("cannot open connection to remote store ‘%s’: %s", getUri(), e.what()); } setOptions(conn); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 40f17da300d..ed7b27c8886 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -22,7 +22,7 @@ class RemoteStore : public virtual Store { public: - RemoteStore(const Params & params, size_t maxConnections = std::numeric_limits::max()); + RemoteStore(const Params & params); /* Implementations of abstract store API methods. 
*/ @@ -113,7 +113,7 @@ class UDSRemoteStore : public LocalFSStore, public RemoteStore { public: - UDSRemoteStore(const Params & params, size_t maxConnections = std::numeric_limits::max()); + UDSRemoteStore(const Params & params); std::string getUri() override; diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 6f1862afa89..20f020bdada 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -4,6 +4,7 @@ #include "archive.hh" #include "worker-protocol.hh" #include "pool.hh" +#include "ssh.hh" namespace nix { @@ -13,9 +14,23 @@ class SSHStore : public RemoteStore { public: - SSHStore(string host, const Params & params, size_t maxConnections = std::numeric_limits::max()); + SSHStore(const std::string & host, const Params & params) + : Store(params) + , RemoteStore(params) + , host(host) + , master( + host, + get(params, "ssh-key", ""), + // Use SSH master only if using more than 1 connection. + connections->capacity() > 1, + get(params, "compress", "") == "true") + { + } - std::string getUri() override; + std::string getUri() override + { + return uriScheme + host; + } void narFromPath(const Path & path, Sink & sink) override; @@ -25,43 +40,16 @@ class SSHStore : public RemoteStore struct Connection : RemoteStore::Connection { - Pid sshPid; - AutoCloseFD out; - AutoCloseFD in; + std::unique_ptr sshConn; }; ref openConnection() override; - AutoDelete tmpDir; - - Path socketPath; - - Pid sshMaster; - - string host; - - Path key; + std::string host; - bool compress; + SSHMaster master; }; -SSHStore::SSHStore(string host, const Params & params, size_t maxConnections) - : Store(params) - , RemoteStore(params, maxConnections) - , tmpDir(createTempDir("", "nix", true, true, 0700)) - , socketPath((Path) tmpDir + "/ssh.sock") - , host(std::move(host)) - , key(get(params, "ssh-key", "")) - , compress(get(params, "compress", "") == "true") -{ - /* open a connection and perform the handshake to verify all is well */ - connections->get(); -} - -string SSHStore::getUri() -{ - return uriScheme + host; -} class ForwardSource : public Source { @@ -94,35 +82,10 @@ ref SSHStore::getFSAccessor() ref SSHStore::openConnection() { - if ((pid_t) sshMaster == -1) { - sshMaster = startProcess([&]() { - restoreSignals(); - if (key.empty()) - execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), host.c_str(), NULL); - else - execlp("ssh", "ssh", "-N", "-M", "-S", socketPath.c_str(), "-i", key.c_str(), host.c_str(), NULL); - throw SysError("starting ssh master"); - }); - } - auto conn = make_ref(); - Pipe in, out; - in.create(); - out.create(); - conn->sshPid = startProcess([&]() { - if (dup2(in.readSide.get(), STDIN_FILENO) == -1) - throw SysError("duping over STDIN"); - if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) - throw SysError("duping over STDOUT"); - execlp("ssh", "ssh", "-S", socketPath.c_str(), host.c_str(), "nix-daemon", "--stdio", NULL); - throw SysError("executing nix-daemon --stdio over ssh"); - }); - in.readSide = -1; - out.writeSide = -1; - conn->out = std::move(out.readSide); - conn->in = std::move(in.writeSide); - conn->to = FdSink(conn->in.get()); - conn->from = FdSource(conn->out.get()); + conn->sshConn = master.startCommand("nix-daemon --stdio"); + conn->to = FdSink(conn->sshConn->in.get()); + conn->from = FdSource(conn->sshConn->out.get()); initConnection(*conn); return conn; } diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc new file mode 100644 index 00000000000..7c3de4a4827 --- /dev/null +++ b/src/libstore/ssh.cc @@ -0,0 +1,93 @@ +#include 
"ssh.hh" + +namespace nix { + +std::unique_ptr SSHMaster::startCommand(const std::string & command) +{ + startMaster(); + + Pipe in, out; + in.create(); + out.create(); + + auto conn = std::make_unique(); + conn->sshPid = startProcess([&]() { + restoreSignals(); + + close(in.writeSide.get()); + close(out.readSide.get()); + + if (dup2(in.readSide.get(), STDIN_FILENO) == -1) + throw SysError("duping over stdin"); + if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) + throw SysError("duping over stdout"); + + Strings args = { "ssh", host.c_str(), "-x", "-a" }; + if (!keyFile.empty()) + args.insert(args.end(), {"-i", keyFile}); + if (compress) + args.push_back("-C"); + if (useMaster) + args.insert(args.end(), {"-S", socketPath}); + args.push_back(command); + execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); + + throw SysError("executing ‘%s’ on ‘%s’", command, host); + }); + + + in.readSide = -1; + out.writeSide = -1; + + conn->out = std::move(out.readSide); + conn->in = std::move(in.writeSide); + + return conn; +} + +void SSHMaster::startMaster() +{ + if (!useMaster || sshMaster != -1) return; + + tmpDir = std::make_unique(createTempDir("", "nix", true, true, 0700)); + + socketPath = (Path) *tmpDir + "/ssh.sock"; + + Pipe out; + out.create(); + + sshMaster = startProcess([&]() { + restoreSignals(); + + close(out.readSide.get()); + + if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) + throw SysError("duping over stdout"); + + Strings args = + { "ssh", host.c_str(), "-M", "-N", "-S", socketPath + , "-o", "LocalCommand=echo started" + , "-o", "PermitLocalCommand=yes" + }; + if (!keyFile.empty()) + args.insert(args.end(), {"-i", keyFile}); + if (compress) + args.push_back("-C"); + + execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); + + throw SysError("starting SSH master"); + }); + + out.writeSide = -1; + + std::string reply; + try { + reply = readLine(out.readSide.get()); + } catch (EndOfFile & e) { } + + if (reply != "started") + throw Error("failed to start SSH master connection to ‘%s’", host); +} + +} diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh new file mode 100644 index 00000000000..2d2b9837039 --- /dev/null +++ b/src/libstore/ssh.hh @@ -0,0 +1,41 @@ +#pragma once + +#include "util.hh" + +namespace nix { + +class SSHMaster +{ +private: + + std::string host; + std::string keyFile; + bool useMaster; + bool compress; + Pid sshMaster; + std::unique_ptr tmpDir; + Path socketPath; + +public: + + SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress) + : host(host) + , keyFile(keyFile) + , useMaster(useMaster) + , compress(compress) + { + } + + struct Connection + { + Pid sshPid; + AutoCloseFD out, in; + }; + + std::unique_ptr startCommand(const std::string & command); + + void startMaster(); + +}; + +} diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh index f291cd57838..3c3dd4b074f 100644 --- a/src/libutil/pool.hh +++ b/src/libutil/pool.hh @@ -141,11 +141,16 @@ public: } } - unsigned int count() + size_t count() { auto state_(state.lock()); return state_->idle.size() + state_->inUse; } + + size_t capacity() + { + return state.lock()->max; + } }; } From d140c75530d385431850bed2f2c250f889c43c2f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 19:21:43 +0100 Subject: [PATCH 0153/2196] Pool: Don't hang if creating a connection fails --- src/libutil/pool.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh index 3c3dd4b074f..20df2194884 100644 --- 
a/src/libutil/pool.hh +++ b/src/libutil/pool.hh @@ -137,6 +137,7 @@ public: } catch (...) { auto state_(state.lock()); state_->inUse--; + wakeup.notify_one(); throw; } } From d3eb1cf3bbf57a33ac2e71a19a150c077011ecd9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 19:23:20 +0100 Subject: [PATCH 0154/2196] build-remote: Don't use a SSH master This is unnecessary because we make only one connection. --- src/build-remote/build-remote.cc | 4 +++- src/libstore/store-api.cc | 4 ++++ src/libstore/store-api.hh | 2 ++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index fd710c2b6b7..3908dfac487 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -233,7 +233,9 @@ int main (int argc, char * * argv) lock = -1; try { - sshStore = openStore("ssh://" + bestMachine->hostName + "?ssh-key=" + bestMachine->sshKey); + sshStore = openStore("ssh://" + bestMachine->hostName, + { {"ssh-key", bestMachine->sshKey }, + {"max-connections", "1" } }); hostName = bestMachine->hostName; } catch (std::exception & e) { printError("unable to open SSH connection to ‘%s’: %s; trying other available machines...", diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 401b001b2d8..9c755965e45 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -708,7 +708,11 @@ ref openStore(const std::string & uri_) } uri = uri_.substr(0, q); } + return openStore(uri, params); +} +ref openStore(const std::string & uri, const Store::Params & params) +{ for (auto fun : *RegisterStoreImplementation::implementations) { auto store = fun(uri, params); if (store) return ref(store); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index c344b9d66ed..481d0b79906 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -642,6 +642,8 @@ void removeTempRoots(); set to true *unless* you're going to collect garbage. 
*/ ref openStore(const std::string & uri = getEnv("NIX_REMOTE")); +ref openStore(const std::string & uri, const Store::Params & params); + void copyPaths(ref from, ref to, const Paths & storePaths, bool substitute = false); From 8490ee37a6dbfb66e1b3dbaf88918bea044b143a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 19:28:27 +0100 Subject: [PATCH 0155/2196] SSHMaster: Make thread-safe --- src/libstore/ssh.cc | 22 ++++++++++++++-------- src/libstore/ssh.hh | 24 +++++++++++++++--------- 2 files changed, 29 insertions(+), 17 deletions(-) diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 7c3de4a4827..4f88fa64dbd 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -4,7 +4,7 @@ namespace nix { std::unique_ptr SSHMaster::startCommand(const std::string & command) { - startMaster(); + Path socketPath = startMaster(); Pipe in, out; in.create(); @@ -27,7 +27,7 @@ std::unique_ptr SSHMaster::startCommand(const std::string args.insert(args.end(), {"-i", keyFile}); if (compress) args.push_back("-C"); - if (useMaster) + if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); args.push_back(command); execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); @@ -45,18 +45,22 @@ std::unique_ptr SSHMaster::startCommand(const std::string return conn; } -void SSHMaster::startMaster() +Path SSHMaster::startMaster() { - if (!useMaster || sshMaster != -1) return; + if (!useMaster) return ""; - tmpDir = std::make_unique(createTempDir("", "nix", true, true, 0700)); + auto state(state_.lock()); - socketPath = (Path) *tmpDir + "/ssh.sock"; + if (state->sshMaster != -1) return state->socketPath; + + state->tmpDir = std::make_unique(createTempDir("", "nix", true, true, 0700)); + + state->socketPath = (Path) *state->tmpDir + "/ssh.sock"; Pipe out; out.create(); - sshMaster = startProcess([&]() { + state->sshMaster = startProcess([&]() { restoreSignals(); close(out.readSide.get()); @@ -65,7 +69,7 @@ void SSHMaster::startMaster() throw SysError("duping over stdout"); Strings args = - { "ssh", host.c_str(), "-M", "-N", "-S", socketPath + { "ssh", host.c_str(), "-M", "-N", "-S", state->socketPath , "-o", "LocalCommand=echo started" , "-o", "PermitLocalCommand=yes" }; @@ -88,6 +92,8 @@ void SSHMaster::startMaster() if (reply != "started") throw Error("failed to start SSH master connection to ‘%s’", host); + + return state->socketPath; } } diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index 2d2b9837039..72238dad79a 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -1,6 +1,7 @@ #pragma once #include "util.hh" +#include "sync.hh" namespace nix { @@ -8,13 +9,19 @@ class SSHMaster { private: - std::string host; - std::string keyFile; - bool useMaster; - bool compress; - Pid sshMaster; - std::unique_ptr tmpDir; - Path socketPath; + const std::string host; + const std::string keyFile; + const bool useMaster; + const bool compress; + + struct State + { + Pid sshMaster; + std::unique_ptr tmpDir; + Path socketPath; + }; + + Sync state_; public: @@ -34,8 +41,7 @@ public: std::unique_ptr startCommand(const std::string & command); - void startMaster(); - + Path startMaster(); }; } From d1158bb8168804b27508972988d4b85ba9d5e49d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Mar 2017 19:35:34 +0100 Subject: [PATCH 0156/2196] Cache connection failures --- src/libstore/remote-store.cc | 15 ++++++++++++++- src/libstore/remote-store.hh | 4 ++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/libstore/remote-store.cc 
b/src/libstore/remote-store.cc index 5e62bd3d5ac..1ac2d7b6e78 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -44,13 +44,26 @@ RemoteStore::RemoteStore(const Params & params) : Store(params) , connections(make_ref>( std::max(1, std::stoi(get(params, "max-connections", "1"))), - [this]() { return openConnection(); }, + [this]() { return openConnectionWrapper(); }, [](const ref & r) { return r->to.good() && r->from.good(); } )) { } +ref RemoteStore::openConnectionWrapper() +{ + if (failed) + throw Error("opening a connection to remote store ‘%s’ previously failed", getUri()); + try { + return openConnection(); + } catch (...) { + failed = true; + throw; + } +} + + UDSRemoteStore::UDSRemoteStore(const Params & params) : Store(params) , LocalFSStore(params) diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index ed7b27c8886..66540a2a2ec 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -98,6 +98,8 @@ protected: void processStderr(Sink * sink = 0, Source * source = 0); }; + ref openConnectionWrapper(); + virtual ref openConnection() = 0; void initConnection(Connection & conn); @@ -106,6 +108,8 @@ protected: private: + std::atomic_bool failed{false}; + void setOptions(Connection & conn); }; From 5789eaa3f45cadec719b4f642de2169b8b0a56c4 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Fri, 3 Mar 2017 16:12:17 -0500 Subject: [PATCH 0157/2196] Add aws-region param to S3 store URLs --- src/libstore/download.cc | 5 ++++- src/libstore/s3-binary-cache-store.cc | 9 +++++---- src/libstore/s3.hh | 4 ++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 75c00d85d34..11374b1da52 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -5,6 +5,9 @@ #include "store-api.hh" #include "archive.hh" #include "s3.hh" +#ifdef ENABLE_S3 +#include +#endif #include #include @@ -496,7 +499,7 @@ struct CurlDownloader : public Downloader // FIXME: do this on a worker thread sync2async(success, failure, [&]() -> DownloadResult { #ifdef ENABLE_S3 - S3Helper s3Helper; + S3Helper s3Helper(Aws::Region::US_EAST_1); // FIXME: make configurable auto slash = request.uri.find('/', 5); if (slash == std::string::npos) throw nix::Error("bad S3 URI ‘%s’", request.uri); diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 041c68c6816..a110f5ade48 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -52,8 +52,8 @@ static void initAWS() }); } -S3Helper::S3Helper() - : config(makeConfig()) +S3Helper::S3Helper(const string & region) + : config(makeConfig(region)) , client(make_ref(*config)) { } @@ -70,11 +70,11 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy } }; -ref S3Helper::makeConfig() +ref S3Helper::makeConfig(const string & region) { initAWS(); auto res = make_ref(); - res->region = Aws::Region::US_EAST_1; // FIXME: make configurable + res->region = region; res->requestTimeoutMs = 600 * 1000; res->retryStrategy = std::make_shared(); return res; @@ -140,6 +140,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) + , s3Helper(get(params, "aws-region", Aws::Region::US_EAST_1)) { diskCache = getNarInfoDiskCache(); } diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh index 5d5d3475c44..08a7fbf96e9 100644 --- a/src/libstore/s3.hh +++ 
b/src/libstore/s3.hh @@ -14,9 +14,9 @@ struct S3Helper ref config; ref client; - S3Helper(); + S3Helper(const std::string & region); - ref makeConfig(); + ref makeConfig(const std::string & region); struct DownloadResult { From b667abc699c7d63e0a6e2beba6daae57f05ddab4 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sun, 5 Mar 2017 07:39:10 -0500 Subject: [PATCH 0158/2196] Add signing and s3 support on darwin --- release.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/release.nix b/release.nix index 78f96196399..e61e81bdf37 100644 --- a/release.nix +++ b/release.nix @@ -74,8 +74,8 @@ let buildInputs = [ curl perl bzip2 xz openssl pkgconfig sqlite boehmgc ] - ++ lib.optional stdenv.isLinux libsodium - ++ lib.optional stdenv.isLinux + ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium + ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) (aws-sdk-cpp.override { apis = ["s3"]; customMemoryManagement = false; From 689b8256274f669c9a18d8af221e12ff5abbad68 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sun, 5 Mar 2017 18:15:44 -0500 Subject: [PATCH 0159/2196] nix-daemon.plist: Set XDG_CACHE_HOME --- misc/launchd/org.nixos.nix-daemon.plist.in | 2 ++ 1 file changed, 2 insertions(+) diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in index c5ef97ee9a3..5d57a5ec8ff 100644 --- a/misc/launchd/org.nixos.nix-daemon.plist.in +++ b/misc/launchd/org.nixos.nix-daemon.plist.in @@ -16,6 +16,8 @@ NIX_SSL_CERT_FILE /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt + XDG_CACHE_HOME + /root/.cache From 1cf480110879ffc8aee94b4b75999da405b71d7c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 6 Mar 2017 13:56:20 +0100 Subject: [PATCH 0160/2196] Revert fa125b9b28bea25a4eeb4d39a71a481563127cb9 This causes quadratic performance. --- src/libutil/archive.hh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index c067cd2ad24..607ebf8b28f 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -75,12 +75,6 @@ struct TeeSink : ParseSink TeeSource source; TeeSink(Source & source) : source(source) { } - - void preallocateContents(unsigned long long size) override - { - source.data->reserve(source.data->size() + size + 1024); - }; - }; void parseDump(ParseSink & sink, Source & source); From 4fc30922cf00d79bd603ac46255fa73a3c2ee565 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Mon, 6 Mar 2017 13:03:02 -0500 Subject: [PATCH 0161/2196] istringstream_nocopy: Implement in a standards-compliant way. Fixes the problem mentioned in e6a61b8da788efbbbb0eb690c49434b6b5fc9741 See #1135 --- src/libstore/derivations.cc | 2 +- src/libstore/s3-binary-cache-store.cc | 13 ---- src/libutil/hash.cc | 2 +- src/libutil/util.hh | 87 +++++++++++++++++++++++++++ 4 files changed, 89 insertions(+), 15 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index ce1ac7d3322..38a87240c3c 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -152,7 +152,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths) static Derivation parseDerivation(const string & s) { Derivation drv; - std::istringstream str(s); + istringstream_nocopy str(s); expect(str, "Derive(["); /* Parse the list of outputs. 
*/ diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 041c68c6816..1121b4d4cd0 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -117,17 +117,6 @@ S3Helper::DownloadResult S3Helper::getObject( return res; } -#if __linux__ - -struct istringstream_nocopy : public std::stringstream -{ - istringstream_nocopy(const std::string & s) - { - rdbuf()->pubsetbuf( - (char *) s.data(), s.size()); - } -}; - struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { std::string bucketName; @@ -313,8 +302,6 @@ static RegisterStoreImplementation regStore([]( return store; }); -#endif - } #endif diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index f447c80c5d8..a8bbcf8c175 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -104,7 +104,7 @@ Hash parseHash(HashType ht, const string & s) string s2(s, i * 2, 2); if (!isxdigit(s2[0]) || !isxdigit(s2[1])) throw BadHash(format("invalid hash ‘%1%’") % s); - std::istringstream str(s2); + istringstream_nocopy str(s2); int n; str >> std::hex >> n; hash.hash[i] = n; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 2950f7daa5e..7cb3e68b9ef 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -449,4 +449,91 @@ struct ReceiveInterrupts }; +template , class Allocator = std::allocator> +class basic_istringbuf_nocopy : public std::basic_streambuf +{ +public: + typedef std::basic_string string_type; + + typedef typename std::basic_streambuf::off_type off_type; + + typedef typename std::basic_streambuf::pos_type pos_type; + + typedef typename std::basic_streambuf::int_type int_type; + + typedef typename std::basic_streambuf::traits_type traits_type; + +private: + const string_type & s; + + off_type off; + +public: + basic_istringbuf_nocopy(const string_type & s) : s{s}, off{0} + { + } + +private: + pos_type seekoff(off_type off, std::ios_base::seekdir dir, std::ios_base::openmode which) + { + if (which & std::ios_base::in) { + this->off = dir == std::ios_base::beg + ? off + : (dir == std::ios_base::end + ? s.size() + off + : this->off + off); + } + return pos_type(this->off); + } + + pos_type seekpos(pos_type pos, std::ios_base::openmode which) + { + return seekoff(pos, std::ios_base::beg, which); + } + + std::streamsize showmanyc() + { + return s.size() - off; + } + + int_type underflow() + { + if (typename string_type::size_type(off) == s.size()) + return traits_type::eof(); + return traits_type::to_int_type(s[off]); + } + + int_type uflow() + { + if (typename string_type::size_type(off) == s.size()) + return traits_type::eof(); + return traits_type::to_int_type(s[off++]); + } + + int_type pbackfail(int_type ch) + { + if (off == 0 || (ch != traits_type::eof() && ch != s[off - 1])) + return traits_type::eof(); + + return traits_type::to_int_type(s[--off]); + } + +}; + +template , class Allocator = std::allocator> +class basic_istringstream_nocopy : public std::basic_iostream +{ + typedef basic_istringbuf_nocopy buf_type; + buf_type buf; +public: + basic_istringstream_nocopy(const typename buf_type::string_type & s) : + std::basic_iostream(&buf), buf(s) {}; +}; + +/* A variant of std::istringstream that doesn't its string + argument. This is useful for large strings. The caller must ensure + that the string object is not destroyed while it's referenced by + this object. 
*/ +typedef basic_istringstream_nocopy istringstream_nocopy; + } From 3cc18d3753af79946ba7c21ccdc49a6f58642108 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Mon, 6 Mar 2017 14:30:35 -0500 Subject: [PATCH 0162/2196] Properly set the caFile for aws-sdk-cpp s3 --- src/libstore/download.cc | 3 +-- src/libstore/globals.cc | 1 + src/libstore/globals.hh | 3 +++ src/libstore/s3-binary-cache-store.cc | 1 + 4 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 75c00d85d34..ebea3800ac3 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -224,8 +224,7 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_NOBODY, 1); if (request.verifyTLS) - curl_easy_setopt(req, CURLOPT_CAINFO, - getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")).c_str()); + curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); else { curl_easy_setopt(req, CURLOPT_SSL_VERIFYPEER, 0); curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 07af629260a..fcd6347294c 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -69,6 +69,7 @@ Settings::Settings() showTrace = false; enableImportNative = false; netrcFile = fmt("%s/%s", nixConfDir, "netrc"); + caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")); } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 0ff18f8b16e..1e6b7c083a6 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -195,6 +195,9 @@ struct Settings { downloads. */ Path netrcFile; + /* Path to the SSL CA file used */ + Path caFile; + private: SettingsMap settings, overrides; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 041c68c6816..800380c62d4 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -77,6 +77,7 @@ ref S3Helper::makeConfig() res->region = Aws::Region::US_EAST_1; // FIXME: make configurable res->requestTimeoutMs = 600 * 1000; res->retryStrategy = std::make_shared(); + res->caFile = settings.caFile; return res; } From 93f863be9626e5455458abf9e449586270e98163 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Wed, 8 Mar 2017 08:46:12 -0500 Subject: [PATCH 0163/2196] Add option to disable import-from-derivation completely, even if the drv is already realized --- src/libexpr/primops.cc | 2 ++ src/libstore/globals.cc | 2 ++ src/libstore/globals.hh | 3 +++ 3 files changed, 7 insertions(+) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5a570cefb2f..93097f3d1bf 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -59,6 +59,8 @@ void EvalState::realiseContext(const PathSet & context) drvs.insert(decoded.first + "!" + decoded.second); } if (!drvs.empty()) { + if (!settings.enableImportFromDerivation) + throw EvalError(format("attempted to realize ‘%1%’ during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin())); /* For performance, prefetch all substitute info. 
*/ PathSet willBuild, willSubstitute, unknown; unsigned long long downloadSize, narSize; diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index fcd6347294c..df537a51255 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -70,6 +70,7 @@ Settings::Settings() enableImportNative = false; netrcFile = fmt("%s/%s", nixConfDir, "netrc"); caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")); + enableImportFromDerivation = true; } @@ -185,6 +186,7 @@ void Settings::update() _get(keepGoing, "keep-going"); _get(keepFailed, "keep-failed"); _get(netrcFile, "netrc-file"); + _get(enableImportFromDerivation, "allow-import-from-derivation"); } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 1e6b7c083a6..7a9a9f6c0ca 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -198,6 +198,9 @@ struct Settings { /* Path to the SSL CA file used */ Path caFile; + /* Whether we allow import-from-derivation */ + bool enableImportFromDerivation; + private: SettingsMap settings, overrides; From 5f831c10574ae7ec44d0d357a6a371ac55d00593 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Wed, 8 Mar 2017 09:12:03 -0500 Subject: [PATCH 0164/2196] Add docs for allow-import-from-derivation --- doc/manual/command-ref/conf-file.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 96f8a4b608a..b1b604100f9 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -644,6 +644,16 @@ password my-password + allow-import-from-derivation + + By default, nix allows you to import from a derivation, + allowing building at evaluation time. With this option set to false, nix will throw an error + when evaluating an expression that uses this feature, allowing users to ensure their evaluation + will not require any builds to take place. + + + + From d853877ce92e0c202d70645bc716f2796a907cdc Mon Sep 17 00:00:00 2001 From: Adrien Devresse Date: Wed, 8 Mar 2017 22:24:10 +0100 Subject: [PATCH 0165/2196] Add missing header --- src/libutil/util.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 2950f7daa5e..b74c1d41739 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -13,6 +13,7 @@ #include #include #include +#include #ifndef HAVE_STRUCT_DIRENT_D_TYPE #define DT_UNKNOWN 0 From 19643a781ea3baaeb19d321093d99cb74b3ffc7e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 9 Mar 2017 13:36:56 +0100 Subject: [PATCH 0166/2196] nix -> Nix --- doc/manual/command-ref/conf-file.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index b1b604100f9..36b70f0c48f 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -646,8 +646,8 @@ password my-password allow-import-from-derivation - By default, nix allows you to import from a derivation, - allowing building at evaluation time. With this option set to false, nix will throw an error + By default, Nix allows you to import from a derivation, + allowing building at evaluation time. With this option set to false, Nix will throw an error when evaluating an expression that uses this feature, allowing users to ensure their evaluation will not require any builds to take place. 
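An aside on istringstream_nocopy from PATCH 0161 above (illustrative only): it behaves like a read-only std::istringstream over a borrowed buffer, so the caller must keep the string alive while the stream is in use. The function name below is invented, and util.hh is assumed to provide the typedef.

    // Illustrative only: parse from a large string without copying it,
    // mirroring the parseHash()/parseDerivation() call sites changed above.
    #include <istream>
    #include <string>
    // #include "util.hh"   // assumed: defines nix::istringstream_nocopy

    int parseFirstHexByte(const std::string & s)   // s must outlive the stream
    {
        nix::istringstream_nocopy str(s);   // wraps s by reference; no copy is made
        int n = 0;
        str >> std::hex >> n;               // ordinary istream extraction works
        return n;
    }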
From ae568847f5ee2dfd4226f30fa64fdc122623229b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Sat, 11 Mar 2017 19:39:13 +0100 Subject: [PATCH 0167/2196] Dockerfile: 1.11.2 -> 1.11.7 --- misc/docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/docker/Dockerfile b/misc/docker/Dockerfile index 7b2865c946d..85bd32e199a 100644 --- a/misc/docker/Dockerfile +++ b/misc/docker/Dockerfile @@ -1,6 +1,6 @@ FROM alpine -RUN wget -O- http://nixos.org/releases/nix/nix-1.11.2/nix-1.11.2-x86_64-linux.tar.bz2 | bzcat - | tar xf - \ +RUN wget -O- http://nixos.org/releases/nix/nix-1.11.7/nix-1.11.7-x86_64-linux.tar.bz2 | bzcat - | tar xf - \ && echo "nixbld:x:30000:nixbld1,nixbld2,nixbld3,nixbld4,nixbld5,nixbld6,nixbld7,nixbld8,nixbld9,nixbld10,nixbld11,nixbld12,nixbld13,nixbld14,nixbld15,nixbld16,nixbld17,nixbld18,nixbld19,nixbld20,nixbld21,nixbld22,nixbld23,nixbld24,nixbld25,nixbld26,nixbld27,nixbld28,nixbld29,nixbld30" >> /etc/group \ && for i in $(seq 1 30); do echo "nixbld$i:x:$((30000 + $i)):30000:::" >> /etc/passwd; done \ && mkdir -m 0755 /nix && USER=root sh nix-*-x86_64-linux/install \ From f628ca2a1f6a4d2bbaa0d24204fad8fe530e375a Mon Sep 17 00:00:00 2001 From: Stefan Junker Date: Sun, 12 Mar 2017 01:04:21 +0100 Subject: [PATCH 0168/2196] nix-shell/pure: keep environment variable SHLVL --- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index b81c98868e6..b4206033cf5 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -394,7 +394,7 @@ int main(int argc, char ** argv) auto tmp = getEnv("TMPDIR", getEnv("XDG_RUNTIME_DIR", "/tmp")); if (pure) { - std::set keepVars{"HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL"}; + std::set keepVars{"HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL"}; decltype(env) newEnv; for (auto & i : env) if (keepVars.count(i.first)) From 0afeb7f51e3465c7c27bc5a83017e9ffde8c6725 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 13 Mar 2017 12:07:50 +0100 Subject: [PATCH 0169/2196] Store: Add a method for getting build logs This allows various Store implementations to provide different ways to get build logs. For example, BinaryCacheStore can get the build logs from the binary cache. Also, remove the log-servers option since we can use substituters for this. --- doc/manual/command-ref/conf-file.xml | 14 ------- doc/manual/command-ref/nix-store.xml | 7 +--- src/libstore/build.cc | 5 +-- src/libstore/globals.cc | 1 - src/libstore/globals.hh | 3 -- src/libstore/local-fs-store.cc | 35 +++++++++++++++++ src/libstore/local-store.hh | 3 -- src/libstore/store-api.hh | 8 ++++ src/nix-store/nix-store.cc | 57 +++------------------------- 9 files changed, 50 insertions(+), 83 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 36b70f0c48f..3bd133918f4 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -512,20 +512,6 @@ password my-password - log-servers - - - - A list of URL prefixes (such as - http://hydra.nixos.org/log) from which - nix-store -l will try to fetch build logs if - they’re not available locally. 
- - - - - - trusted-users diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml index 0f6172defb3..fb017b741da 100644 --- a/doc/manual/command-ref/nix-store.xml +++ b/doc/manual/command-ref/nix-store.xml @@ -1236,12 +1236,7 @@ the store path is used. /nix/var/log/nix/drvs. However, there is no guarantee that a build log is available for any particular store path. For instance, if the path was downloaded as a pre-built binary through -a substitute, then the log is unavailable. If the log is not available -locally, then nix-store will try to download the -log from the servers specified in the Nix option -. For example, if it’s set to -http://hydra.nixos.org/log, then Nix will check -http://hydra.nixos.org/log/base-name. +a substitute, then the log is unavailable. diff --git a/src/libstore/build.cc b/src/libstore/build.cc index fd1f5dc3a4d..2b0f8e592d5 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3048,9 +3048,6 @@ void DerivationGoal::registerOutputs() } -string drvsLogDir = "drvs"; - - Path DerivationGoal::openLogFile() { logSize = 0; @@ -3060,7 +3057,7 @@ Path DerivationGoal::openLogFile() string baseName = baseNameOf(drvPath); /* Create a log file. */ - Path dir = (format("%1%/%2%/%3%/") % worker.store.logDir % drvsLogDir % string(baseName, 0, 2)).str(); + Path dir = (format("%1%/%2%/%3%/") % worker.store.logDir % worker.store.drvsLogDir % string(baseName, 0, 2)).str(); createDirs(dir); Path logFileName = (format("%1%/%2%%3%") diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index df537a51255..012b3d5b8b9 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -179,7 +179,6 @@ void Settings::update() _get(envKeepDerivations, "env-keep-derivations"); _get(sshSubstituterHosts, "ssh-substituter-hosts"); _get(useSshSubstituter, "use-ssh-substituter"); - _get(logServers, "log-servers"); _get(enableImportNative, "allow-unsafe-native-code-during-evaluation"); _get(useCaseHack, "use-case-hack"); _get(preBuildHook, "pre-build-hook"); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 7a9a9f6c0ca..46272168191 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -181,9 +181,6 @@ struct Settings { /* Whether to show a stack trace if Nix evaluation fails. */ bool showTrace; - /* A list of URL prefixes that can return Nix build logs. */ - Strings logServers; - /* Whether the importNative primop should be enabled */ bool enableImportNative; diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 4571a2211cd..c5da73dba36 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -2,6 +2,8 @@ #include "fs-accessor.hh" #include "store-api.hh" #include "globals.hh" +#include "compression.hh" +#include "derivations.hh" namespace nix { @@ -84,4 +86,37 @@ void LocalFSStore::narFromPath(const Path & path, Sink & sink) dumpPath(getRealStoreDir() + std::string(path, storeDir.size()), sink); } +const string LocalFSStore::drvsLogDir = "drvs"; + +std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) +{ + auto path(path_); + + assertStorePath(path); + + if (!isDerivation(path)) { + path = queryPathInfo(path)->deriver; + if (path == "") return nullptr; + } + + string baseName = baseNameOf(path); + + for (int j = 0; j < 2; j++) { + + Path logPath = + j == 0 + ? 
(format("%1%/%2%/%3%/%4%") % logDir % drvsLogDir % string(baseName, 0, 2) % string(baseName, 2)).str() + : (format("%1%/%2%/%3%") % logDir % drvsLogDir % baseName).str(); + Path logBz2Path = logPath + ".bz2"; + + if (pathExists(logPath)) + return std::make_shared(readFile(logPath)); + + else if (pathExists(logBz2Path)) + return decompress("bzip2", readFile(logBz2Path)); + } + + return nullptr; +} + } diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 511209d8404..49a0d7e0d63 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -21,9 +21,6 @@ namespace nix { const int nixSchemaVersion = 10; -extern string drvsLogDir; - - struct Derivation; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 481d0b79906..3aea30c286a 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -566,6 +566,11 @@ public: if they lack a signature. */ virtual bool isTrusted() { return false; } + /* Return the build log of the specified store path, if available, + or null otherwise. */ + virtual std::shared_ptr getBuildLog(const Path & path) + { return nullptr; } + protected: Stats stats; @@ -579,6 +584,7 @@ public: const Path rootDir; const Path stateDir; const Path logDir; + const static string drvsLogDir; LocalFSStore(const Params & params); @@ -595,6 +601,8 @@ public: { return getRealStoreDir() + "/" + baseNameOf(storePath); } + + std::shared_ptr getBuildLog(const Path & path) override; }; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 950c2a7c977..024fa4168eb 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -9,7 +9,6 @@ #include "util.hh" #include "worker-protocol.hh" #include "xmlgraph.hh" -#include "compression.hh" #include #include @@ -482,58 +481,12 @@ static void opReadLog(Strings opFlags, Strings opArgs) RunPager pager; - // FIXME: move getting logs into Store. - auto store2 = std::dynamic_pointer_cast(store); - if (!store2) throw Error(format("store ‘%s’ does not support reading logs") % store->getUri()); - for (auto & i : opArgs) { - Path path = useDeriver(store->followLinksToStorePath(i)); - - string baseName = baseNameOf(path); - bool found = false; - - for (int j = 0; j < 2; j++) { - - Path logPath = - j == 0 - ? (format("%1%/%2%/%3%/%4%") % store2->logDir % drvsLogDir % string(baseName, 0, 2) % string(baseName, 2)).str() - : (format("%1%/%2%/%3%") % store2->logDir % drvsLogDir % baseName).str(); - Path logBz2Path = logPath + ".bz2"; - - if (pathExists(logPath)) { - /* !!! Make this run in O(1) memory. */ - string log = readFile(logPath); - writeFull(STDOUT_FILENO, log); - found = true; - break; - } - - else if (pathExists(logBz2Path)) { - std::cout << *decompress("bzip2", readFile(logBz2Path)); - found = true; - break; - } - } - - if (!found) { - for (auto & i : settings.logServers) { - string prefix = i; - if (!prefix.empty() && prefix.back() != '/') prefix += '/'; - string url = prefix + baseName; - try { - string log = runProgram(CURL, true, {"--fail", "--location", "--silent", "--", url}); - std::cout << "(using build log from " << url << ")" << std::endl; - std::cout << log; - found = true; - break; - } catch (ExecError & e) { - /* Ignore errors from curl. FIXME: actually, might be - nice to print a warning on HTTP status != 404. 
*/ - } - } - } - - if (!found) throw Error(format("build log of derivation ‘%1%’ is not available") % path); + auto path = store->followLinksToStorePath(i); + auto log = store->getBuildLog(path); + if (!log) + throw Error("build log of derivation ‘%s’ is not available", path); + std::cout << *log; } } From 5b86451f0279c384fb379fed9d2979d8025aa269 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 13 Mar 2017 13:38:29 +0100 Subject: [PATCH 0170/2196] Add a "nix log" command This replaces "nix-store --read-log". It checks the local store and any configured substituters for the requested logs. --- src/nix/log.cc | 57 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 src/nix/log.cc diff --git a/src/nix/log.cc b/src/nix/log.cc new file mode 100644 index 00000000000..d8a3830e91c --- /dev/null +++ b/src/nix/log.cc @@ -0,0 +1,57 @@ +#include "command.hh" +#include "common-args.hh" +#include "installables.hh" +#include "shared.hh" +#include "store-api.hh" + +using namespace nix; + +struct CmdLog : StoreCommand, MixInstallables +{ + CmdLog() + { + } + + std::string name() override + { + return "log"; + } + + std::string description() override + { + return "show the build log of the specified packages or paths"; + } + + void run(ref store) override + { + auto elems = evalInstallables(store); + + PathSet paths; + + for (auto & elem : elems) { + if (elem.isDrv) + paths.insert(elem.drvPath); + else + paths.insert(elem.outPaths.begin(), elem.outPaths.end()); + } + + auto subs = getDefaultSubstituters(); + + subs.push_front(store); + + for (auto & path : paths) { + bool found = false; + for (auto & sub : subs) { + auto log = sub->getBuildLog(path); + if (!log) continue; + std::cout << *log; + found = true; + break; + } + if (!found) + throw Error("build log of path ‘%s’ is not available", path); + } + } +}; + +static RegisterCommand r1(make_ref()); From 532d73d5d86c3d25d08a8d771a189708dac323e2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 13 Mar 2017 14:07:58 +0100 Subject: [PATCH 0171/2196] BinaryCacheStore: Implement getBuildLog() We assume that build logs are stored under log/, e.g. 
/nix/store/q7ab198v13p0f8x8wgnd75dva7d5mip6-friday-devil-0.1.1.1.drv maps to https://cache.nixos.org/log/q7ab198v13p0f8x8wgnd75dva7d5mip6-friday-devil-0.1.1.1.drv --- src/libstore/binary-cache-store.cc | 24 ++++++++++++++++++++++++ src/libstore/binary-cache-store.hh | 2 ++ src/libstore/local-fs-store.cc | 6 +++++- 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 3e07a2aa2b6..120345b2670 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -382,4 +382,28 @@ ref BinaryCacheStore::getFSAccessor() return make_ref(ref(shared_from_this())); } +std::shared_ptr BinaryCacheStore::getBuildLog(const Path & path) +{ + Path drvPath; + + if (isDerivation(path)) + drvPath = path; + else { + try { + auto info = queryPathInfo(path); + // FIXME: add a "Log" field to .narinfo + if (info->deriver == "") return nullptr; + drvPath = info->deriver; + } catch (InvalidPath &) { + return nullptr; + } + } + + auto logPath = "log/" + baseNameOf(drvPath); + + debug("fetching build log from binary cache ‘%s/%s’", getUri(), logPath); + + return getFile(logPath); +} + } diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index a70d50d4949..1c287056ce5 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -122,6 +122,8 @@ public: void addSignatures(const Path & storePath, const StringSet & sigs) override { notImpl(); } + std::shared_ptr getBuildLog(const Path & path) override; + }; } diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index c5da73dba36..002ee4a65ce 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -95,7 +95,11 @@ std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) assertStorePath(path); if (!isDerivation(path)) { - path = queryPathInfo(path)->deriver; + try { + path = queryPathInfo(path)->deriver; + } catch (InvalidPath &) { + return nullptr; + } if (path == "") return nullptr; } From 73d7a51ee6942f681db468dc8e3c631b4d3daa4e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 13 Mar 2017 14:39:18 +0100 Subject: [PATCH 0172/2196] Remove dependency on "curl" binary --- Makefile.config.in | 1 - configure.ac | 1 - src/nix-store/local.mk | 2 -- 3 files changed, 4 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index 15e94380477..d4953b521c4 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -14,7 +14,6 @@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ bash = @bash@ bindir = @bindir@ -curl = @curl@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ diff --git a/configure.ac b/configure.ac index 46b0ac0651b..34dcd6b2a7d 100644 --- a/configure.ac +++ b/configure.ac @@ -114,7 +114,6 @@ if test -z "$$1"; then fi ]) -NEED_PROG(curl, curl) NEED_PROG(bash, bash) NEED_PROG(patch, patch) AC_PATH_PROG(xmllint, xmllint, false) diff --git a/src/nix-store/local.mk b/src/nix-store/local.mk index 84ff15b241f..ade0b233adf 100644 --- a/src/nix-store/local.mk +++ b/src/nix-store/local.mk @@ -7,5 +7,3 @@ nix-store_SOURCES := $(wildcard $(d)/*.cc) nix-store_LIBS = libmain libstore libutil libformat nix-store_LDFLAGS = -lbz2 -pthread $(SODIUM_LIBS) - -nix-store_CXXFLAGS = -DCURL=\"$(curl)\" From e8186085e07104d4b844208613c2d704b5b57dec Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 13 Mar 2017 14:40:15 +0100 Subject: [PATCH 0173/2196] Add support for brotli compression Build logs on cache.nixos.org 
are compressed using Brotli (since this allows them to be decompressed automatically by Chrome and Firefox), so it's handy if "nix log" can decompress them. --- Makefile.config.in | 1 + configure.ac | 1 + release.nix | 8 ++++++-- shell.nix | 3 ++- src/libstore/download.cc | 21 +++++++++++++++++++-- src/libutil/compression.cc | 7 +++++++ src/libutil/local.mk | 2 ++ 7 files changed, 38 insertions(+), 5 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index d4953b521c4..fccf63b3627 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -14,6 +14,7 @@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ bash = @bash@ bindir = @bindir@ +bro = @bro@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ diff --git a/configure.ac b/configure.ac index 34dcd6b2a7d..21ca78af0fd 100644 --- a/configure.ac +++ b/configure.ac @@ -128,6 +128,7 @@ NEED_PROG(gzip, gzip) NEED_PROG(xz, xz) AC_PATH_PROG(dot, dot) AC_PATH_PROG(pv, pv, pv) +NEED_PROG(bro, bro) # Test that Perl has the open/fork feature (Perl 5.8.0 and beyond). diff --git a/release.nix b/release.nix index e61e81bdf37..a266af7c2e1 100644 --- a/release.nix +++ b/release.nix @@ -24,7 +24,8 @@ let inherit officialRelease; buildInputs = - [ curl bison flex perl libxml2 libxslt bzip2 xz + [ curl bison flex perl libxml2 libxslt + bzip2 xz brotli pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl autoconf-archive @@ -73,7 +74,10 @@ let src = tarball; buildInputs = - [ curl perl bzip2 xz openssl pkgconfig sqlite boehmgc ] + [ curl perl + bzip2 xz brotli + openssl pkgconfig sqlite boehmgc + ] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) (aws-sdk-cpp.override { diff --git a/shell.nix b/shell.nix index 4c1608230ce..df0ad01df58 100644 --- a/shell.nix +++ b/shell.nix @@ -6,7 +6,8 @@ with import {}; name = "nix"; buildInputs = - [ curl bison flex perl libxml2 libxslt bzip2 xz + [ curl bison flex perl libxml2 libxslt + bzip2 xz brotli pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl autoconf-archive diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 6567a4dc475..d9b8fbc0808 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -5,6 +5,8 @@ #include "store-api.hh" #include "archive.hh" #include "s3.hh" +#include "compression.hh" + #ifdef ENABLE_S3 #include #endif @@ -70,6 +72,8 @@ struct CurlDownloader : public Downloader struct curl_slist * requestHeaders = 0; + std::string encoding; + DownloadItem(CurlDownloader & downloader, const DownloadRequest & request) : downloader(downloader), request(request) { @@ -127,6 +131,7 @@ struct CurlDownloader : public Downloader auto ss = tokenizeString>(line, " "); status = ss.size() >= 2 ? ss[1] : ""; result.data = std::make_shared(); + encoding = ""; } else { auto i = line.find(':'); if (i != string::npos) { @@ -142,7 +147,8 @@ struct CurlDownloader : public Downloader debug(format("shutting down on 200 HTTP response with expected ETag")); return 0; } - } + } else if (name == "content-encoding") + encoding = trim(string(line, i + 1));; } } return realSize; @@ -268,7 +274,18 @@ struct CurlDownloader : public Downloader { result.cached = httpStatus == 304; done = true; - callSuccess(success, failure, const_cast(result)); + + /* Ad hoc support for brotli, since curl doesn't do + this yet. */ + try { + if (encoding == "br") + result.data = decompress("br", *result.data); + + callSuccess(success, failure, const_cast(result)); + } catch (...) 
{ + done = true; + callFailure(failure, std::current_exception()); + } } else { Error err = (httpStatus == 404 || code == CURLE_FILE_COULDNT_READ_FILE) ? NotFound : diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index a3bbb5170d9..723b072af96 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -89,6 +89,11 @@ static ref decompressBzip2(const std::string & in) } } +static ref decompressBrotli(const std::string & in) +{ + return make_ref(runProgram(BRO, true, {"-d"}, in)); +} + ref compress(const std::string & method, const std::string & in) { StringSink ssink; @@ -106,6 +111,8 @@ ref decompress(const std::string & method, const std::string & in) return decompressXZ(in); else if (method == "bzip2") return decompressBzip2(in); + else if (method == "br") + return decompressBrotli(in); else throw UnknownCompressionMethod(format("unknown compression method ‘%s’") % method); } diff --git a/src/libutil/local.mk b/src/libutil/local.mk index cac5c8795db..0721b21c208 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -9,3 +9,5 @@ libutil_SOURCES := $(wildcard $(d)/*.cc) libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) libutil_LIBS = libformat + +libutil_CXXFLAGS = -DBRO=\"$(bro)\" From fbbc4d8dda4d347351c0667d1d9a571b201256cf Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 13 Mar 2017 14:56:33 +0100 Subject: [PATCH 0174/2196] Fix deadlock in runProgram() when input is larger than the pipe buffer size --- src/libutil/util.cc | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 0a5f796e4ea..bc66b0c5322 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -870,11 +870,14 @@ string runProgram(Path program, bool searchPath, const Strings & args, out.writeSide = -1; - /* FIXME: This can deadlock if the input is too long. */ + std::thread writerThread; + if (!input.empty()) { in.readSide = -1; - writeFull(in.writeSide.get(), input); - in.writeSide = -1; + writerThread = std::thread([&]() { + writeFull(in.writeSide.get(), input); + in.writeSide = -1; + }); } string result = drainFD(out.readSide.get()); @@ -885,6 +888,9 @@ string runProgram(Path program, bool searchPath, const Strings & args, throw ExecError(status, format("program ‘%1%’ %2%") % program % statusToString(status)); + if (!input.empty()) + writerThread.join(); + return result; } From 2691498b5c68a9c8908da296bf867bfc7f9a068f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 14 Mar 2017 13:13:29 +0100 Subject: [PATCH 0175/2196] Fix assertion failure on SIGINT nix: src/libutil/compression.cc:142: virtual nix::XzSink::~XzSink(): Assertion `finished' failed. --- src/libutil/compression.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 723b072af96..8cb1dde662f 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -146,7 +146,6 @@ struct XzSink : CompressionSink ~XzSink() { - assert(finished); lzma_end(&strm); } @@ -217,7 +216,6 @@ struct BzipSink : CompressionSink ~BzipSink() { - assert(finished); BZ2_bzCompressEnd(&strm); } From 8b1d65bebe5af8960ba813e1233f2596a3ffebb7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 14 Mar 2017 15:03:53 +0100 Subject: [PATCH 0176/2196] S3BinaryCacheStore: Support compression of narinfo and log files You can now set the store parameter "text-compression=br" to compress textual files in the binary cache (i.e. narinfo and logs) using Brotli. 
This sets the Content-Encoding header; the extension of compressed files is unchanged. You can separately specify the compression of log files using "log-compression=br". This is useful when you don't want to compress narinfo files for backward compatibility. --- src/libstore/binary-cache-store.cc | 1 + src/libstore/download.cc | 16 +++++++++----- src/libstore/download.hh | 3 +++ src/libstore/s3-binary-cache-store.cc | 28 +++++++++++++++++++++--- src/libutil/compression.cc | 31 +++++++++++++++++++++++++++ 5 files changed, 71 insertions(+), 8 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 120345b2670..804e3f6aa1e 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -250,6 +250,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refurl = "nar/" + printHash32(narInfo->fileHash) + ".nar" + (compression == "xz" ? ".xz" : compression == "bzip2" ? ".bz2" : + compression == "br" ? ".br" : ""); if (repair || !fileExists(narInfo->url)) { stats.narWrite++; diff --git a/src/libstore/download.cc b/src/libstore/download.cc index d9b8fbc0808..da29b2fc6fc 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -39,6 +39,16 @@ std::string resolveUri(const std::string & uri) return uri; } +ref decodeContent(const std::string & encoding, ref data) +{ + if (encoding == "") + return data; + else if (encoding == "br") + return decompress(encoding, *data); + else + throw Error("unsupported Content-Encoding ‘%s’", encoding); +} + struct CurlDownloader : public Downloader { CURLM * curlm = 0; @@ -275,12 +285,8 @@ struct CurlDownloader : public Downloader result.cached = httpStatus == 304; done = true; - /* Ad hoc support for brotli, since curl doesn't do - this yet. */ try { - if (encoding == "br") - result.data = decompress("br", *result.data); - + result.data = decodeContent(encoding, ref(result.data)); callSuccess(success, failure, const_cast(result)); } catch (...) { done = true; diff --git a/src/libstore/download.hh b/src/libstore/download.hh index bdb5011e783..e2e16b36103 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -73,4 +73,7 @@ public: bool isUri(const string & s); +/* Decode data according to the Content-Encoding header. 
*/ +ref decodeContent(const std::string & encoding, ref data); + } diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 5134dd17526..1d44e68321f 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -5,6 +5,8 @@ #include "nar-info.hh" #include "nar-info-disk-cache.hh" #include "globals.hh" +#include "compression.hh" +#include "download.hh" #include #include @@ -104,8 +106,10 @@ S3Helper::DownloadResult S3Helper::getObject( auto result = checkAws(fmt("AWS error fetching ‘%s’", key), client->GetObject(request)); - res.data = std::make_shared( - dynamic_cast(result.GetBody()).str()); + res.data = decodeContent( + result.GetContentEncoding(), + make_ref( + dynamic_cast(result.GetBody()).str())); } catch (S3Error & e) { if (e.err != Aws::S3::S3Errors::NO_SUCH_KEY) throw; @@ -137,11 +141,15 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore S3Helper s3Helper; + std::string textCompression, logCompression; + S3BinaryCacheStoreImpl( const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) , s3Helper(get(params, "aws-region", Aws::Region::US_EAST_1)) + , textCompression(get(params, "text-compression", "gzip")) + , logCompression(get(params, "log-compression", textCompression)) { diskCache = getNarInfoDiskCache(); } @@ -220,13 +228,17 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore return true; } - void upsertFile(const std::string & path, const std::string & data) override + void uploadFile(const std::string & path, const std::string & data, + const std::string & contentEncoding) { auto request = Aws::S3::Model::PutObjectRequest() .WithBucket(bucketName) .WithKey(path); + if (contentEncoding != "") + request.SetContentEncoding(contentEncoding); + auto stream = std::make_shared(data); request.SetBody(stream); @@ -249,6 +261,16 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore stats.putTimeMs += duration; } + void upsertFile(const std::string & path, const std::string & data) override + { + if (path.find(".narinfo") != std::string::npos) + uploadFile(path, *compress(textCompression, data), textCompression); + else if (path.find("/log") != std::string::npos) + uploadFile(path, *compress(logCompression, data), logCompression); + else + uploadFile(path, data, ""); + } + void getFile(const std::string & path, std::function)> success, std::function failure) override diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 8cb1dde662f..5df97e73923 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -91,6 +91,7 @@ static ref decompressBzip2(const std::string & in) static ref decompressBrotli(const std::string & in) { + // FIXME: use libbrotli return make_ref(runProgram(BRO, true, {"-d"}, in)); } @@ -266,6 +267,34 @@ struct BzipSink : CompressionSink } }; +struct BrotliSink : CompressionSink +{ + Sink & nextSink; + std::string data; + + BrotliSink(Sink & nextSink) : nextSink(nextSink) + { + } + + ~BrotliSink() + { + } + + // FIXME: use libbrotli + + void finish() override + { + flush(); + nextSink(runProgram(BRO, true, {}, data)); + } + + void write(const unsigned char * data, size_t len) override + { + checkInterrupt(); + this->data.append((const char *) data, len); + } +}; + ref makeCompressionSink(const std::string & method, Sink & nextSink) { if (method == "none") @@ -274,6 +303,8 @@ ref makeCompressionSink(const std::string & method, Sink & next return make_ref(nextSink); else if (method == 
"bzip2") return make_ref(nextSink); + else if (method == "br") + return make_ref(nextSink); else throw UnknownCompressionMethod(format("unknown compression method ‘%s’") % method); } From 45c70382ac152107d40956c6a3ab8c329086733f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 14 Mar 2017 15:26:01 +0100 Subject: [PATCH 0177/2196] S3BinaryCacheStore: Set Content-Type This is necessary for serving log files to browsers. --- src/libstore/binary-cache-store.cc | 8 ++++---- src/libstore/binary-cache-store.hh | 4 +++- src/libstore/http-binary-cache-store.cc | 4 +++- src/libstore/local-binary-cache-store.cc | 8 ++++++-- src/libstore/s3-binary-cache-store.cc | 14 +++++++++----- 5 files changed, 25 insertions(+), 13 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 804e3f6aa1e..d8e68fd5892 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -97,7 +97,7 @@ void BinaryCacheStore::init() auto cacheInfo = getFile(cacheInfoFile); if (!cacheInfo) { - upsertFile(cacheInfoFile, "StoreDir: " + storeDir + "\n"); + upsertFile(cacheInfoFile, "StoreDir: " + storeDir + "\n", "text/x-nix-cache-info"); } else { for (auto & line : tokenizeString(*cacheInfo, "\n")) { size_t colon = line.find(':'); @@ -224,7 +224,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refurl)) { stats.narWrite++; - upsertFile(narInfo->url, *narCompressed); + upsertFile(narInfo->url, *narCompressed, "application/x-nix-nar"); } else stats.narWriteAverted++; @@ -265,7 +265,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refsign(*secretKey); - upsertFile(narInfoFile, narInfo->to_string()); + upsertFile(narInfoFile, narInfo->to_string(), "text/x-nix-narinfo"); auto hashPart = storePathToHash(narInfo->path); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 1c287056ce5..d42b1abd245 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -31,7 +31,9 @@ public: virtual bool fileExists(const std::string & path) = 0; - virtual void upsertFile(const std::string & path, const std::string & data) = 0; + virtual void upsertFile(const std::string & path, + const std::string & data, + const std::string & mimeType) = 0; /* Return the contents of the specified file, or null if it doesn't exist. 
*/ diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 9d31f77c921..37a7d6ace14 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -64,7 +64,9 @@ class HttpBinaryCacheStore : public BinaryCacheStore } } - void upsertFile(const std::string & path, const std::string & data) override + void upsertFile(const std::string & path, + const std::string & data, + const std::string & mimeType) override { throw UploadToHTTP("uploading to an HTTP binary cache is not supported"); } diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index 0f377989bd8..aff22f9fcc2 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -30,7 +30,9 @@ class LocalBinaryCacheStore : public BinaryCacheStore bool fileExists(const std::string & path) override; - void upsertFile(const std::string & path, const std::string & data) override; + void upsertFile(const std::string & path, + const std::string & data, + const std::string & mimeType) override; void getFile(const std::string & path, std::function)> success, @@ -83,7 +85,9 @@ bool LocalBinaryCacheStore::fileExists(const std::string & path) return pathExists(binaryCacheDir + "/" + path); } -void LocalBinaryCacheStore::upsertFile(const std::string & path, const std::string & data) +void LocalBinaryCacheStore::upsertFile(const std::string & path, + const std::string & data, + const std::string & mimeType) { atomicWrite(binaryCacheDir + "/" + path, data); } diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 1d44e68321f..5ecf3996d7e 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -148,7 +148,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore : S3BinaryCacheStore(params) , bucketName(bucketName) , s3Helper(get(params, "aws-region", Aws::Region::US_EAST_1)) - , textCompression(get(params, "text-compression", "gzip")) + , textCompression(get(params, "text-compression", "")) , logCompression(get(params, "log-compression", textCompression)) { diskCache = getNarInfoDiskCache(); @@ -229,6 +229,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore } void uploadFile(const std::string & path, const std::string & data, + const std::string & mimeType, const std::string & contentEncoding) { auto request = @@ -236,6 +237,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore .WithBucket(bucketName) .WithKey(path); + request.SetContentType(mimeType); + if (contentEncoding != "") request.SetContentEncoding(contentEncoding); @@ -261,14 +264,15 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore stats.putTimeMs += duration; } - void upsertFile(const std::string & path, const std::string & data) override + void upsertFile(const std::string & path, const std::string & data, + const std::string & mimeType) override { if (path.find(".narinfo") != std::string::npos) - uploadFile(path, *compress(textCompression, data), textCompression); + uploadFile(path, *compress(textCompression, data), mimeType, textCompression); else if (path.find("/log") != std::string::npos) - uploadFile(path, *compress(logCompression, data), logCompression); + uploadFile(path, *compress(logCompression, data), mimeType, logCompression); else - uploadFile(path, data, ""); + uploadFile(path, data, mimeType, ""); } void getFile(const std::string & path, From 042975ea8e2e081c0d44190c8b41104131f8c6d4 Mon Sep 17 
00:00:00 2001 From: Eelco Dolstra Date: Tue, 14 Mar 2017 15:55:02 +0100 Subject: [PATCH 0178/2196] Compress NAR listings using the "text-compression" method So if "text-compression=br", the .ls file in S3 will get a Content-Encoding of "br". Brotli appears to compress better than xz for this kind of file and is natively supported by browsers. --- src/libstore/binary-cache-store.cc | 2 +- src/libstore/s3-binary-cache-store.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index d8e68fd5892..25ad0d75b70 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -224,7 +224,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref Date: Wed, 15 Mar 2017 14:40:47 +0100 Subject: [PATCH 0179/2196] runProgram(): Distinguish between empty input and no input For example, if we call brotli with an empty input, it shouldn't read from the caller's stdin. --- src/libstore/download.cc | 2 +- src/libutil/compression.cc | 2 +- src/libutil/util.cc | 33 +++++++++++++++++------- src/libutil/util.hh | 4 ++- src/nix-prefetch-url/nix-prefetch-url.cc | 4 +-- 5 files changed, 30 insertions(+), 15 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index da29b2fc6fc..22bde086e6a 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -671,7 +671,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa Path tmpDir = createTempDir(); AutoDelete autoDelete(tmpDir, true); // FIXME: this requires GNU tar for decompression. - runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"}, ""); + runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"}); unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, false); } replaceSymlink(unpackedStorePath, unpackedLink); diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 5df97e73923..8ffd55efb23 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -92,7 +92,7 @@ static ref decompressBzip2(const std::string & in) static ref decompressBrotli(const std::string & in) { // FIXME: use libbrotli - return make_ref(runProgram(BRO, true, {"-d"}, in)); + return make_ref(runProgram(BRO, true, {"-d"}, {in})); } ref compress(const std::string & method, const std::string & in) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index bc66b0c5322..d2d32782d02 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1,6 +1,7 @@ #include "util.hh" #include "affinity.hh" #include "sync.hh" +#include "finally.hh" #include #include @@ -10,6 +11,7 @@ #include #include #include +#include #include #include @@ -837,23 +839,21 @@ std::vector stringsToCharPtrs(const Strings & ss) string runProgram(Path program, bool searchPath, const Strings & args, - const string & input) + const std::experimental::optional & input) { checkInterrupt(); /* Create a pipe. */ Pipe out, in; out.create(); - if (!input.empty()) in.create(); + if (input) in.create(); /* Fork. 
*/ Pid pid = startProcess([&]() { if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("dupping stdout"); - if (!input.empty()) { - if (dup2(in.readSide.get(), STDIN_FILENO) == -1) - throw SysError("dupping stdin"); - } + if (input && dup2(in.readSide.get(), STDIN_FILENO) == -1) + throw SysError("dupping stdin"); Strings args_(args); args_.push_front(program); @@ -872,10 +872,23 @@ string runProgram(Path program, bool searchPath, const Strings & args, std::thread writerThread; - if (!input.empty()) { + std::promise promise; + + Finally doJoin([&]() { + if (writerThread.joinable()) + writerThread.join(); + }); + + + if (input) { in.readSide = -1; writerThread = std::thread([&]() { - writeFull(in.writeSide.get(), input); + try { + writeFull(in.writeSide.get(), *input); + promise.set_value(); + } catch (...) { + promise.set_exception(std::current_exception()); + } in.writeSide = -1; }); } @@ -888,8 +901,8 @@ string runProgram(Path program, bool searchPath, const Strings & args, throw ExecError(status, format("program ‘%1%’ %2%") % program % statusToString(status)); - if (!input.empty()) - writerThread.join(); + /* Wait for the writer thread to finish. */ + if (input) promise.get_future().get(); return result; } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index b74c1d41739..4e3a011b349 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -14,6 +14,7 @@ #include #include #include +#include #ifndef HAVE_STRUCT_DIRENT_D_TYPE #define DT_UNKNOWN 0 @@ -232,7 +233,8 @@ pid_t startProcess(std::function fun, const ProcessOptions & options = P /* Run a program and return its stdout in a string (i.e., like the shell backtick operator). */ string runProgram(Path program, bool searchPath = false, - const Strings & args = Strings(), const string & input = ""); + const Strings & args = Strings(), + const std::experimental::optional & input = {}); class ExecError : public Error { diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index acf60302569..b3b2fcac713 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -170,10 +170,10 @@ int main(int argc, char * * argv) Path unpacked = (Path) tmpDir + "/unpacked"; createDirs(unpacked); if (hasSuffix(baseNameOf(uri), ".zip")) - runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked}, ""); + runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked}); else // FIXME: this requires GNU tar for decompression. - runProgram("tar", true, {"xf", tmpFile, "-C", unpacked}, ""); + runProgram("tar", true, {"xf", tmpFile, "-C", unpacked}); /* If the archive unpacks to a single file/directory, then use that as the top-level. 
*/ From 3f35612c041ecc4e6a4f29f01d5a87fba983cf0f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 15 Mar 2017 17:20:19 +0100 Subject: [PATCH 0180/2196] More precise compression settings --- src/libstore/s3-binary-cache-store.cc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 3804e0b0fb8..571bf7dfd80 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -141,15 +141,16 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore S3Helper s3Helper; - std::string textCompression, logCompression; + std::string narinfoCompression, lsCompression, logCompression; S3BinaryCacheStoreImpl( const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) , s3Helper(get(params, "aws-region", Aws::Region::US_EAST_1)) - , textCompression(get(params, "text-compression", "")) - , logCompression(get(params, "log-compression", textCompression)) + , narinfoCompression(get(params, "narinfo-compression", "")) + , lsCompression(get(params, "ls-compression", "")) + , logCompression(get(params, "log-compression", "")) { diskCache = getNarInfoDiskCache(); } @@ -267,8 +268,10 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore void upsertFile(const std::string & path, const std::string & data, const std::string & mimeType) override { - if (textCompression != "" && (hasSuffix(path, ".narinfo") || hasSuffix(path, ".ls"))) - uploadFile(path, *compress(textCompression, data), mimeType, textCompression); + if (narinfoCompression != "" && hasSuffix(path, ".narinfo")) + uploadFile(path, *compress(narinfoCompression, data), mimeType, narinfoCompression); + else if (lsCompression != "" && hasSuffix(path, ".ls")) + uploadFile(path, *compress(lsCompression, data), mimeType, lsCompression); else if (logCompression != "" && hasPrefix(path, "log/")) uploadFile(path, *compress(logCompression, data), mimeType, logCompression); else From 43f158bb0885c2eb9180ef7ab24034b1b9353e8b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 10:45:45 +0100 Subject: [PATCH 0181/2196] nix-copy-closure: Fix assertion failure $ ./inst/bin/nix-copy-closure --to bla $(type -p firefox) nix-copy-closure: src/libstore/store-api.cc:80: std::__cxx11::string nix::storePathToHash(const Path&): Assertion `base.size() >= storePathHashLen' failed. --- src/nix-copy-closure/nix-copy-closure.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index 4340443b5cc..05a4deea3d5 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -51,8 +51,12 @@ int main(int argc, char ** argv) auto to = toMode ? openStore(remoteUri) : openStore(); auto from = toMode ? openStore() : openStore(remoteUri); + PathSet storePaths2; + for (auto & path : storePaths) + storePaths2.insert(from->followLinksToStorePath(path)); + PathSet closure; - from->computeFSClosure(storePaths, closure, false, includeOutputs); + from->computeFSClosure(storePaths2, closure, false, includeOutputs); copyPaths(from, to, Paths(closure.begin(), closure.end()), useSubstitutes); }); From 0ec7f47b00659cf043eb7fbe72385a08f7e9f8e2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 10:52:28 +0100 Subject: [PATCH 0182/2196] Remove "killing process " messages They convey no useful information. 
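To illustrate the effect at the call sites (a sketch, not part of the patch itself; the real changes follow below):

    // before: pid.kill(true);   // "true" meant quiet; otherwise
    //                           // printError("killing process <pid>") was shown
    // after:  pid.kill();       // the message is still emitted, but via debug(),
    //                           // so it is only visible at debug verbosity

This is why the quiet flag disappears from Pid::kill() and from its callers.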
--- src/libmain/shared.cc | 6 +----- src/libstore/build.cc | 4 ++-- src/libutil/util.cc | 5 ++--- src/libutil/util.hh | 2 +- 4 files changed, 6 insertions(+), 11 deletions(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 326202d295f..a720afd6cdd 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -332,11 +332,7 @@ RunPager::~RunPager() pid.wait(); } } catch (...) { - try { - pid.kill(true); - } catch (...) { - ignoreException(); - } + ignoreException(); } } diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 2b0f8e592d5..fc840df81a5 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -642,7 +642,7 @@ HookInstance::~HookInstance() { try { toHook.writeSide = -1; - if (pid != -1) pid.kill(true); + if (pid != -1) pid.kill(); } catch (...) { ignoreException(); } @@ -1437,7 +1437,7 @@ void DerivationGoal::buildDone() to have terminated. In fact, the builder could also have simply have closed its end of the pipe, so just to be sure, kill it. */ - int status = hook ? hook->pid.kill(true) : pid.kill(true); + int status = hook ? hook->pid.kill() : pid.kill(); debug(format("builder process for ‘%1%’ finished") % drvPath); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index d2d32782d02..99a91c8cc64 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -678,12 +678,11 @@ Pid::operator pid_t() } -int Pid::kill(bool quiet) +int Pid::kill() { assert(pid != -1); - if (!quiet) - printError(format("killing process %1%") % pid); + debug(format("killing process %1%") % pid); /* Send the requested signal to the child. If it has its own process group, send the signal to every process in the child diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 4e3a011b349..f94c0ff1c5e 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -203,7 +203,7 @@ public: ~Pid(); void operator =(pid_t pid); operator pid_t(); - int kill(bool quiet = false); + int kill(); int wait(); void setSeparatePG(bool separatePG); From 7a716ef2a5759506e602f3ceaaadc42edbf6d12f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 10:58:48 +0100 Subject: [PATCH 0183/2196] Fix nix-copy-closure --to --- src/libstore/legacy-ssh-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 4a2ac42f31c..d804b27c1c8 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -45,7 +45,7 @@ struct LegacySSHStore : public Store ref openConnection() { auto conn = make_ref(); - conn->sshConn = master.startCommand("nix-store --serve"); + conn->sshConn = master.startCommand("nix-store --serve --write"); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); From ea7fa88131eb486ffe54f32601d36a64374f7b7c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 11:44:01 +0100 Subject: [PATCH 0184/2196] LegacySSHStore: Provide a faster implementation of computeFSClosure() This avoids the latency of the standard implementation, which can make a huge difference (e.g. 16.5s -> 0.5s on a NixOS system closure). 
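A rough back-of-the-envelope to make the latency argument concrete (the path count and round-trip time are illustrative guesses, and it assumes the generic Store::computeFSClosure issues roughly one query per path over the connection):

    ~1000 paths in a system closure x ~15 ms SSH round trip  =  ~15 s
    1 cmdQueryClosure request for the whole set              =  ~1 round trip + transfer time

which matches the order of magnitude of the 16.5s -> 0.5s figure quoted above.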
--- src/libstore/legacy-ssh-store.cc | 22 ++++++++++++++++++++++ src/libstore/store-api.hh | 2 +- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index d804b27c1c8..1a933259b52 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -204,6 +204,28 @@ struct LegacySSHStore : public Store bool isTrusted() override { return true; } + void computeFSClosure(const PathSet & paths, + PathSet & out, bool flipDirection = false, + bool includeOutputs = false, bool includeDerivers = false) override + { + if (flipDirection || includeDerivers) { + Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers); + return; + } + + auto conn(connections->get()); + + conn->to + << cmdQueryClosure + << includeOutputs + << paths; + conn->to.flush(); + + auto res = readStorePaths(*this, conn->from); + + out.insert(res.begin(), res.end()); + } + }; static RegisterStoreImplementation regStore([]( diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 3aea30c286a..92aa8862f65 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -511,7 +511,7 @@ public: `storePath' is returned; that is, the closures under the `referrers' relation instead of the `references' relation is returned. */ - void computeFSClosure(const PathSet & paths, + virtual void computeFSClosure(const PathSet & paths, PathSet & out, bool flipDirection = false, bool includeOutputs = false, bool includeDerivers = false); From 91d67692cfb05e4ece744fb9d144921ae920f2de Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 12:05:51 +0100 Subject: [PATCH 0185/2196] copyPaths(): Don't query path info for a path the target already has For example, this cuts "nix-copy-closure --from" on a NixOS system closure from 15.9s to 0.5s. 
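One step worth making explicit (my reading, not text from the commit): returning an empty reference set for a path the target already has is safe because store validity is closed under references; if the target has the path, it necessarily has the path's whole closure, so the traversal can prune the entire subgraph below it rather than merely skip one copy. In sketch form, the callback that feeds the copy traversal becomes:

    if (to->isValidPath(storePath))
        return PathSet();     // prune: the target has this path and, hence, its closure
    return from->queryPathInfo(storePath)->references;

(The actual one-line change is in the diff below.)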
--- src/libstore/store-api.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 9c755965e45..9ab3f36329b 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -822,6 +822,7 @@ void copyPaths(ref from, ref to, const Paths & storePaths, bool su PathSet(storePaths.begin(), storePaths.end()), [&](const Path & storePath) { + if (to->isValidPath(storePath)) return PathSet(); return from->queryPathInfo(storePath)->references; }, From c5b83d8913d73ea58ff9437c41bf6bd0c6839ad0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 13:50:01 +0100 Subject: [PATCH 0186/2196] copyPaths(): Use queryValidPaths() to reduce SSH latency --- src/build-remote/build-remote.cc | 16 +++++++---- src/libstore/legacy-ssh-store.cc | 13 +++++++++ src/libstore/local-store.cc | 2 +- src/libstore/local-store.hh | 2 +- src/libstore/remote-store.cc | 2 +- src/libstore/remote-store.hh | 2 +- src/libstore/store-api.cc | 35 ++++++++---------------- src/libstore/store-api.hh | 8 ++++-- src/nix-copy-closure/nix-copy-closure.cc | 2 +- src/nix/copy.cc | 2 +- 10 files changed, 46 insertions(+), 38 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 3908dfac487..6b142db9826 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -252,10 +252,10 @@ int main (int argc, char * * argv) string line; if (!getline(cin, line)) throw Error("hook caller didn't send inputs"); - auto inputs = tokenizeString>(line); + auto inputs = tokenizeString(line); if (!getline(cin, line)) throw Error("hook caller didn't send outputs"); - auto outputs = tokenizeString(line); + auto outputs = tokenizeString(line); AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + hostName + ".upload-lock", true); auto old = signal(SIGALRM, handleAlarm); alarm(15 * 60); @@ -269,11 +269,15 @@ int main (int argc, char * * argv) printError("building ‘%s’ on ‘%s’", drvPath, hostName); sshStore->buildDerivation(drvPath, readDerivation(drvPath)); - std::remove_if(outputs.begin(), outputs.end(), [=](const Path & path) { return store->isValidPath(path); }); - if (!outputs.empty()) { - setenv("NIX_HELD_LOCKS", concatStringsSep(" ", outputs).c_str(), 1); /* FIXME: ugly */ - copyPaths(ref(sshStore), store, outputs); + PathSet missing; + for (auto & path : outputs) + if (!store->isValidPath(path)) missing.insert(path); + + if (!missing.empty()) { + setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */ + copyPaths(ref(sshStore), store, missing); } + return; }); } diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 1a933259b52..84bf0f727f4 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -226,6 +226,19 @@ struct LegacySSHStore : public Store out.insert(res.begin(), res.end()); } + PathSet queryValidPaths(const PathSet & paths, bool maybeSubstitute = false) override + { + auto conn(connections->get()); + + conn->to + << cmdQueryValidPaths + << false // lock + << maybeSubstitute + << paths; + conn->to.flush(); + + return readStorePaths(*this, conn->from); + } }; static RegisterStoreImplementation regStore([]( diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 63f069c2ff1..dcfa000c432 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -669,7 +669,7 @@ bool LocalStore::isValidPathUncached(const Path & path) } -PathSet LocalStore::queryValidPaths(const 
PathSet & paths) +PathSet LocalStore::queryValidPaths(const PathSet & paths, bool maybeSubstitute) { PathSet res; for (auto & i : paths) diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 49a0d7e0d63..28e9a31c9fe 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -99,7 +99,7 @@ public: bool isValidPathUncached(const Path & path) override; - PathSet queryValidPaths(const PathSet & paths) override; + PathSet queryValidPaths(const PathSet & paths, bool maybeSubstitute = false) override; PathSet queryAllValidPaths() override; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 1ac2d7b6e78..a1f2db5b0ec 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -187,7 +187,7 @@ bool RemoteStore::isValidPathUncached(const Path & path) } -PathSet RemoteStore::queryValidPaths(const PathSet & paths) +PathSet RemoteStore::queryValidPaths(const PathSet & paths, bool maybeSubstitute) { auto conn(connections->get()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 66540a2a2ec..a08bd305639 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -28,7 +28,7 @@ public: bool isValidPathUncached(const Path & path) override; - PathSet queryValidPaths(const PathSet & paths) override; + PathSet queryValidPaths(const PathSet & paths, bool maybeSubstitute = false) override; PathSet queryAllValidPaths() override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 9ab3f36329b..b1bf961e1bf 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -377,7 +377,7 @@ void Store::queryPathInfo(const Path & storePath, } -PathSet Store::queryValidPaths(const PathSet & paths) +PathSet Store::queryValidPaths(const PathSet & paths, bool maybeSubstitute) { struct State { @@ -550,6 +550,8 @@ void copyClosure(ref srcStore, ref dstStore, for (auto & path : storePaths) srcStore->computeFSClosure(path, closure); + // FIXME: use copyStorePaths() + PathSet valid = dstStore->queryValidPaths(closure); if (valid.size() == closure.size()) return; @@ -791,35 +793,22 @@ std::list> getDefaultSubstituters() } -void copyPaths(ref from, ref to, const Paths & storePaths, bool substitute) -{ - if (substitute) { - /* Filter out .drv files (we don't want to build anything). */ - PathSet paths2; - for (auto & path : storePaths) - if (!isDerivation(path)) paths2.insert(path); - unsigned long long downloadSize, narSize; - PathSet willBuild, willSubstitute, unknown; - to->queryMissing(PathSet(paths2.begin(), paths2.end()), - willBuild, willSubstitute, unknown, downloadSize, narSize); - /* FIXME: should use ensurePath(), but it only - does one path at a time. 
*/ - if (!willSubstitute.empty()) - try { - to->buildPaths(willSubstitute); - } catch (Error & e) { - printMsg(lvlError, format("warning: %1%") % e.msg()); - } - } +void copyPaths(ref from, ref to, const PathSet & storePaths, bool substitute) +{ + PathSet valid = to->queryValidPaths(storePaths, substitute); + + PathSet missing; + for (auto & path : storePaths) + if (!valid.count(path)) missing.insert(path); std::string copiedLabel = "copied"; - logger->setExpected(copiedLabel, storePaths.size()); + logger->setExpected(copiedLabel, missing.size()); ThreadPool pool; processGraph(pool, - PathSet(storePaths.begin(), storePaths.end()), + PathSet(missing.begin(), missing.end()), [&](const Path & storePath) { if (to->isValidPath(storePath)) return PathSet(); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 92aa8862f65..98f2803f813 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -324,8 +324,10 @@ protected: public: - /* Query which of the given paths is valid. */ - virtual PathSet queryValidPaths(const PathSet & paths); + /* Query which of the given paths is valid. Optionally, try to + substitute missing paths. */ + virtual PathSet queryValidPaths(const PathSet & paths, + bool maybeSubstitute = false); /* Query the set of all valid paths. Note that for some store backends, the name part of store paths may be omitted @@ -653,7 +655,7 @@ ref openStore(const std::string & uri = getEnv("NIX_REMOTE")); ref openStore(const std::string & uri, const Store::Params & params); -void copyPaths(ref from, ref to, const Paths & storePaths, bool substitute = false); +void copyPaths(ref from, ref to, const PathSet & storePaths, bool substitute = false); enum StoreType { tDaemon, diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index 05a4deea3d5..ab80d96b5ef 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -58,6 +58,6 @@ int main(int argc, char ** argv) PathSet closure; from->computeFSClosure(storePaths2, closure, false, includeOutputs); - copyPaths(from, to, Paths(closure.begin(), closure.end()), useSubstitutes); + copyPaths(from, to, closure, useSubstitutes); }); } diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 976b0d3e0b8..083dc350686 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -46,7 +46,7 @@ struct CmdCopy : StorePathsCommand ref srcStore = srcUri.empty() ? store : openStore(srcUri); ref dstStore = dstUri.empty() ? 
store : openStore(dstUri); - copyPaths(srcStore, dstStore, storePaths); + copyPaths(srcStore, dstStore, PathSet(storePaths.begin(), storePaths.end())); } }; From 287084d688c3316d5840a9a7b5b2dff29b3dda94 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 14:19:32 +0100 Subject: [PATCH 0187/2196] ssh:// -> ssh-ng://, legacy-ssh:// -> ssh:// --- src/build-remote/build-remote.cc | 2 +- src/libstore/legacy-ssh-store.cc | 2 +- src/libstore/ssh-store.cc | 2 +- src/nix-copy-closure/nix-copy-closure.cc | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 6b142db9826..d7aee288670 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -233,7 +233,7 @@ int main (int argc, char * * argv) lock = -1; try { - sshStore = openStore("ssh://" + bestMachine->hostName, + sshStore = openStore("ssh-ng://" + bestMachine->hostName, { {"ssh-key", bestMachine->sshKey }, {"max-connections", "1" } }); hostName = bestMachine->hostName; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 84bf0f727f4..0e838846c79 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -8,7 +8,7 @@ namespace nix { -static std::string uriScheme = "legacy-ssh://"; +static std::string uriScheme = "ssh://"; struct LegacySSHStore : public Store { diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 20f020bdada..2a81a8b1ebe 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -8,7 +8,7 @@ namespace nix { -static std::string uriScheme = "ssh://"; +static std::string uriScheme = "ssh-ng://"; class SSHStore : public RemoteStore { diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index ab80d96b5ef..ed43bffbc8c 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -47,7 +47,7 @@ int main(int argc, char ** argv) if (sshHost.empty()) throw UsageError("no host name specified"); - auto remoteUri = "legacy-ssh://" + sshHost + (gzip ? "?compress=true" : ""); + auto remoteUri = "ssh://" + sshHost + (gzip ? "?compress=true" : ""); auto to = toMode ? openStore(remoteUri) : openStore(); auto from = toMode ? openStore() : openStore(remoteUri); From 558eda01154d47b3c88983576eedb582185b2201 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 16 Mar 2017 14:25:54 +0100 Subject: [PATCH 0188/2196] nix copy: Make -r option use the "from" store Previously, we tried to compute the closure in the local store, which obviously doesn't work. 
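A hypothetical invocation that shows why this matters (the store path is a made-up placeholder):

    $ nix copy -r --from https://cache.nixos.org /nix/store/<hash>-hello-2.10

With -r the closure of the given paths has to be enumerated, and the local store usually does not contain them yet, so the closure must be computed in the source store. The createStore() hook introduced below is what lets CmdCopy hand the --from store to that machinery.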
--- src/nix/command.cc | 7 ++++++- src/nix/command.hh | 1 + src/nix/copy.cc | 10 +++++++--- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/nix/command.cc b/src/nix/command.cc index 5a8288da912..a1b2c120a5d 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -79,9 +79,14 @@ StoreCommand::StoreCommand() mkFlag(0, "store", "store-uri", "URI of the Nix store to use", &storeUri); } +ref StoreCommand::createStore() +{ + return openStore(storeUri); +} + void StoreCommand::run() { - run(openStore(storeUri)); + run(createStore()); } StorePathsCommand::StorePathsCommand() diff --git a/src/nix/command.hh b/src/nix/command.hh index a29cdcf7f50..fa6c21abf8a 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -33,6 +33,7 @@ struct StoreCommand : virtual Command std::string storeUri; StoreCommand(); void run() override; + virtual ref createStore(); virtual void run(ref) = 0; }; diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 083dc350686..b2165cb8f85 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -38,13 +38,17 @@ struct CmdCopy : StorePathsCommand }; } - void run(ref store, Paths storePaths) override + ref createStore() override + { + return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri); + } + + void run(ref srcStore, Paths storePaths) override { if (srcUri.empty() && dstUri.empty()) throw UsageError("you must pass ‘--from’ and/or ‘--to’"); - ref srcStore = srcUri.empty() ? store : openStore(srcUri); - ref dstStore = dstUri.empty() ? store : openStore(dstUri); + ref dstStore = dstUri.empty() ? openStore() : openStore(dstUri); copyPaths(srcStore, dstStore, PathSet(storePaths.begin(), storePaths.end())); } From 3229f85585136e5c1d53a2ef2a434fdec75d912e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 14:35:50 +0100 Subject: [PATCH 0189/2196] Honor $NIX_SSHOPTS again NixOps needs this. 
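For example (the options themselves are hypothetical, chosen only to show where they end up):

    $ NIX_SSHOPTS="-p 2222 -o ConnectTimeout=10" nix-copy-closure --to user@host $(type -p firefox)

Each whitespace-separated word of $NIX_SSHOPTS is appended to the ssh command line by the new addCommonSSHOpts() below, together with any -i key file and the -C compression flag, and this happens both for the master connection and for each per-command connection.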
--- src/libstore/ssh.cc | 21 ++++++++++++--------- src/libstore/ssh.hh | 2 ++ 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 4f88fa64dbd..e54f3f4ba28 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -2,6 +2,16 @@ namespace nix { +void SSHMaster::addCommonSSHOpts(Strings & args) +{ + for (auto & i : tokenizeString(getEnv("NIX_SSHOPTS"))) + args.push_back(i); + if (!keyFile.empty()) + args.insert(args.end(), {"-i", keyFile}); + if (compress) + args.push_back("-C"); +} + std::unique_ptr SSHMaster::startCommand(const std::string & command) { Path socketPath = startMaster(); @@ -23,10 +33,7 @@ std::unique_ptr SSHMaster::startCommand(const std::string throw SysError("duping over stdout"); Strings args = { "ssh", host.c_str(), "-x", "-a" }; - if (!keyFile.empty()) - args.insert(args.end(), {"-i", keyFile}); - if (compress) - args.push_back("-C"); + addCommonSSHOpts(args); if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); args.push_back(command); @@ -73,11 +80,7 @@ Path SSHMaster::startMaster() , "-o", "LocalCommand=echo started" , "-o", "PermitLocalCommand=yes" }; - if (!keyFile.empty()) - args.insert(args.end(), {"-i", keyFile}); - if (compress) - args.push_back("-C"); - + addCommonSSHOpts(args); execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); throw SysError("starting SSH master"); diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index 72238dad79a..b4396467e54 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -23,6 +23,8 @@ private: Sync state_; + void addCommonSSHOpts(Strings & args); + public: SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress) From f8b84a3b8c5eff26114008ababc333c7d14c92de Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 14:43:03 +0100 Subject: [PATCH 0190/2196] Move istringstream_nocopy to a separate file --- src/libstore/derivations.cc | 2 +- src/libstore/s3-binary-cache-store.cc | 1 + src/libutil/hash.cc | 2 +- src/libutil/istringstream_nocopy.hh | 92 +++++++++++++++++++++++++++ src/libutil/util.hh | 88 ------------------------- 5 files changed, 95 insertions(+), 90 deletions(-) create mode 100644 src/libutil/istringstream_nocopy.hh diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 38a87240c3c..0c6ceb9f674 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -4,7 +4,7 @@ #include "util.hh" #include "worker-protocol.hh" #include "fs-accessor.hh" - +#include "istringstream_nocopy.hh" namespace nix { diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 5a8acfb506e..3053f908c4e 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -7,6 +7,7 @@ #include "globals.hh" #include "compression.hh" #include "download.hh" +#include "istringstream_nocopy.hh" #include #include diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index a8bbcf8c175..9f4afd93c2f 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -7,12 +7,12 @@ #include "hash.hh" #include "archive.hh" #include "util.hh" +#include "istringstream_nocopy.hh" #include #include #include - namespace nix { diff --git a/src/libutil/istringstream_nocopy.hh b/src/libutil/istringstream_nocopy.hh new file mode 100644 index 00000000000..f7beac578e3 --- /dev/null +++ b/src/libutil/istringstream_nocopy.hh @@ -0,0 +1,92 @@ +/* This file provides a variant of std::istringstream that doesn't + copy its 
string argument. This is useful for large strings. The + caller must ensure that the string object is not destroyed while + it's referenced by this object. */ + +#pragma once + +#include +#include + +template , class Allocator = std::allocator> +class basic_istringbuf_nocopy : public std::basic_streambuf +{ +public: + typedef std::basic_string string_type; + + typedef typename std::basic_streambuf::off_type off_type; + + typedef typename std::basic_streambuf::pos_type pos_type; + + typedef typename std::basic_streambuf::int_type int_type; + + typedef typename std::basic_streambuf::traits_type traits_type; + +private: + const string_type & s; + + off_type off; + +public: + basic_istringbuf_nocopy(const string_type & s) : s{s}, off{0} + { + } + +private: + pos_type seekoff(off_type off, std::ios_base::seekdir dir, std::ios_base::openmode which) + { + if (which & std::ios_base::in) { + this->off = dir == std::ios_base::beg + ? off + : (dir == std::ios_base::end + ? s.size() + off + : this->off + off); + } + return pos_type(this->off); + } + + pos_type seekpos(pos_type pos, std::ios_base::openmode which) + { + return seekoff(pos, std::ios_base::beg, which); + } + + std::streamsize showmanyc() + { + return s.size() - off; + } + + int_type underflow() + { + if (typename string_type::size_type(off) == s.size()) + return traits_type::eof(); + return traits_type::to_int_type(s[off]); + } + + int_type uflow() + { + if (typename string_type::size_type(off) == s.size()) + return traits_type::eof(); + return traits_type::to_int_type(s[off++]); + } + + int_type pbackfail(int_type ch) + { + if (off == 0 || (ch != traits_type::eof() && ch != s[off - 1])) + return traits_type::eof(); + + return traits_type::to_int_type(s[--off]); + } + +}; + +template , class Allocator = std::allocator> +class basic_istringstream_nocopy : public std::basic_iostream +{ + typedef basic_istringbuf_nocopy buf_type; + buf_type buf; +public: + basic_istringstream_nocopy(const typename buf_type::string_type & s) : + std::basic_iostream(&buf), buf(s) {}; +}; + +typedef basic_istringstream_nocopy istringstream_nocopy; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 645289f67c9..0e6941e4a8d 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -451,92 +451,4 @@ struct ReceiveInterrupts { } }; - -template , class Allocator = std::allocator> -class basic_istringbuf_nocopy : public std::basic_streambuf -{ -public: - typedef std::basic_string string_type; - - typedef typename std::basic_streambuf::off_type off_type; - - typedef typename std::basic_streambuf::pos_type pos_type; - - typedef typename std::basic_streambuf::int_type int_type; - - typedef typename std::basic_streambuf::traits_type traits_type; - -private: - const string_type & s; - - off_type off; - -public: - basic_istringbuf_nocopy(const string_type & s) : s{s}, off{0} - { - } - -private: - pos_type seekoff(off_type off, std::ios_base::seekdir dir, std::ios_base::openmode which) - { - if (which & std::ios_base::in) { - this->off = dir == std::ios_base::beg - ? off - : (dir == std::ios_base::end - ? 
s.size() + off - : this->off + off); - } - return pos_type(this->off); - } - - pos_type seekpos(pos_type pos, std::ios_base::openmode which) - { - return seekoff(pos, std::ios_base::beg, which); - } - - std::streamsize showmanyc() - { - return s.size() - off; - } - - int_type underflow() - { - if (typename string_type::size_type(off) == s.size()) - return traits_type::eof(); - return traits_type::to_int_type(s[off]); - } - - int_type uflow() - { - if (typename string_type::size_type(off) == s.size()) - return traits_type::eof(); - return traits_type::to_int_type(s[off++]); - } - - int_type pbackfail(int_type ch) - { - if (off == 0 || (ch != traits_type::eof() && ch != s[off - 1])) - return traits_type::eof(); - - return traits_type::to_int_type(s[--off]); - } - -}; - -template , class Allocator = std::allocator> -class basic_istringstream_nocopy : public std::basic_iostream -{ - typedef basic_istringbuf_nocopy buf_type; - buf_type buf; -public: - basic_istringstream_nocopy(const typename buf_type::string_type & s) : - std::basic_iostream(&buf), buf(s) {}; -}; - -/* A variant of std::istringstream that doesn't its string - argument. This is useful for large strings. The caller must ensure - that the string object is not destroyed while it's referenced by - this object. */ -typedef basic_istringstream_nocopy istringstream_nocopy; - } From 7347daba8cf3a12706d3714bdaf9b7c104accde6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 14:47:35 +0100 Subject: [PATCH 0191/2196] Don't make brotli a hard dependency --- configure.ac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index 21ca78af0fd..f18ba799e74 100644 --- a/configure.ac +++ b/configure.ac @@ -128,7 +128,7 @@ NEED_PROG(gzip, gzip) NEED_PROG(xz, xz) AC_PATH_PROG(dot, dot) AC_PATH_PROG(pv, pv, pv) -NEED_PROG(bro, bro) +AC_PATH_PROG(bro, bro, bro) # Test that Perl has the open/fork feature (Perl 5.8.0 and beyond). From 4bb38591e591e3ef59f7881575ca1f28015a8c47 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 15:06:46 +0100 Subject: [PATCH 0192/2196] Restore cache.nixos.org as the default substituter Fixes #1283. 
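Stepping back to the istringstream_nocopy header split out two patches above: here is a minimal usage sketch. The include path follows the new file's location in libutil (an assumption about how it would be included), and, as the header's comment says, the string passed in must outlive the stream:

    #include <iostream>
    #include <string>
    #include "istringstream_nocopy.hh" // the new header added above

    int main()
    {
        // Hypothetical large string, e.g. a serialised derivation; no copy is made.
        const std::string data = "Derive([...],[...])";

        istringstream_nocopy str(data);

        char c;
        while (str.get(c)) // read the characters back through the stream interface
            std::cout << c;
        std::cout << std::endl;
    }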
--- perl/lib/Nix/Config.pm.in | 10 ---------- src/libstore/crypto.cc | 4 +++- src/libstore/store-api.cc | 5 +---- 3 files changed, 4 insertions(+), 15 deletions(-) diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in index 3575d99cb67..3613926f573 100644 --- a/perl/lib/Nix/Config.pm.in +++ b/perl/lib/Nix/Config.pm.in @@ -19,10 +19,6 @@ $useBindings = "@perlbindings@" eq "yes"; %config = (); -%binaryCachePublicKeys = (); - -$defaultPublicKeys = "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="; - sub readConfig { if (defined $ENV{'_NIX_OPTIONS'}) { foreach my $s (split '\n', $ENV{'_NIX_OPTIONS'}) { @@ -40,12 +36,6 @@ sub readConfig { } close CONFIG; } - - foreach my $s (split(/ /, $config{"binary-cache-public-keys"} // $defaultPublicKeys)) { - my ($keyName, $publicKey) = split ":", $s; - next unless defined $keyName && defined $publicKey; - $binaryCachePublicKeys{$keyName} = decode_base64($publicKey); - } } return 1; diff --git a/src/libstore/crypto.cc b/src/libstore/crypto.cc index 747483afb30..0fc86a1fe92 100644 --- a/src/libstore/crypto.cc +++ b/src/libstore/crypto.cc @@ -105,7 +105,9 @@ PublicKeys getDefaultPublicKeys() // FIXME: filter duplicates - for (auto s : settings.get("binary-cache-public-keys", Strings())) { + for (auto s : settings.get("binary-cache-public-keys", + Strings{"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="})) + { PublicKey key(s); publicKeys.emplace(key.name, key); } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b1bf961e1bf..8c3422e0b43 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -778,10 +778,7 @@ std::list> getDefaultSubstituters() state->stores.push_back(openStore(uri)); }; - for (auto uri : settings.get("substituters", Strings())) - addStore(uri); - - for (auto uri : settings.get("binary-caches", Strings())) + for (auto uri : settings.get("substituters", settings.get("binary-caches", Strings{"https://cache.nixos.org/"}))) addStore(uri); for (auto uri : settings.get("extra-binary-caches", Strings())) From ecbc3fedd3d5bdc5a0e1a0a51b29062f2874ac8b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 15:10:35 +0100 Subject: [PATCH 0193/2196] Require signatures by default This corresponds to the NixOS default. --- src/libstore/local-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index dcfa000c432..8610841d722 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -44,7 +44,7 @@ LocalStore::LocalStore(const Params & params) , reservedPath(dbDir + "/reserved") , schemaPath(dbDir + "/schema") , trashDir(realStoreDir + "/trash") - , requireSigs(trim(settings.get("signed-binary-caches", std::string(""))) != "") // FIXME: rename option + , requireSigs(trim(settings.get("signed-binary-caches", std::string("*"))) != "") // FIXME: rename option , publicKeys(getDefaultPublicKeys()) { auto state(_state.lock()); From e1e49c58e1218fb6240dbd9780a8c67462e95d2d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 17:59:18 +0100 Subject: [PATCH 0194/2196] Only use cache.nixos.org when the store is /nix/store This is consistent with the behaviour of the old download-from-binary-cache substituter. 
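Together with the previous substituter patch, the lookup now has three layers: 'substituters' if set, otherwise the legacy 'binary-caches' option, otherwise a built-in default that only applies when the store is /nix/store. A toy, self-contained sketch of that precedence (standard C++; 'get' is a simplified stand-in for settings.get()):

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    using Strings = std::vector<std::string>;
    using Config = std::map<std::string, Strings>;

    // Simplified stand-in for settings.get(name, default).
    Strings get(const Config & config, const std::string & name, const Strings & def)
    {
        auto i = config.find(name);
        return i != config.end() ? i->second : def;
    }

    int main()
    {
        Config config;                       // pretend this was parsed from nix.conf
        std::string nixStore = "/nix/store"; // the configured store directory

        // The built-in default only applies to the standard store location.
        Strings defaultSubstituters;
        if (nixStore == "/nix/store")
            defaultSubstituters.push_back("https://cache.nixos.org/");

        // 'substituters' wins if set, else the legacy 'binary-caches', else the default.
        auto substituters =
            get(config, "substituters",
                get(config, "binary-caches", defaultSubstituters));

        for (auto & uri : substituters)
            std::cout << uri << std::endl;
    }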
--- src/libstore/store-api.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 8c3422e0b43..441166d04d8 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -778,7 +778,11 @@ std::list> getDefaultSubstituters() state->stores.push_back(openStore(uri)); }; - for (auto uri : settings.get("substituters", settings.get("binary-caches", Strings{"https://cache.nixos.org/"}))) + Strings defaultSubstituters; + if (settings.nixStore == "/nix/store") + defaultSubstituters.push_back("https://cache.nixos.org/"); + + for (auto uri : settings.get("substituters", settings.get("binary-caches", defaultSubstituters))) addStore(uri); for (auto uri : settings.get("extra-binary-caches", Strings())) From aa23bba27f402913a7cfa7854b5d3b6ddf321e74 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 18:06:13 +0100 Subject: [PATCH 0195/2196] Fix tests to reflect the signed-binary-caches default change --- doc/manual/command-ref/conf-file.xml | 7 ++++--- tests/binary-cache.sh | 10 +++++----- tests/repair.sh | 4 ++-- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 3bd133918f4..6952829e8f7 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -394,9 +394,10 @@ flag, e.g. --option gc-keep-outputs false. signed-binary-caches - If set to *, Nix will only - download binaries if they are signed using one of the keys listed - in . + If set to * (the default), Nix + will only download binaries if they are signed using one of the + keys listed in . Set to + the empty string to disable signature checking. diff --git a/tests/binary-cache.sh b/tests/binary-cache.sh index 4ce428f643e..532099d0214 100644 --- a/tests/binary-cache.sh +++ b/tests/binary-cache.sh @@ -18,7 +18,7 @@ basicTests() { nix-env --option binary-caches "file://$cacheDir" -f dependencies.nix -qas \* | grep -- "---" - nix-store --option binary-caches "file://$cacheDir" -r $outPath + nix-store --option binary-caches "file://$cacheDir" --option signed-binary-caches '' -r $outPath [ -x $outPath/program ] @@ -34,7 +34,7 @@ basicTests() { x=$(nix-env -f dependencies.nix -qas \* --prebuilt-only) [ -z "$x" ] - nix-store --option binary-caches "file://$cacheDir" -r $outPath + nix-store --option binary-caches "file://$cacheDir" --option signed-binary-caches '' -r $outPath nix-store --check-validity $outPath nix-store -qR $outPath | grep input-2 @@ -63,7 +63,7 @@ mv $nar $nar.good mkdir -p $TEST_ROOT/empty nix-store --dump $TEST_ROOT/empty | xz > $nar -nix-build --option binary-caches "file://$cacheDir" dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log +nix-build --option binary-caches "file://$cacheDir" --option signed-binary-caches '' dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log grep -q "hash mismatch" $TEST_ROOT/log mv $nar.good $nar @@ -73,7 +73,7 @@ mv $nar.good $nar clearStore clearCacheCache -if nix-store --option binary-caches "file://$cacheDir" --option signed-binary-caches '*' -r $outPath; then +if nix-store --option binary-caches "file://$cacheDir" -r $outPath; then echo "unsigned binary cache incorrectly accepted" exit 1 fi @@ -99,7 +99,7 @@ clearStore rm $(grep -l "StorePath:.*dependencies-input-2" $cacheDir/*.narinfo) -nix-build --option binary-caches "file://$cacheDir" dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log +nix-build --option binary-caches 
"file://$cacheDir" --option signed-binary-caches '' dependencies.nix -o $TEST_ROOT/result 2>&1 | tee $TEST_ROOT/log grep -q "fetching path" $TEST_ROOT/log diff --git a/tests/repair.sh b/tests/repair.sh index 782838704da..57152d450a1 100644 --- a/tests/repair.sh +++ b/tests/repair.sh @@ -51,7 +51,7 @@ nix copy --recursive --to file://$cacheDir $path chmod u+w $path2 rm -rf $path2 -nix-store --verify --check-contents --repair --option binary-caches "file://$cacheDir" +nix-store --verify --check-contents --repair --option binary-caches "file://$cacheDir" --option signed-binary-caches '' if [ "$(nix-hash $path2)" != "$hash" -o -e $path2/bad ]; then echo "path not repaired properly" >&2 @@ -69,7 +69,7 @@ if nix-store --verify-path $path2; then exit 1 fi -nix-store --repair-path $path2 --option binary-caches "file://$cacheDir" +nix-store --repair-path $path2 --option binary-caches "file://$cacheDir" --option signed-binary-caches '' if [ "$(nix-hash $path2)" != "$hash" -o -e $path2/bad ]; then echo "path not repaired properly" >&2 From ed5c0f69f28732879a7aac2d67367446f6d3152d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 19:20:21 +0100 Subject: [PATCH 0196/2196] Don't hang in decompression if bzip2 data ends prematurely --- src/libutil/compression.cc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 8ffd55efb23..11eec7a7bc2 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -49,6 +49,9 @@ static ref decompressXZ(const std::string & in) if (ret != LZMA_OK) throw Error("error while decompressing xz file"); + + if (strm.avail_in == 0) + throw Error("xz data ends prematurely"); } } @@ -86,6 +89,9 @@ static ref decompressBzip2(const std::string & in) if (ret != BZ_OK) throw Error("error while decompressing bzip2 file"); + + if (strm.avail_in == 0) + throw Error("bzip2 data ends prematurely"); } } From 895a74a814cd67cd2e13d0621603583a2d15b159 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Mar 2017 19:23:07 +0100 Subject: [PATCH 0197/2196] LocalFSStore::getBuildLog(): Handle corrupted logs --- src/libstore/local-fs-store.cc | 9 +++++++-- src/libutil/compression.cc | 24 ++++++++++++------------ src/libutil/compression.hh | 2 ++ 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 002ee4a65ce..57e1b8a09fe 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -94,6 +94,7 @@ std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) assertStorePath(path); + if (!isDerivation(path)) { try { path = queryPathInfo(path)->deriver; @@ -116,8 +117,12 @@ std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) if (pathExists(logPath)) return std::make_shared(readFile(logPath)); - else if (pathExists(logBz2Path)) - return decompress("bzip2", readFile(logBz2Path)); + else if (pathExists(logBz2Path)) { + try { + return decompress("bzip2", readFile(logBz2Path)); + } catch (Error &) { } + } + } return nullptr; diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 11eec7a7bc2..f913d0f5bad 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -18,7 +18,7 @@ static ref decompressXZ(const std::string & in) lzma_ret ret = lzma_stream_decoder( &strm, UINT64_MAX, LZMA_CONCATENATED); if (ret != LZMA_OK) - throw Error("unable to initialise lzma decoder"); + throw CompressionError("unable to initialise lzma decoder"); Finally free([&]() { 
lzma_end(&strm); }); @@ -48,10 +48,10 @@ static ref decompressXZ(const std::string & in) return res; if (ret != LZMA_OK) - throw Error("error while decompressing xz file"); + throw CompressionError("error while decompressing xz file"); if (strm.avail_in == 0) - throw Error("xz data ends prematurely"); + throw CompressionError("xz data ends prematurely"); } } @@ -62,7 +62,7 @@ static ref decompressBzip2(const std::string & in) int ret = BZ2_bzDecompressInit(&strm, 0, 0); if (ret != BZ_OK) - throw Error("unable to initialise bzip2 decoder"); + throw CompressionError("unable to initialise bzip2 decoder"); Finally free([&]() { BZ2_bzDecompressEnd(&strm); }); @@ -88,10 +88,10 @@ static ref decompressBzip2(const std::string & in) return res; if (ret != BZ_OK) - throw Error("error while decompressing bzip2 file"); + throw CompressionError("error while decompressing bzip2 file"); if (strm.avail_in == 0) - throw Error("bzip2 data ends prematurely"); + throw CompressionError("bzip2 data ends prematurely"); } } @@ -144,7 +144,7 @@ struct XzSink : CompressionSink lzma_ret ret = lzma_easy_encoder( &strm, 6, LZMA_CHECK_CRC64); if (ret != LZMA_OK) - throw Error("unable to initialise lzma encoder"); + throw CompressionError("unable to initialise lzma encoder"); // FIXME: apply the x86 BCJ filter? strm.next_out = outbuf; @@ -168,7 +168,7 @@ struct XzSink : CompressionSink lzma_ret ret = lzma_code(&strm, LZMA_FINISH); if (ret != LZMA_OK && ret != LZMA_STREAM_END) - throw Error("error while flushing xz file"); + throw CompressionError("error while flushing xz file"); if (strm.avail_out == 0 || ret == LZMA_STREAM_END) { nextSink(outbuf, sizeof(outbuf) - strm.avail_out); @@ -192,7 +192,7 @@ struct XzSink : CompressionSink lzma_ret ret = lzma_code(&strm, LZMA_RUN); if (ret != LZMA_OK) - throw Error("error while compressing xz file"); + throw CompressionError("error while compressing xz file"); if (strm.avail_out == 0) { nextSink(outbuf, sizeof(outbuf)); @@ -215,7 +215,7 @@ struct BzipSink : CompressionSink memset(&strm, 0, sizeof(strm)); int ret = BZ2_bzCompressInit(&strm, 9, 0, 30); if (ret != BZ_OK) - throw Error("unable to initialise bzip2 encoder"); + throw CompressionError("unable to initialise bzip2 encoder"); strm.next_out = outbuf; strm.avail_out = sizeof(outbuf); @@ -238,7 +238,7 @@ struct BzipSink : CompressionSink int ret = BZ2_bzCompress(&strm, BZ_FINISH); if (ret != BZ_FINISH_OK && ret != BZ_STREAM_END) - throw Error("error while flushing bzip2 file"); + throw CompressionError("error while flushing bzip2 file"); if (strm.avail_out == 0 || ret == BZ_STREAM_END) { nextSink((unsigned char *) outbuf, sizeof(outbuf) - strm.avail_out); @@ -262,7 +262,7 @@ struct BzipSink : CompressionSink int ret = BZ2_bzCompress(&strm, BZ_RUN); if (ret != BZ_OK) - Error("error while compressing bzip2 file"); + CompressionError("error while compressing bzip2 file"); if (strm.avail_out == 0) { nextSink((unsigned char *) outbuf, sizeof(outbuf)); diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh index eacf559d65e..e3e6f5a9930 100644 --- a/src/libutil/compression.hh +++ b/src/libutil/compression.hh @@ -21,4 +21,6 @@ ref makeCompressionSink(const std::string & method, Sink & next MakeError(UnknownCompressionMethod, Error); +MakeError(CompressionError, Error); + } From 0a7ca24c26d57d532df614bdfe78b8b5c228e90d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 22 Mar 2017 11:53:33 +0100 Subject: [PATCH 0198/2196] Fix xz decompression Fixes #1285. 
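The net effect of the build-log patches above (CompressionError plus getBuildLog() catching it) is that a truncated or corrupted .bz2 log now surfaces as "no log available" rather than a hang or a hard error. A toy, self-contained sketch of that pattern (standard C++; decompressBzip2 here is a made-up stand-in for the real decompress("bzip2", ...) from compression.hh):

    #include <iostream>
    #include <memory>
    #include <stdexcept>
    #include <string>

    struct CompressionError : std::runtime_error { using std::runtime_error::runtime_error; };

    // Stand-in decompressor: pretend very short inputs are truncated streams.
    std::string decompressBzip2(const std::string & data)
    {
        if (data.size() < 10)
            throw CompressionError("bzip2 data ends prematurely");
        return "decompressed build log";
    }

    std::shared_ptr<std::string> getBuildLogFor(const std::string & compressed)
    {
        try {
            return std::make_shared<std::string>(decompressBzip2(compressed));
        } catch (CompressionError & e) {
            std::cerr << "ignoring corrupted log: " << e.what() << std::endl;
            return nullptr; // caller treats this as "no log"
        }
    }

    int main()
    {
        auto ok  = getBuildLogFor("a plausibly long bzip2 stream");
        auto bad = getBuildLogFor("short");
        std::cout << (ok ? *ok : "<none>") << " / " << (bad ? *bad : "<none>") << std::endl;
    }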
--- src/libutil/compression.cc | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index f913d0f5bad..b0b1d709fa4 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -48,10 +48,7 @@ static ref decompressXZ(const std::string & in) return res; if (ret != LZMA_OK) - throw CompressionError("error while decompressing xz file"); - - if (strm.avail_in == 0) - throw CompressionError("xz data ends prematurely"); + throw CompressionError("error %d while decompressing xz file", ret); } } From 8edf1071773244095aacd059ebe5b03098d255e3 Mon Sep 17 00:00:00 2001 From: Matt Audesse Date: Wed, 22 Mar 2017 10:11:23 -0400 Subject: [PATCH 0199/2196] Fix minor grammatical nitpick ("it's" vs. "its") in `README.md`. See: http://data.grammarbook.com/blog/pronouns/1-grammar-error/ --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1eb73b256f5..3173c6c448a 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ Nix, the purely functional package manager ------------------------------------------ -Nix is a new take on package management that is fairly unique. Because of it's +Nix is a new take on package management that is fairly unique. Because of its purity aspects, a lot of issues found in traditional package managers don't appear with Nix. From 023217f07c1acd75403bfea88bc38eb48905dc4e Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Fri, 24 Mar 2017 23:05:49 +0100 Subject: [PATCH 0200/2196] use std::tuple for ValueMap allocator --- src/libexpr/value.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 81f918d48de..802e8ed2ee7 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -256,7 +256,7 @@ size_t valueSize(Value & v); #if HAVE_BOEHMGC typedef std::vector > ValueVector; -typedef std::map, gc_allocator > ValueMap; +typedef std::map, gc_allocator > > ValueMap; #else typedef std::vector ValueVector; typedef std::map ValueMap; From c60715e937e3773bbb8a114fc9b9c6577f8c5cb5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 28 Mar 2017 13:08:13 +0200 Subject: [PATCH 0201/2196] Ignore broken "Deriver: unknown-deriver" fields in .narinfo These were generated by a legacy tool. --- src/libstore/nar-info.cc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 201cac671a5..d1042c6de25 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -59,9 +59,11 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & } } else if (name == "Deriver") { - auto p = store.storeDir + "/" + value; - if (!store.isStorePath(p)) corrupt(); - deriver = p; + if (value != "unknown-deriver") { + auto p = store.storeDir + "/" + value; + if (!store.isStorePath(p)) corrupt(); + deriver = p; + } } else if (name == "System") system = value; From 0bb8db257d98a32abde759f4d07d28b5178bd3bf Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Thu, 30 Mar 2017 08:04:21 -0400 Subject: [PATCH 0202/2196] Add exec primop behind allow-unsafe-native-code-during-evaluation. Execute a given program with the (optional) given arguments as the user running the evaluation, parsing stdout as an expression to be evaluated. 
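In other words, the primop's pipeline is: run the program, capture its stdout, parse that as a Nix expression, and evaluate the result. A condensed sketch of that pipeline, using only calls that appear in the diff further below (runProgram, parseExprFromString, eval); error handling and context realisation are omitted, and the include set is an assumption:

    #include "eval.hh" // EvalState, Expr, Value
    #include "util.hh" // runProgram() is assumed to come from here, as in libutil

    using namespace nix;

    /* Run 'program args...' and evaluate its standard output as a Nix
       expression, storing the result in 'v'. */
    void execAndEval(EvalState & state, const Path & basePath,
        const std::string & program, const Strings & args, Value & v)
    {
        std::string output = runProgram(program, true, args); // capture stdout
        Expr * parsed = state.parseExprFromString(output, basePath);
        state.eval(parsed, v);
    }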
There are many use cases for nix that would benefit from being able to run arbitrary code during evaluation, including but not limited to: * Automatic git fetching to get a sha256 from a git revision * git rev-parse HEAD * Automatic extraction of information from build specifications from other tools, particularly language-specific package managers like cabal or npm * Secrets decryption (e.g. with nixops) * Private repository fetching Ideally, we would add this functionality in a more principled way to nix, but in the mean time 'builtins.exec' can be used to get these tasks done. The primop is only available when the 'allow-unsafe-native-code-during-evaluation' nix option is true. That flag also enables the 'importNative' primop, which is strictly more powerful but less convenient (since it requires compiling a plugin against the running version of nix). --- src/libexpr/primops.cc | 56 ++++++++++++++++++++++++++++++++++++++++- src/libstore/globals.cc | 4 +-- src/libstore/globals.hh | 4 +-- 3 files changed, 59 insertions(+), 5 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 93097f3d1bf..a98da737e4a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -178,6 +178,58 @@ static void prim_importNative(EvalState & state, const Pos & pos, Value * * args } +/* Execute a program and parse its output */ +static void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + state.forceAttrs(*args[0], pos); + auto sProgram = state.symbols.create("program"); + auto sArguments = state.symbols.create("arguments"); + PathSet context; + string program; + bool programSet = false; + Strings commandArgs; + for (auto & attr : *args[0]->attrs) { + if (attr.name == sProgram) { + program = state.coerceToString(*attr.pos, *attr.value, context, false, false); + programSet = true; + } else if (attr.name == sArguments) { + state.forceList(*attr.value, *attr.pos); + auto elems = attr.value->listElems(); + for (unsigned int i = 0; i < attr.value->listSize(); ++i) { + commandArgs.emplace_back(state.coerceToString(*attr.pos, *elems[i], context, false, false)); + } + } else { + throw EvalError(format("unexpected attribute ‘%1%’ in argument to builtins.exec, at %2%") + % attr.name % pos); + } + } + if (!programSet) { + throw EvalError(format("attribute ‘programSet’ required, at %1%") % pos); + } + try { + state.realiseContext(context); + } catch (InvalidPathError & e) { + throw EvalError(format("cannot execute ‘%1%’, since path ‘%2%’ is not valid, at %3%") + % program % e.path % pos); + } + + auto output = runProgram(program, true, commandArgs); + Expr * parsed; + try { + parsed = state.parseExprFromString(output, pos.file); + } catch (Error & e) { + e.addPrefix(format("While parsing the output from ‘%1%’, at %2%\n") % program % pos); + throw; + } + try { + state.eval(parsed, v); + } catch (Error & e) { + e.addPrefix(format("While evaluating the output from ‘%1%’, at %2%\n") % program % pos); + throw; + } +} + + /* Return a string representing the type of the expression. 
*/ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Value & v) { @@ -1903,8 +1955,10 @@ void EvalState::createBaseEnv() mkApp(v, *baseEnv.values[baseEnvDispl - 1], *v2); forceValue(v); addConstant("import", v); - if (settings.enableImportNative) + if (settings.enableNativeCode) { addPrimOp("__importNative", 2, prim_importNative); + addPrimOp("__exec", 1, prim_exec); + } addPrimOp("__typeOf", 1, prim_typeOf); addPrimOp("isNull", 1, prim_isNull); addPrimOp("__isFunction", 1, prim_isFunction); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 012b3d5b8b9..8c900be77b8 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -67,7 +67,7 @@ Settings::Settings() envKeepDerivations = false; lockCPU = getEnv("NIX_AFFINITY_HACK", "1") == "1"; showTrace = false; - enableImportNative = false; + enableNativeCode = false; netrcFile = fmt("%s/%s", nixConfDir, "netrc"); caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")); enableImportFromDerivation = true; @@ -179,7 +179,7 @@ void Settings::update() _get(envKeepDerivations, "env-keep-derivations"); _get(sshSubstituterHosts, "ssh-substituter-hosts"); _get(useSshSubstituter, "use-ssh-substituter"); - _get(enableImportNative, "allow-unsafe-native-code-during-evaluation"); + _get(enableNativeCode, "allow-unsafe-native-code-during-evaluation"); _get(useCaseHack, "use-case-hack"); _get(preBuildHook, "pre-build-hook"); _get(keepGoing, "keep-going"); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 46272168191..ccec300f776 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -181,8 +181,8 @@ struct Settings { /* Whether to show a stack trace if Nix evaluation fails. */ bool showTrace; - /* Whether the importNative primop should be enabled */ - bool enableImportNative; + /* Whether native-code enabling primops should be enabled */ + bool enableNativeCode; /* The hook to run just before a build to set derivation-specific build settings */ From a75475ca611fbc9074792a30740d19fd3a3a6cf7 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Thu, 30 Mar 2017 16:51:50 -0400 Subject: [PATCH 0203/2196] Remove tabs --- release.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/release.nix b/release.nix index b93b64ea923..c0e1385e13f 100644 --- a/release.nix +++ b/release.nix @@ -31,7 +31,7 @@ let git ]; - configureFlags = "--enable-gc"; + configureFlags = "--enable-gc"; postUnpack = '' # Clean up when building from a working tree. 
@@ -109,7 +109,7 @@ let [ (builtins.getAttr system jobs.build) curl bzip2 xz pkgconfig pkgs.perl ] ++ lib.optional stdenv.isLinux libsodium; - configureFlags = '' + configureFlags = '' --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix} --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix} --with-www-curl=${perlPackages.WWWCurl}/${pkgs.perl.libPrefix} @@ -117,7 +117,7 @@ let enableParallelBuilding = true; - postUnpack = "sourceRoot=$sourceRoot/perl"; + postUnpack = "sourceRoot=$sourceRoot/perl"; preBuild = "unset NIX_INDENT_MAKE"; }); From d0877003475ecb3ea18a256239a9948fe0f28035 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Mar 2017 15:31:34 +0200 Subject: [PATCH 0204/2196] Fix perl build --- perl/Makefile | 2 +- perl/Makefile.config.in | 1 - perl/configure.ac | 2 -- perl/local.mk | 42 +++++++++++++++++++---------------------- 4 files changed, 20 insertions(+), 27 deletions(-) diff --git a/perl/Makefile b/perl/Makefile index 41a32576e9b..cf655ae3d65 100644 --- a/perl/Makefile +++ b/perl/Makefile @@ -1,6 +1,6 @@ makefiles = local.mk -GLOBAL_CXXFLAGS += -std=c++11 -g -Wall +GLOBAL_CXXFLAGS += -std=c++14 -g -Wall -include nix/config.h -include Makefile.config diff --git a/perl/Makefile.config.in b/perl/Makefile.config.in index 901d1283e55..c87d4817e17 100644 --- a/perl/Makefile.config.in +++ b/perl/Makefile.config.in @@ -16,4 +16,3 @@ perl = @perl@ perllibdir = @perllibdir@ nixstoredir = @nixstoredir@ nixsysconfdir = @nixsysconfdir@ -perlbindings = @perlbindings@ diff --git a/perl/configure.ac b/perl/configure.ac index dea2b614004..d617c78535f 100644 --- a/perl/configure.ac +++ b/perl/configure.ac @@ -99,8 +99,6 @@ AC_SUBST(nixlocalstatedir) AC_SUBST(nixsysconfdir) AC_SUBST(nixstoredir) -AC_SUBST(perlbindings, "yes") - # Expand all variables in config.status. 
test "$prefix" = NONE && prefix=$ac_default_prefix test "$exec_prefix" = NONE && exec_prefix='${prefix}' diff --git a/perl/local.mk b/perl/local.mk index 1793ececfd6..35113bd960d 100644 --- a/perl/local.mk +++ b/perl/local.mk @@ -10,38 +10,34 @@ nix_perl_modules := $(nix_perl_sources:.in=) $(foreach x, $(nix_perl_modules), $(eval $(call install-data-in, $(x), $(perllibdir)/Nix))) -ifeq ($(perlbindings), yes) - - lib/Nix/Store.cc: lib/Nix/Store.xs +lib/Nix/Store.cc: lib/Nix/Store.xs $(trace-gen) xsubpp $^ -output $@ - libraries += Store - - Store_DIR := lib/Nix +libraries += Store - Store_SOURCES := $(Store_DIR)/Store.cc +Store_DIR := lib/Nix - Store_CXXFLAGS = \ - -I$(shell $(perl) -e 'use Config; print $$Config{archlibexp};')/CORE \ - -D_FILE_OFFSET_BITS=64 \ - -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \ - -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion \ - $(NIX_CFLAGS) +Store_SOURCES := $(Store_DIR)/Store.cc - Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS) +Store_CXXFLAGS = \ + -I$(shell $(perl) -e 'use Config; print $$Config{archlibexp};')/CORE \ + -D_FILE_OFFSET_BITS=64 \ + -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \ + -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion \ + $(NIX_CFLAGS) - ifeq (CYGWIN,$(findstring CYGWIN,$(OS))) - archlib = $(shell perl -E 'use Config; print $$Config{archlib};') - libperl = $(shell perl -E 'use Config; print $$Config{libperl};') - Store_LDFLAGS += $(shell find ${archlib} -name ${libperl}) - endif +Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS) - Store_ALLOW_UNDEFINED = 1 +ifeq (CYGWIN,$(findstring CYGWIN,$(OS))) + archlib = $(shell perl -E 'use Config; print $$Config{archlib};') + libperl = $(shell perl -E 'use Config; print $$Config{libperl};') + Store_LDFLAGS += $(shell find ${archlib} -name ${libperl}) +endif - Store_FORCE_INSTALL = 1 +Store_ALLOW_UNDEFINED = 1 - Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store +Store_FORCE_INSTALL = 1 -endif +Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config From e1509adbbb12851c30ba230650005bd141958249 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Mar 2017 15:50:11 +0200 Subject: [PATCH 0205/2196] Retry curl error 16 --- src/libstore/download.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 22bde086e6a..78fcdc62189 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -301,6 +301,7 @@ struct CurlDownloader : public Downloader || code == CURLE_COULDNT_RESOLVE_HOST || code == CURLE_RECV_ERROR #if LIBCURL_VERSION_NUM >= 0x073200 + || code == CURLE_HTTP2 || code == CURLE_HTTP2_STREAM #endif ) ? 
Transient : From b9b8b8a63ba49dc027b08950bd3cf30cc8f09ec5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Mar 2017 15:54:15 +0200 Subject: [PATCH 0206/2196] Fix evaluation error --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 1a8d0927c10..8727c2520b1 100644 --- a/release.nix +++ b/release.nix @@ -101,7 +101,7 @@ let }); - perl = pkgs.lib.genAttrs systems (system: + perlBindings = pkgs.lib.genAttrs systems (system: let pkgs = import { inherit system; }; in with pkgs; From 3ecb09a40a8500d1052b087295b589ca4856fd7a Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Fri, 31 Mar 2017 11:58:41 -0400 Subject: [PATCH 0207/2196] builtins.exec: Make the argument just a list --- src/libexpr/primops.cc | 31 +++++++++---------------------- 1 file changed, 9 insertions(+), 22 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a98da737e4a..615cc813843 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -181,30 +181,17 @@ static void prim_importNative(EvalState & state, const Pos & pos, Value * * args /* Execute a program and parse its output */ static void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v) { - state.forceAttrs(*args[0], pos); - auto sProgram = state.symbols.create("program"); - auto sArguments = state.symbols.create("arguments"); + state.forceList(*args[0], pos); + auto elems = args[0]->listElems(); + auto count = args[0]->listSize(); + if (count == 0) { + throw EvalError(format("at least one argument to 'exec' required, at %1%") % pos); + } PathSet context; - string program; - bool programSet = false; + auto program = state.coerceToString(pos, *elems[0], context, false, false); Strings commandArgs; - for (auto & attr : *args[0]->attrs) { - if (attr.name == sProgram) { - program = state.coerceToString(*attr.pos, *attr.value, context, false, false); - programSet = true; - } else if (attr.name == sArguments) { - state.forceList(*attr.value, *attr.pos); - auto elems = attr.value->listElems(); - for (unsigned int i = 0; i < attr.value->listSize(); ++i) { - commandArgs.emplace_back(state.coerceToString(*attr.pos, *elems[i], context, false, false)); - } - } else { - throw EvalError(format("unexpected attribute ‘%1%’ in argument to builtins.exec, at %2%") - % attr.name % pos); - } - } - if (!programSet) { - throw EvalError(format("attribute ‘programSet’ required, at %1%") % pos); + for (unsigned int i = 1; i < args[0]->listSize(); ++i) { + commandArgs.emplace_back(state.coerceToString(pos, *elems[i], context, false, false)); } try { state.realiseContext(context); From 29d35805c63f316aa19b33a481f953ca332d9b65 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Mar 2017 18:12:01 +0200 Subject: [PATCH 0208/2196] Sandbox: Fix /dev/ptmx on recent kernels This fixes "No such file or directory" when opening /dev/ptmx (e.g. http://hydra.nixos.org/build/51094249). The reason appears to be some changes to /dev/ptmx / /dev/pts handling between Linux 4.4 and 4.9. See https://patchwork.kernel.org/patch/7832531/. The fix is to go back to mounting a proper /dev/pts instance inside the sandbox. Happily, this now works inside user namespaces, even for unprivileged users. So NIX_REMOTE=local?root=/tmp/nix nix-build \ '' -A test works for non-root users. The downside is that the fix breaks sandbox builds on older kernels (probably pre-4.6), since mounting a devpts fails inside user namespaces for some reason I've never been able to figure out. 
Builds on those systems will fail with error: while setting up the build environment: mounting /dev/pts: Invalid argument Ah well. --- src/libstore/build.cc | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index fc840df81a5..43a8dadf822 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2391,8 +2391,6 @@ void DerivationGoal::runChild() ss.push_back("/dev/tty"); ss.push_back("/dev/urandom"); ss.push_back("/dev/zero"); - ss.push_back("/dev/ptmx"); - ss.push_back("/dev/pts"); createSymlink("/proc/self/fd", chrootRootDir + "/dev/fd"); createSymlink("/proc/self/fd/0", chrootRootDir + "/dev/stdin"); createSymlink("/proc/self/fd/1", chrootRootDir + "/dev/stdout"); @@ -2448,17 +2446,13 @@ void DerivationGoal::runChild() fmt("size=%s", settings.get("sandbox-dev-shm-size", std::string("50%"))).c_str()) == -1) throw SysError("mounting /dev/shm"); -#if 0 - // FIXME: can't figure out how to do this in a user - // namespace. - /* Mount a new devpts on /dev/pts. Note that this requires the kernel to be compiled with CONFIG_DEVPTS_MULTIPLE_INSTANCES=y (which is the case if /dev/ptx/ptmx exists). */ if (pathExists("/dev/pts/ptmx") && !pathExists(chrootRootDir + "/dev/ptmx") - && dirsInChroot.find("/dev/pts") == dirsInChroot.end()) + && !dirsInChroot.count("/dev/pts")) { if (mount("none", (chrootRootDir + "/dev/pts").c_str(), "devpts", 0, "newinstance,mode=0620") == -1) throw SysError("mounting /dev/pts"); @@ -2468,7 +2462,6 @@ void DerivationGoal::runChild() Linux versions, it is created with permissions 0. */ chmod_(chrootRootDir + "/dev/pts/ptmx", 0666); } -#endif /* Do the chroot(). */ if (chdir(chrootRootDir.c_str()) == -1) From 3b8946e09a55a27c01da863d8c6e008e3fe4076e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Mar 2017 18:30:50 +0200 Subject: [PATCH 0209/2196] Maybe fix the RPM build http://hydra.nixos.org/build/51095532 --- nix.spec.in | 5 ----- 1 file changed, 5 deletions(-) diff --git a/nix.spec.in b/nix.spec.in index 0c9b9ab2013..e879352fe82 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -16,12 +16,7 @@ Source0: %{name}-%{version}.tar.bz2 %if 0%{?el5} BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX) %endif -BuildRequires: perl(DBD::SQLite) -BuildRequires: perl(DBI) -BuildRequires: perl(ExtUtils::ParseXS) -Requires: /usr/bin/perl Requires: curl -Requires: perl-DBD-SQLite Requires: bzip2 Requires: gzip Requires: xz From dfcd78d851c14355a09f5b8845042ade098e739a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 3 Apr 2017 16:25:20 +0200 Subject: [PATCH 0210/2196] Really fix the RPM build --- nix.spec.in | 7 ------- 1 file changed, 7 deletions(-) diff --git a/nix.spec.in b/nix.spec.in index e879352fe82..390893d64dc 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -87,11 +87,6 @@ the emacs-%{name} package to edit Nix expressions with GNU Emacs. %prep %setup -q -# Install Perl modules to vendor_perl -# configure.ac need to be changed to make this global; however, this will -# also affect NixOS. Use discretion. -%{__sed} -i 's|perl5/site_perl/$perlversion/$perlarchname|perl5/vendor_perl|' \ - configure %build @@ -164,8 +159,6 @@ systemctl start nix-daemon.socket %files %{_bindir}/nix* %{_libdir}/*.so -%{perl_vendorarch}/* -%exclude %dir %{perl_vendorarch}/auto/ %{_prefix}/libexec/* %if ! 
0%{?rhel} || 0%{?rhel} >= 7 %{_prefix}/lib/systemd/system/nix-daemon.socket From 488792a87d3dea18109fee7fa987f7edf6ba1149 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Apr 2017 17:40:50 +0200 Subject: [PATCH 0211/2196] Make /var/run/nscd/socket optional Not every distribution uses nscd. --- src/libstore/build.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 43a8dadf822..14982424d5e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2405,7 +2405,8 @@ void DerivationGoal::runChild() ss.push_back("/etc/nsswitch.conf"); ss.push_back("/etc/services"); ss.push_back("/etc/hosts"); - ss.push_back("/var/run/nscd/socket"); + if (pathExists("/var/run/nscd/socket")) + ss.push_back("/var/run/nscd/socket"); } for (auto & i : ss) dirsInChroot[i] = i; From 8decb07c31581febab664bedde12c8bf1367279e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Apr 2017 17:54:16 +0200 Subject: [PATCH 0212/2196] Allow default sandbox paths to be overridden E.g. you can now redirect /etc/resolv.conf to a different file.
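The fix below is a one-line change (dirsInChroot[i] = i becomes dirsInChroot.emplace(i, i)), and the reason it works is easy to miss: emplace() does not overwrite an existing key, so a user-specified mapping inserted earlier is no longer clobbered by the default "map the path to itself" entry. A toy, self-contained demonstration (standard C++; the /tmp path is made up):

    #include <iostream>
    #include <map>
    #include <string>

    int main()
    {
        std::map<std::string, std::string> dirsInChroot;

        // User-specified sandbox path (hypothetical), added before the defaults:
        dirsInChroot["/etc/resolv.conf"] = "/tmp/my-resolv.conf";

        // Old behaviour: operator[] clobbers the user's redirection.
        // dirsInChroot["/etc/resolv.conf"] = "/etc/resolv.conf";

        // New behaviour: emplace() is a no-op for keys that already exist.
        dirsInChroot.emplace("/etc/resolv.conf", "/etc/resolv.conf");

        std::cout << dirsInChroot["/etc/resolv.conf"] << std::endl; // /tmp/my-resolv.conf
    }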
@@ -27,14 +27,16 @@ private: public: - LRUCache(size_t maxSize) : maxSize(maxSize) { } + LRUCache(size_t capacity) : capacity(capacity) { } /* Insert or upsert an item in the cache. */ void upsert(const Key & key, const Value & value) { + if (capacity == 0) return; + erase(key); - if (data.size() >= maxSize) { + if (data.size() >= capacity) { /* Retire the oldest item. */ auto oldest = lru.begin(); data.erase(*oldest); diff --git a/src/libutil/sync.hh b/src/libutil/sync.hh index 2aa074299b2..611c900e0a3 100644 --- a/src/libutil/sync.hh +++ b/src/libutil/sync.hh @@ -33,6 +33,7 @@ public: Sync() { } Sync(const T & data) : data(data) { } + Sync(T && data) noexcept : data(std::move(data)) { } class Lock { diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index ab5826b0d1a..b6a46642c7c 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -637,7 +637,10 @@ static void processConnection(bool trusted) #endif /* Open the store. */ - auto store = make_ref(Store::Params()); // FIXME: get params from somewhere + Store::Params params; // FIXME: get params from somewhere + // Disable caching since the client already does that. + params["path-info-cache-size"] = "0"; + auto store = make_ref(params); stopWork(); to.flush(); From 6b5e2711632c345f2b6a371ef8a859106436980a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 6 Apr 2017 15:22:37 +0200 Subject: [PATCH 0214/2196] Add a method to allow hydra-queue-runner to flush the path info cache --- src/libstore/store-api.hh | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index f58dbde350c..e07dec49547 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -252,11 +252,6 @@ protected: public: - size_t getCacheSize() - { - return state.lock()->pathInfoCache.size(); - } - virtual ~Store() { } virtual std::string getUri() = 0; @@ -578,6 +573,13 @@ public: virtual std::shared_ptr getBuildLog(const Path & path) { return nullptr; } + /* Hack to allow long-running processes like hydra-queue-runner to + occasionally flush their path info cache. 
*/ + void clearPathInfoCache() + { + state.lock()->pathInfoCache.clear(); + } + protected: Stats stats; From 963f2bf12b4098a3b46059a26d21cdfe27662cdd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 6 Apr 2017 17:18:56 +0200 Subject: [PATCH 0215/2196] Fix bogus "unexpected Nix daemon error: interrupted by the user" --- src/libutil/util.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 99a91c8cc64..a640a64c724 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -487,6 +487,7 @@ void readFull(int fd, unsigned char * buf, size_t count) void writeFull(int fd, const unsigned char * buf, size_t count, bool allowInterrupts) { while (count) { + if (allowInterrupts) checkInterrupt(); ssize_t res = write(fd, (char *) buf, count); if (res == -1 && errno != EINTR) throw SysError("writing to file"); @@ -494,7 +495,6 @@ void writeFull(int fd, const unsigned char * buf, size_t count, bool allowInterr count -= res; buf += res; } - if (allowInterrupts) checkInterrupt(); } } @@ -1212,7 +1212,7 @@ static void signalHandlerThread(sigset_t set) void triggerInterrupt() { - _isInterrupted = 1; + _isInterrupted = true; { auto interruptCallbacks(_interruptCallbacks.lock()); From ba20730b3f7ad6b09aa86b66748df575e56d442e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 6 Apr 2017 18:40:19 +0200 Subject: [PATCH 0216/2196] Implement RemoteStore::queryMissing() This provides a significant speedup, e.g. 64 s -> 12 s for nix-build --dry-run -I nixpkgs=channel:nixos-16.03 '' -A test on a cold local and CloudFront cache. The alternative is to use lots of concurrent daemon connections but that seems wasteful. --- src/libstore/remote-store.cc | 25 +++++++++++++++++++++++++ src/libstore/remote-store.hh | 4 ++++ src/libstore/store-api.hh | 2 +- src/libstore/worker-protocol.hh | 5 +++-- src/nix-daemon/nix-daemon.cc | 11 +++++++++++ 5 files changed, 44 insertions(+), 3 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index a1f2db5b0ec..c9c59078745 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -588,6 +588,31 @@ void RemoteStore::addSignatures(const Path & storePath, const StringSet & sigs) } +void RemoteStore::queryMissing(const PathSet & targets, + PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, + unsigned long long & downloadSize, unsigned long long & narSize) +{ + { + auto conn(connections->get()); + if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 19) + // Don't hold the connection handle in the fallback case + // to prevent a deadlock. 
+ goto fallback; + conn->to << wopQueryMissing << targets; + conn->processStderr(); + willBuild = readStorePaths(*this, conn->from); + willSubstitute = readStorePaths(*this, conn->from); + unknown = readStorePaths(*this, conn->from); + conn->from >> downloadSize >> narSize; + return; + } + + fallback: + return Store::queryMissing(targets, willBuild, willSubstitute, + unknown, downloadSize, narSize); +} + + RemoteStore::Connection::~Connection() { try { diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index a08bd305639..db8da7eaa8c 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -85,6 +85,10 @@ public: void addSignatures(const Path & storePath, const StringSet & sigs) override; + void queryMissing(const PathSet & targets, + PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, + unsigned long long & downloadSize, unsigned long long & narSize) override; + protected: struct Connection diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index e07dec49547..68c59a9f292 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -524,7 +524,7 @@ public: /* Given a set of paths that are to be built, return the set of derivations that will be built, and the set of output paths that will be substituted. */ - void queryMissing(const PathSet & targets, + virtual void queryMissing(const PathSet & targets, PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, unsigned long long & downloadSize, unsigned long long & narSize); diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 6a4ed47cc9f..6c6766b3612 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -6,7 +6,7 @@ namespace nix { #define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_2 0x6478696f -#define PROTOCOL_VERSION 0x112 +#define PROTOCOL_VERSION 0x113 #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) @@ -47,7 +47,8 @@ typedef enum { wopBuildDerivation = 36, wopAddSignatures = 37, wopNarFromPath = 38, - wopAddToStoreNar = 39 + wopAddToStoreNar = 39, + wopQueryMissing = 40, } WorkerOp; diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index b6a46642c7c..8786e2561b9 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -592,6 +592,17 @@ static void performOp(ref store, bool trusted, unsigned int clientVe break; } + case wopQueryMissing: { + PathSet targets = readStorePaths(*store, from); + startWork(); + PathSet willBuild, willSubstitute, unknown; + unsigned long long downloadSize, narSize; + store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); + stopWork(); + to << willBuild << willSubstitute << unknown << downloadSize << narSize; + break; + } + default: throw Error(format("invalid operation %1%") % op); } From 98283915f54df275170cc1c8bdaa0b00d343cd69 Mon Sep 17 00:00:00 2001 From: Dan Peebles Date: Thu, 6 Apr 2017 18:18:43 +0000 Subject: [PATCH 0217/2196] Retry downloads on transient SSL errors too --- src/libstore/download.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 78fcdc62189..d1f760fdc30 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -300,6 +300,8 @@ struct CurlDownloader : public Downloader || httpStatus == 504 || httpStatus == 522 || httpStatus == 524 || code == CURLE_COULDNT_RESOLVE_HOST || code == CURLE_RECV_ERROR + // this is a generic SSL failure 
that in some cases (e.g., certificate error) is permanent but also appears in transient cases, so we consider it retryable + || code == CURLE_SSL_CONNECT_ERROR #if LIBCURL_VERSION_NUM >= 0x073200 || code == CURLE_HTTP2 || code == CURLE_HTTP2_STREAM From 30f89e0d6564394747e20e04d486b81aeec96752 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 5 Apr 2017 07:34:10 -0500 Subject: [PATCH 0218/2196] Process nix.conf options in "new" nix commands, add test. Without this (minor) change, the options set using "--option" or read from nix.conf were parsed but not used. --- src/nix/main.cc | 1 + tests/timeout.sh | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/src/nix/main.cc b/src/nix/main.cc index 440ced97dfc..fdb8f6e3a19 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -42,6 +42,7 @@ void mainWrapped(int argc, char * * argv) NixArgs args; args.parseCmdline(argvToStrings(argc, argv)); + settings.update(); assert(args.command); diff --git a/tests/timeout.sh b/tests/timeout.sh index ce1ae7d674a..77b227e89ba 100644 --- a/tests/timeout.sh +++ b/tests/timeout.sh @@ -29,3 +29,8 @@ if nix-build timeout.nix -A closeLog; then echo "build should have failed" exit 1 fi + +if nix build -f timeout.nix silent --option build-max-silent-time 2; then + echo "build should have failed" + exit 1 +fi From a0c56197fcb6d13e77ccbdd28322a540352b880c Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 5 Apr 2017 08:13:07 -0500 Subject: [PATCH 0219/2196] tests/timeout: create output so tests don't trivially pass Timeout tests rely on failed build to determine success, so make sure these derivations (silent in particular) don't fail regardless of timeout behavior. --- tests/timeout.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/timeout.nix b/tests/timeout.nix index 540fba934ff..e18d717eff1 100644 --- a/tests/timeout.nix +++ b/tests/timeout.nix @@ -5,6 +5,7 @@ with import ./config.nix; infiniteLoop = mkDerivation { name = "timeout"; buildCommand = '' + touch $out echo "‘timeout’ builder entering an infinite loop" while true ; do echo -n .; done ''; @@ -13,6 +14,7 @@ with import ./config.nix; silent = mkDerivation { name = "silent"; buildCommand = '' + touch $out sleep 60 ''; }; @@ -20,6 +22,7 @@ with import ./config.nix; closeLog = mkDerivation { name = "silent"; buildCommand = '' + touch $out exec > /dev/null 2>&1 sleep 1000000000 ''; From f12a048a0595ea75453c6b67a3b72da8106fc6ad Mon Sep 17 00:00:00 2001 From: Neil Mayhew Date: Sat, 8 Apr 2017 18:31:11 -0600 Subject: [PATCH 0220/2196] Propagate NIX_BUILD_CORES to nix-shell environments --- src/nix-build/nix-build.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index b4206033cf5..065447684aa 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -406,6 +406,7 @@ int main(int argc, char ** argv) env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; env["NIX_STORE"] = store->storeDir; + env["NIX_BUILD_CORES"] = settings.buildCores; for (auto & var : drv.env) env[var.first] = var.second; From 95295482ea629fd21992f564b8e4f1710a7a61e6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 6 Apr 2017 19:01:05 +0200 Subject: [PATCH 0221/2196] Allow "auto" as a store URI Using the empty string is likely to be ambiguous in some contexts. 
--- src/libstore/store-api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 59348c5d0b5..53c802044ea 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -731,7 +731,7 @@ StoreType getStoreType(const std::string & uri, const std::string & stateDir) return tDaemon; } else if (uri == "local") { return tLocal; - } else if (uri == "") { + } else if (uri == "" || uri == "auto") { if (access(stateDir.c_str(), R_OK | W_OK) == 0) return tLocal; else if (pathExists(settings.nixDaemonSocketFile)) From 105f8ffc98d25e23c9a7eb5a5327e711b1e21e50 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 10 Apr 2017 11:12:45 +0200 Subject: [PATCH 0222/2196] Minor cleanup Also, possible fix for #1310 on 32-bit systems. --- src/libstore/gc.cc | 2 +- src/libstore/local-store.hh | 11 +++-------- src/libstore/store-api.hh | 7 +------ 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 8e90913cc3f..0b03d61a789 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -679,7 +679,7 @@ void LocalStore::removeUnusedLinks(const GCState & state) if (unlink(path.c_str()) == -1) throw SysError(format("deleting ‘%1%’") % path); - state.results.bytesFreed += st.st_blocks * 512; + state.results.bytesFreed += st.st_blocks * 512ULL; } struct stat st; diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 28e9a31c9fe..750da0c142d 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -26,14 +26,9 @@ struct Derivation; struct OptimiseStats { - unsigned long filesLinked; - unsigned long long bytesFreed; - unsigned long long blocksFreed; - OptimiseStats() - { - filesLinked = 0; - bytesFreed = blocksFreed = 0; - } + unsigned long filesLinked = 0; + unsigned long long bytesFreed = 0; + unsigned long long blocksFreed = 0; }; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 68c59a9f292..c0a52145af5 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -81,12 +81,7 @@ struct GCResults /* For `gcReturnDead', `gcDeleteDead' and `gcDeleteSpecific', the number of bytes that would be or was freed. */ - unsigned long long bytesFreed; - - GCResults() - { - bytesFreed = 0; - } + unsigned long long bytesFreed = 0; }; From 53edb55588e362ee6aa8f6b436c6db6b0409b615 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 10 Apr 2017 11:50:01 +0200 Subject: [PATCH 0223/2196] shell.nix: Remove obsolete flags --- shell.nix | 2 -- 1 file changed, 2 deletions(-) diff --git a/shell.nix b/shell.nix index df0ad01df58..17cfbfbe6f9 100644 --- a/shell.nix +++ b/shell.nix @@ -22,8 +22,6 @@ with import {}; configureFlags = [ "--disable-init-state" "--enable-gc" - "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" - "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" ]; enableParallelBuilding = true; From e43e8be8e76b4457c1dfe7578870b2c995dc6a43 Mon Sep 17 00:00:00 2001 From: Dan Peebles Date: Mon, 10 Apr 2017 09:22:24 -0400 Subject: [PATCH 0224/2196] Default to 5 download retries This should help certain downloaders that don't request anything special for the number of retries, like nix-channel. 
--- src/libstore/download.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/download.hh b/src/libstore/download.hh index e2e16b36103..62f3860b9da 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -15,7 +15,7 @@ struct DownloadRequest bool verifyTLS = true; enum { yes, no, automatic } showProgress = yes; bool head = false; - size_t tries = 1; + size_t tries = 5; unsigned int baseRetryTimeMs = 250; DownloadRequest(const std::string & uri) : uri(uri) { } From d1fdade75562d711ec62e9cdf953f2a6b0e891ce Mon Sep 17 00:00:00 2001 From: Dan Peebles Date: Mon, 10 Apr 2017 09:28:44 -0400 Subject: [PATCH 0225/2196] Add CURLE_WRITE_ERROR as a transient error condition We've observed it failing downloads in the wild and retrying the same URL a few moments later seemed to fix it. --- src/libstore/download.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index d1f760fdc30..f8f57869503 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -300,6 +300,11 @@ struct CurlDownloader : public Downloader || httpStatus == 504 || httpStatus == 522 || httpStatus == 524 || code == CURLE_COULDNT_RESOLVE_HOST || code == CURLE_RECV_ERROR + + // this seems to occur occasionally for retriable reasons, and shows up in an error like this: + // curl: (23) Failed writing body (315 != 16366) + || code == CURLE_WRITE_ERROR + // this is a generic SSL failure that in some cases (e.g., certificate error) is permanent but also appears in transient cases, so we consider it retryable || code == CURLE_SSL_CONNECT_ERROR #if LIBCURL_VERSION_NUM >= 0x073200 From 915f62fa19790d8f826aeb4dd3d2bb5bde2f67e9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 10 Apr 2017 17:22:52 +0200 Subject: [PATCH 0226/2196] shell.nix: Remove more dependencies Thanks @copumpkin. --- shell.nix | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/shell.nix b/shell.nix index 17cfbfbe6f9..425eb0a191f 100644 --- a/shell.nix +++ b/shell.nix @@ -6,7 +6,7 @@ with import {}; name = "nix"; buildInputs = - [ curl bison flex perl libxml2 libxslt + [ curl bison flex libxml2 libxslt bzip2 xz brotli pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl @@ -16,7 +16,6 @@ with import {}; customMemoryManagement = false; }) autoreconfHook - perlPackages.DBDSQLite ]; configureFlags = From 503cc4431b8fa008caca7d06026dcfcab8626884 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Mon, 10 Apr 2017 18:16:36 -0400 Subject: [PATCH 0227/2196] nix-channel: error out if direct tarball unpack fails. 
It's very unlikely a path ending in .tar.gz is a directory Fixes #1318 --- src/nix-channel/nix-channel.cc | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 36162782312..0f50f6242c4 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -103,12 +103,9 @@ static void update(const StringSet & channelNames) auto unpacked = false; if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) { - try { - runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import " - "{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" }); - unpacked = true; - } catch (ExecError & e) { - } + runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import " + "{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" }); + unpacked = true; } if (!unpacked) { From b134c2d05280be9615c81f018a5309fc8dcbca0e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 11 Apr 2017 15:41:21 +0200 Subject: [PATCH 0228/2196] Drop WWW::Curl dependency Somehow this came back after d1da6967b8891763ce04d668027cf300c9bbf0b2. --- perl/configure.ac | 13 +------------ release.nix | 1 - 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/perl/configure.ac b/perl/configure.ac index d617c78535f..7a6b28be23e 100644 --- a/perl/configure.ac +++ b/perl/configure.ac @@ -52,7 +52,7 @@ PKG_CHECK_MODULES([SODIUM], [libsodium], have_sodium=1], [have_sodium=]) AC_SUBST(HAVE_SODIUM, [$have_sodium]) -# Check for the required Perl dependencies (DBI, DBD::SQLite and WWW::Curl). +# Check for the required Perl dependencies (DBI and DBD::SQLite). perlFlags="-I$perllibdir" AC_ARG_WITH(dbi, AC_HELP_STRING([--with-dbi=PATH], @@ -63,10 +63,6 @@ AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH], [prefix of the Perl DBD::SQLite library]), perlFlags="$perlFlags -I$withval") -AC_ARG_WITH(www-curl, AC_HELP_STRING([--with-www-curl=PATH], - [prefix of the Perl WWW::Curl library]), - perlFlags="$perlFlags -I$withval") - AC_MSG_CHECKING([whether DBD::SQLite works]) if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then AC_MSG_RESULT(no) @@ -74,13 +70,6 @@ if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then fi AC_MSG_RESULT(yes) -AC_MSG_CHECKING([whether WWW::Curl works]) -if ! $perl $perlFlags -e 'use WWW::Curl;' 2>&5; then - AC_MSG_RESULT(no) - AC_MSG_FAILURE([The Perl module WWW::Curl is missing.]) -fi -AC_MSG_RESULT(yes) - AC_SUBST(perlFlags) PKG_CHECK_MODULES([NIX], [nix-store]) diff --git a/release.nix b/release.nix index 8727c2520b1..6136f650d95 100644 --- a/release.nix +++ b/release.nix @@ -116,7 +116,6 @@ let configureFlags = '' --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix} --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix} - --with-www-curl=${perlPackages.WWWCurl}/${pkgs.perl.libPrefix} ''; enableParallelBuilding = true; From d267db0d758c7a926168cacdbaa7a9800564af11 Mon Sep 17 00:00:00 2001 From: Benjamin Staffin Date: Wed, 12 Apr 2017 18:01:43 -0400 Subject: [PATCH 0229/2196] Move note about float support out of the wrong release notes Looks like this snuck into the 1.11 release notes post-release, but float support isn't actually present until 1.12. 
--- doc/manual/release-notes/rl-1.11.xml | 7 ------- doc/manual/release-notes/rl-1.12.xml | 7 +++++++ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/manual/release-notes/rl-1.11.xml b/doc/manual/release-notes/rl-1.11.xml index efb03d61393..fe422dd1f89 100644 --- a/doc/manual/release-notes/rl-1.11.xml +++ b/doc/manual/release-notes/rl-1.11.xml @@ -121,13 +121,6 @@ $ diffoscope /nix/store/11a27shh6n2i…-zlib-1.2.8 /nix/store/11a27shh6n2i…-zl also improves performance. - - The Nix language now supports floating point numbers. They are - based on regular C++ float and compatible with - existing integers and number-related operations. Export and import to and - from JSON and XML works, too. - - All "chroot"-containing strings got renamed to "sandbox". In particular, some Nix options got renamed, but the old names diff --git a/doc/manual/release-notes/rl-1.12.xml b/doc/manual/release-notes/rl-1.12.xml index d6864b3f55d..b7f45fc44a3 100644 --- a/doc/manual/release-notes/rl-1.12.xml +++ b/doc/manual/release-notes/rl-1.12.xml @@ -17,6 +17,13 @@ have write access to the Nix database. + + The Nix language now supports floating point numbers. They are + based on regular C++ float and compatible with + existing integers and number-related operations. Export and import to and + from JSON and XML works, too. + + This release has contributions from TBD. From 31cc9366fc81c3f478b31a206daa6b38801cd3b1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Apr 2017 14:52:49 +0200 Subject: [PATCH 0230/2196] Initialise logger --- perl/lib/Nix/Store.xs | 1 - src/libmain/shared.cc | 2 -- src/libutil/logging.cc | 2 +- 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index f613e3df329..46b41f92392 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -25,7 +25,6 @@ static ref store() static std::shared_ptr _store; if (!_store) { try { - logger = makeDefaultLogger(); settings.loadConfFile(); settings.update(); settings.lockCPU = false; diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index a720afd6cdd..c1828aa7db8 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -106,8 +106,6 @@ void initNix() std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf)); #endif - logger = makeDefaultLogger(); - /* Initialise OpenSSL locking. 
*/ opensslLocks = std::vector(CRYPTO_num_locks()); CRYPTO_set_locking_callback(opensslLockCallback); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index d9e8d22d768..53f6260b788 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -3,7 +3,7 @@ namespace nix { -Logger * logger = 0; +Logger * logger = makeDefaultLogger(); class SimpleLogger : public Logger { From 6d97d816565505606792050131b5d4d7fca33245 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Apr 2017 14:53:10 +0200 Subject: [PATCH 0231/2196] Add warn function --- src/libutil/logging.cc | 7 ++++++- src/libutil/logging.hh | 10 ++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 53f6260b788..afcc2ec5854 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -5,6 +5,11 @@ namespace nix { Logger * logger = makeDefaultLogger(); +void Logger::warn(const std::string & msg) +{ + log(lvlInfo, ANSI_RED "warning:" ANSI_NORMAL " " + msg); +} + class SimpleLogger : public Logger { public: @@ -52,7 +57,7 @@ Verbosity verbosity = lvlInfo; void warnOnce(bool & haveWarned, const FormatOrString & fs) { if (!haveWarned) { - printError(format("warning: %1%") % fs.s); + warn(fs.s); haveWarned = true; } } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 3f83664794f..81aebccdca4 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -30,6 +30,8 @@ public: log(lvlInfo, fs); } + virtual void warn(const std::string & msg); + virtual void setExpected(const std::string & label, uint64_t value = 1) { } virtual void setProgress(const std::string & label, uint64_t value = 1) { } virtual void incExpected(const std::string & label, uint64_t value = 1) { } @@ -82,6 +84,14 @@ extern Verbosity verbosity; /* suppress msgs > this */ #define debug(args...) printMsg(lvlDebug, args) #define vomit(args...) printMsg(lvlVomit, args) +template +inline void warn(const std::string & fs, Args... args) +{ + boost::format f(fs); + formatHelper(f, args...); + logger->warn(f.str()); +} + void warnOnce(bool & haveWarned, const FormatOrString & fs); void writeToStderr(const string & s); From 568a099c889e7ccc5a49b15575078e99acf8bc2f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Apr 2017 15:32:43 +0200 Subject: [PATCH 0232/2196] canonPath(): Check against empty paths --- src/libutil/util.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index a640a64c724..0bd51afd1a9 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -96,6 +96,8 @@ Path absPath(Path path, Path dir) Path canonPath(const Path & path, bool resolveSymlinks) { + assert(path != ""); + string s; if (path[0] != '/') From 2040240e238a41c2eb799bf4dbf394fec297ac16 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Apr 2017 15:55:38 +0200 Subject: [PATCH 0233/2196] Add a Config class to simplify adding configuration settings The typical use is to inherit Config and add Setting members: class MyClass : private Config { Setting<int> foo{this, 123, "foo", "the number of foos to use"}; Setting<std::string> bar{this, "blabla", "bar", "the name of the bar"}; MyClass() : Config(readConfigFile("/etc/my-app.conf")) { std::cout << foo << "\n"; // will print 123 unless overridden } }; Currently, this is used by Store and its subclasses for store parameters. You now get a warning if you specify a non-existent store parameter in a store URI.
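To complement the example above, here is a rough sketch, using only the Config/Setting API introduced in config.hh below (DemoConfig and its settings are invented for illustration), of how a setting is overridden by name at run time; this is the mechanism the store-parameter handling builds on. Public inheritance is used so the caller can reach set() and dump() directly.

    #include <iostream>
    #include "config.hh"

    struct DemoConfig : nix::Config
    {
        nix::Setting<int> retries{this, 3, "retries", "how many times to retry"};
        nix::Setting<bool> verbose{this, false, "verbose", "whether to be chatty"};

        DemoConfig() : nix::Config({}) { }
    };

    void example()
    {
        DemoConfig cfg;
        cfg.set("retries", "7");      // dispatched to Setting<int>::set()
        cfg.set("verbose", "true");   // Setting<bool> parses "true"/"1" (see config.cc below)
        int n = cfg.retries;          // n == 7, via the implicit conversion
        std::cout << n << "\n" << cfg.dump();   // "7", then "retries = 7\nverbose = true\n"
    }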
--- src/libstore/binary-cache-store.cc | 3 - src/libstore/binary-cache-store.hh | 10 +- src/libstore/build.cc | 8 +- src/libstore/legacy-ssh-store.cc | 10 +- src/libstore/local-fs-store.cc | 9 +- src/libstore/local-store.cc | 5 +- src/libstore/local-store.hh | 6 +- src/libstore/remote-store.cc | 2 +- src/libstore/remote-store.hh | 3 + src/libstore/s3-binary-cache-store.cc | 12 +- src/libstore/ssh-store.cc | 7 +- src/libstore/store-api.cc | 9 +- src/libstore/store-api.hh | 25 ++++- src/libutil/config.cc | 112 +++++++++++++++++++ src/libutil/config.hh | 151 ++++++++++++++++++++++++++ src/libutil/types.hh | 2 + 16 files changed, 334 insertions(+), 40 deletions(-) create mode 100644 src/libutil/config.cc create mode 100644 src/libutil/config.hh diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 25ad0d75b70..b536c6c0004 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -79,10 +79,7 @@ struct BinaryCacheStoreAccessor : public FSAccessor BinaryCacheStore::BinaryCacheStore(const Params & params) : Store(params) - , compression(get(params, "compression", "xz")) - , writeNARListing(get(params, "write-nar-listing", "0") == "1") { - auto secretKeyFile = get(params, "secret-key", ""); if (secretKeyFile != "") secretKey = std::unique_ptr(new SecretKey(readFile(secretKeyFile))); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index d42b1abd245..5c2d0acfdbb 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -13,13 +13,15 @@ struct NarInfo; class BinaryCacheStore : public Store { -private: +public: - std::unique_ptr secretKey; + const Setting compression{this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"}; + const Setting writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"}; + const Setting secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"}; - std::string compression; +private: - bool writeNARListing; + std::unique_ptr secretKey; protected: diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 968e291129f..d9c299d099b 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3051,13 +3051,11 @@ Path DerivationGoal::openLogFile() string baseName = baseNameOf(drvPath); /* Create a log file. */ - Path dir = (format("%1%/%2%/%3%/") % worker.store.logDir % worker.store.drvsLogDir % string(baseName, 0, 2)).str(); + Path dir = fmt("%s/%s/%s/", worker.store.logDir, worker.store.drvsLogDir, string(baseName, 0, 2)); createDirs(dir); - Path logFileName = (format("%1%/%2%%3%") - % dir - % string(baseName, 2) - % (settings.compressLog ? ".bz2" : "")).str(); + Path logFileName = fmt("%s/%s%s", dir, string(baseName, 2), + settings.compressLog ? 
".bz2" : ""); fdLogFile = open(logFileName.c_str(), O_CREAT | O_WRONLY | O_TRUNC | O_CLOEXEC, 0666); if (!fdLogFile) throw SysError(format("creating log file ‘%1%’") % logFileName); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 0e838846c79..befc560bfce 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -12,6 +12,10 @@ static std::string uriScheme = "ssh://"; struct LegacySSHStore : public Store { + const Setting maxConnections{this, 1, "max-connections", "maximum number of concurrent SSH connections"}; + const Setting sshKey{this, "", "ssh-key", "path to an SSH private key"}; + const Setting compress{this, false, "compress", "whether to compress the connection"}; + struct Connection { std::unique_ptr sshConn; @@ -29,16 +33,16 @@ struct LegacySSHStore : public Store : Store(params) , host(host) , connections(make_ref>( - std::max(1, std::stoi(get(params, "max-connections", "1"))), + std::max(1, (int) maxConnections), [this]() { return openConnection(); }, [](const ref & r) { return true; } )) , master( host, - get(params, "ssh-key", ""), + sshKey, // Use SSH master only if using more than 1 connection. connections->capacity() > 1, - get(params, "compress", "") == "true") + compress) { } diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 57e1b8a09fe..bf247903c9d 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -9,9 +9,6 @@ namespace nix { LocalFSStore::LocalFSStore(const Params & params) : Store(params) - , rootDir(get(params, "root")) - , stateDir(canonPath(get(params, "state", rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir))) - , logDir(canonPath(get(params, "log", rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir))) { } @@ -88,6 +85,8 @@ void LocalFSStore::narFromPath(const Path & path, Sink & sink) const string LocalFSStore::drvsLogDir = "drvs"; + + std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) { auto path(path_); @@ -110,8 +109,8 @@ std::shared_ptr LocalFSStore::getBuildLog(const Path & path_) Path logPath = j == 0 - ? (format("%1%/%2%/%3%/%4%") % logDir % drvsLogDir % string(baseName, 0, 2) % string(baseName, 2)).str() - : (format("%1%/%2%/%3%") % logDir % drvsLogDir % baseName).str(); + ? fmt("%s/%s/%s/%s", logDir, drvsLogDir, string(baseName, 0, 2), string(baseName, 2)) + : fmt("%s/%s/%s", logDir, drvsLogDir, baseName); Path logBz2Path = logPath + ".bz2"; if (pathExists(logPath)) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 8610841d722..0ea897526bb 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -38,13 +38,14 @@ namespace nix { LocalStore::LocalStore(const Params & params) : Store(params) , LocalFSStore(params) - , realStoreDir(get(params, "real", rootDir != "" ? rootDir + "/nix/store" : storeDir)) + , realStoreDir_{this, false, rootDir != "" ? 
rootDir + "/nix/store" : storeDir, "real", + "physical path to the Nix store"} + , realStoreDir(realStoreDir_) , dbDir(stateDir + "/db") , linksDir(realStoreDir + "/.links") , reservedPath(dbDir + "/reserved") , schemaPath(dbDir + "/schema") , trashDir(realStoreDir + "/trash") - , requireSigs(trim(settings.get("signed-binary-caches", std::string("*"))) != "") // FIXME: rename option , publicKeys(getDefaultPublicKeys()) { auto state(_state.lock()); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 750da0c142d..fec67ee7d9e 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -67,6 +67,8 @@ private: public: + PathSetting realStoreDir_; + const Path realStoreDir; const Path dbDir; const Path linksDir; @@ -76,7 +78,9 @@ public: private: - bool requireSigs; + Setting requireSigs{(Store*) this, + trim(settings.get("signed-binary-caches", std::string("*"))) != "", + "require-sigs", "whether store paths should have a trusted signature on import"}; PublicKeys publicKeys; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index c9c59078745..e1df137e4db 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -43,7 +43,7 @@ template Paths readStorePaths(Store & store, Source & from); RemoteStore::RemoteStore(const Params & params) : Store(params) , connections(make_ref>( - std::max(1, std::stoi(get(params, "max-connections", "1"))), + std::max(1, (int) maxConnections), [this]() { return openConnectionWrapper(); }, [](const ref & r) { return r->to.good() && r->from.good(); } )) diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index db8da7eaa8c..479cf3a7909 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -22,6 +22,9 @@ class RemoteStore : public virtual Store { public: + const Setting maxConnections{(Store*) this, 1, + "max-connections", "maximum number of concurrent connections to the Nix daemon"}; + RemoteStore(const Params & params); /* Implementations of abstract store API methods. 
*/ diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 3053f908c4e..24545529601 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -125,22 +125,22 @@ S3Helper::DownloadResult S3Helper::getObject( struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { + const Setting region{this, Aws::Region::US_EAST_1, "region", {"aws-region"}}; + const Setting narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"}; + const Setting lsCompression{this, "", "ls-compression", "compression method for .ls files"}; + const Setting logCompression{this, "", "log-compression", "compression method for log/* files"}; + std::string bucketName; Stats stats; S3Helper s3Helper; - std::string narinfoCompression, lsCompression, logCompression; - S3BinaryCacheStoreImpl( const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) - , s3Helper(get(params, "aws-region", Aws::Region::US_EAST_1)) - , narinfoCompression(get(params, "narinfo-compression", "")) - , lsCompression(get(params, "ls-compression", "")) - , logCompression(get(params, "log-compression", "")) + , s3Helper(region) { diskCache = getNarInfoDiskCache(); } diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 2a81a8b1ebe..bb536fadfd5 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -14,16 +14,19 @@ class SSHStore : public RemoteStore { public: + const Setting sshKey{(Store*) this, "", "ssh-key", "path to an SSH private key"}; + const Setting compress{(Store*) this, false, "compress", "whether to compress the connection"}; + SSHStore(const std::string & host, const Params & params) : Store(params) , RemoteStore(params) , host(host) , master( host, - get(params, "ssh-key", ""), + sshKey, // Use SSH master only if using more than 1 connection. 
connections->capacity() > 1, - get(params, "compress", "") == "true") + compress) { } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 53c802044ea..cb62bdc0b62 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -241,8 +241,8 @@ Path Store::computeStorePathForText(const string & name, const string & s, Store::Store(const Params & params) - : storeDir(get(params, "store", settings.nixStore)) - , state({std::stoi(get(params, "path-info-cache-size", "65536"))}) + : Config(params) + , state({(size_t) pathInfoCacheSize}) { } @@ -718,7 +718,10 @@ ref openStore(const std::string & uri, const Store::Params & params) { for (auto fun : *RegisterStoreImplementation::implementations) { auto store = fun(uri, params); - if (store) return ref(store); + if (store) { + store->warnUnused(); + return ref(store); + } } throw Error(format("don't know how to open Nix store ‘%s’") % uri); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index c0a52145af5..067309c9e95 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -6,6 +6,7 @@ #include "lru-cache.hh" #include "sync.hh" #include "globals.hh" +#include "config.hh" #include #include @@ -224,13 +225,17 @@ struct BuildResult }; -class Store : public std::enable_shared_from_this +class Store : public std::enable_shared_from_this, public Config { public: typedef std::map Params; - const Path storeDir; + const PathSetting storeDir_{this, false, settings.nixStore, + "store", "path to the Nix store"}; + const Path storeDir = storeDir_; + + const Setting pathInfoCacheSize{this, 65536, "path-info-cache-size", "size of the in-memory store path information cache"}; protected: @@ -585,9 +590,19 @@ protected: class LocalFSStore : public virtual Store { public: - const Path rootDir; - const Path stateDir; - const Path logDir; + + // FIXME: the (Store*) cast works around a bug in gcc that causes + // it to emit the call to the Option constructor. Clang works fine + // either way. + const PathSetting rootDir{(Store*) this, true, "", + "root", "directory prefixed to all other paths"}; + const PathSetting stateDir{(Store*) this, false, + rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir, + "state", "directory where Nix will store state"}; + const PathSetting logDir{(Store*) this, false, + rootDir != "" ? 
rootDir + "/nix/var/log/nix" : settings.nixLogDir, + "log", "directory where Nix will store state"}; + const static string drvsLogDir; LocalFSStore(const Params & params); diff --git a/src/libutil/config.cc b/src/libutil/config.cc new file mode 100644 index 00000000000..2f9f988607e --- /dev/null +++ b/src/libutil/config.cc @@ -0,0 +1,112 @@ +#include "config.hh" +#include "args.hh" + +namespace nix { + +void Config::set(const std::string & name, const std::string & value) +{ + auto i = _settings.find(name); + if (i == _settings.end()) + throw UsageError("unknown setting '%s'", name); + i->second.setting->set(value); +} + +void Config::add(AbstractSetting * setting) +{ + _settings.emplace(setting->name, Config::SettingData{false, setting}); + for (auto & alias : setting->aliases) + _settings.emplace(alias, Config::SettingData{true, setting}); + + bool set = false; + + auto i = initials.find(setting->name); + if (i != initials.end()) { + setting->set(i->second); + initials.erase(i); + set = true; + } + + for (auto & alias : setting->aliases) { + auto i = initials.find(alias); + if (i != initials.end()) { + if (set) + warn("setting '%s' is set, but it's an alias of '%s' which is also set", + alias, setting->name); + else { + setting->set(i->second); + initials.erase(i); + set = true; + } + } + } +} + +void Config::warnUnused() +{ + for (auto & i : initials) + warn("unknown setting '%s'", i.first); +} + +std::string Config::dump() +{ + std::string res; + for (auto & opt : _settings) + if (!opt.second.isAlias) + res += opt.first + " = " + opt.second.setting->to_string() + "\n"; + return res; +} + +AbstractSetting::AbstractSetting( + const std::string & name, + const std::string & description, + const std::set & aliases) + : name(name), description(description), aliases(aliases) +{ +} + +template<> void Setting::set(const std::string & str) +{ + value = str; +} + +template<> std::string Setting::to_string() +{ + return value; +} + +template<> void Setting::set(const std::string & str) +{ + try { + value = std::stoi(str); + } catch (...) { + throw UsageError("setting '%s' has invalid value '%s'", name, str); + } +} + +template<> std::string Setting::to_string() +{ + return std::to_string(value); +} + +template<> void Setting::set(const std::string & str) +{ + value = str == "true" || str == "1"; +} + +template<> std::string Setting::to_string() +{ + return value ? "true" : "false"; +} + +void PathSetting::set(const std::string & str) +{ + if (str == "") { + if (allowEmpty) + value = ""; + else + throw UsageError("setting '%s' cannot be empty", name); + } else + value = canonPath(str); +} + +} diff --git a/src/libutil/config.hh b/src/libutil/config.hh new file mode 100644 index 00000000000..fb2d48e9c83 --- /dev/null +++ b/src/libutil/config.hh @@ -0,0 +1,151 @@ +#include +#include + +#include "types.hh" + +#pragma once + +namespace nix { + +class Args; +class AbstractSetting; + +/* A class to simplify providing configuration settings. 
The typical + use is to inherit Config and add Setting members: + + class MyClass : private Config + { + Setting foo{this, 123, "foo", "the number of foos to use"}; + Setting bar{this, "blabla", "bar", "the name of the bar"}; + + MyClass() : Config(readConfigFile("/etc/my-app.conf")) + { + std::cout << foo << "\n"; // will print 123 unless overriden + } + }; +*/ + +class Config +{ + friend class AbstractSetting; + + struct SettingData + { + bool isAlias = false; + AbstractSetting * setting; + }; + + std::map _settings; + + StringMap initials; + +public: + + Config(const StringMap & initials) + : initials(initials) + { } + + void set(const std::string & name, const std::string & value); + + void add(AbstractSetting * setting); + + void warnUnused(); + + std::string dump(); +}; + +class AbstractSetting +{ + friend class Config; + +public: + + const std::string name; + const std::string description; + const std::set aliases; + + int created = 123; + +protected: + + AbstractSetting( + const std::string & name, + const std::string & description, + const std::set & aliases); + + virtual ~AbstractSetting() + { + // Check against a gcc miscompilation causing our constructor + // not to run. + assert(created == 123); + } + + virtual void set(const std::string & value) = 0; + + virtual std::string to_string() = 0; +}; + +/* A setting of type T. */ +template +class Setting : public AbstractSetting +{ +protected: + + T value; + +public: + + Setting(Config * options, + const T & def, + const std::string & name, + const std::string & description, + const std::set & aliases = {}) + : AbstractSetting(name, description, aliases) + , value(def) + { + options->add(this); + } + + operator const T &() const { return value; } + bool operator ==(const T & v2) const { return value == v2; } + bool operator !=(const T & v2) const { return value != v2; } + void operator =(const T & v) { value = v; } + + void set(const std::string & str) override; + + std::string to_string() override; +}; + +template +std::ostream & operator <<(std::ostream & str, const Setting & opt) +{ + str << (const T &) opt; + return str; +} + +/* A special setting for Paths. These are automatically canonicalised + (e.g. "/foo//bar/" becomes "/foo/bar"). */ +class PathSetting : public Setting +{ + bool allowEmpty; + +public: + + PathSetting(Config * options, + bool allowEmpty, + const Path & def, + const std::string & name, + const std::string & description, + const std::set & aliases = {}) + : Setting(options, def, name, description, aliases) + , allowEmpty(allowEmpty) + { + set(value); + } + + void set(const std::string & str) override; + + Path operator +(const char * p) const { return value + p; } +}; + +} diff --git a/src/libutil/types.hh b/src/libutil/types.hh index 97d79af9b5d..1429c238513 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -7,6 +7,7 @@ #include #include #include +#include #include @@ -141,6 +142,7 @@ private: typedef list Strings; typedef set StringSet; +typedef std::map StringMap; /* Paths are just strings. 
*/ From 0bf34de43b2fc4c9c3104b986eaea5c5cc856b83 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Apr 2017 16:31:28 +0200 Subject: [PATCH 0234/2196] Validate Boolean settings better --- src/libutil/config.cc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 2f9f988607e..893cdccce34 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -90,7 +90,12 @@ template<> std::string Setting::to_string() template<> void Setting::set(const std::string & str) { - value = str == "true" || str == "1"; + if (str == "true" || str == "yes" || str == "1") + value = true; + else if (str == "false" || str == "no" || str == "0") + value = false; + else + throw UsageError("Boolean setting '%s' has invalid value '%s'", name, str); } template<> std::string Setting::to_string() From 6bd9576aeb55927cb551736a47b4e8e3fd1063bb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Apr 2017 17:54:05 +0200 Subject: [PATCH 0235/2196] Support arbitrary numeric types for settings --- src/libutil/config.cc | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 893cdccce34..c05a3253bce 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -74,17 +74,25 @@ template<> std::string Setting::to_string() return value; } -template<> void Setting::set(const std::string & str) +template +void Setting::set(const std::string & str) { + static_assert(std::is_integral::value, "Integer required."); try { - value = std::stoi(str); - } catch (...) { + auto i = std::stoll(str); + if (i < std::numeric_limits::min() || + i > std::numeric_limits::max()) + throw UsageError("setting '%s' has out-of-range value %d", name, i); + value = i; + } catch (std::logic_error&) { throw UsageError("setting '%s' has invalid value '%s'", name, str); } } -template<> std::string Setting::to_string() +template +std::string Setting::to_string() { + static_assert(std::is_integral::value, "Integer required."); return std::to_string(value); } @@ -103,6 +111,11 @@ template<> std::string Setting::to_string() return value ? "true" : "false"; } +template class Setting; +template class Setting; +template class Setting; +template class Setting; + void PathSetting::set(const std::string & str) { if (str == "") { From ba9ad29fdbfda3836bb06b35817f08fd10beaa22 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Apr 2017 20:53:23 +0200 Subject: [PATCH 0236/2196] Convert Settings to the new config system This makes all config options self-documenting. Unknown or unparseable config settings and --option flags now cause a warning. 
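As an illustrative sketch (mirroring the common-args.cc hunk below, and assuming the global nix::settings object declared in globals.hh): a recognized option name is parsed by its typed Setting, while an unknown or malformed one raises a UsageError that is downgraded to a warning instead of aborting.

    #include "globals.hh"
    #include "logging.hh"

    void applyUserOption(const std::string & name, const std::string & value)
    {
        try {
            nix::settings.set(name, value);   // e.g. ("build-use-sandbox", "relaxed")
        } catch (nix::UsageError & e) {
            nix::warn(e.what());              // e.g. "unknown setting 'no-such-option'"
        }
    }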
--- doc/manual/command-ref/conf-file.xml | 10 - perl/lib/Nix/Config.pm.in | 23 +- src/libmain/common-args.cc | 6 +- src/libmain/shared.cc | 10 +- src/libstore/build.cc | 11 +- src/libstore/crypto.cc | 4 +- src/libstore/download.cc | 2 +- src/libstore/globals.cc | 317 ++++----------------------- src/libstore/globals.hh | 312 +++++++++++++------------- src/libstore/local-store.cc | 4 +- src/libstore/remote-store.cc | 4 +- src/libstore/store-api.cc | 9 +- src/libutil/config.cc | 104 +++++++-- src/libutil/config.hh | 22 +- src/nix-daemon/nix-daemon.cc | 13 +- src/nix-store/nix-store.cc | 8 +- src/nix/main.cc | 1 - tests/shell.shebang.sh | 2 +- 18 files changed, 332 insertions(+), 530 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 6952829e8f7..3de9647aa4e 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -334,16 +334,6 @@ flag, e.g. --option gc-keep-outputs false. - use-binary-caches - - If set to true (the default), - Nix will check the binary caches specified by - and related options to obtain - binary substitutes. - - - - binary-caches A list of URLs of binary caches, separated by diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in index f494e34a5e7..4bdee7fd89f 100644 --- a/perl/lib/Nix/Config.pm.in +++ b/perl/lib/Nix/Config.pm.in @@ -20,22 +20,15 @@ $useBindings = 1; %config = (); sub readConfig { - if (defined $ENV{'_NIX_OPTIONS'}) { - foreach my $s (split '\n', $ENV{'_NIX_OPTIONS'}) { - my ($n, $v) = split '=', $s, 2; - $config{$n} = $v; - } - } else { - my $config = "$confDir/nix.conf"; - return unless -f $config; - - open CONFIG, "<$config" or die "cannot open ‘$config’"; - while () { - /^\s*([\w\-\.]+)\s*=\s*(.*)$/ or next; - $config{$1} = $2; - } - close CONFIG; + my $config = "$confDir/nix.conf"; + return unless -f $config; + + open CONFIG, "<$config" or die "cannot open ‘$config’"; + while () { + /^\s*([\w\-\.]+)\s*=\s*(.*)$/ or next; + $config{$1} = $2; } + close CONFIG; } return 1; diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index 98693d78a7f..9a7a893138d 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -22,7 +22,11 @@ MixCommonArgs::MixCommonArgs(const string & programName) [](Strings ss) { auto name = ss.front(); ss.pop_front(); auto value = ss.front(); - settings.set(name, value); + try { + settings.set(name, value); + } catch (UsageError & e) { + warn(e.what()); + } }); } diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index c1828aa7db8..9d506d01923 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -138,9 +138,6 @@ void initNix() struct timeval tv; gettimeofday(&tv, 0); srandom(tv.tv_usec); - - if (char *pack = getenv("_NIX_OPTIONS")) - settings.unpack(pack); } @@ -156,10 +153,10 @@ struct LegacyArgs : public MixCommonArgs &settings.verboseBuild, false); mkFlag('K', "keep-failed", "keep temporary directories of failed builds", - &settings.keepFailed); + &(bool&) settings.keepFailed); mkFlag('k', "keep-going", "keep going after a build fails", - &settings.keepGoing); + &(bool&) settings.keepGoing); mkFlag(0, "fallback", "build from source if substitution fails", []() { settings.set("build-fallback", "true"); @@ -184,7 +181,7 @@ struct LegacyArgs : public MixCommonArgs &settings.readOnlyMode); mkFlag(0, "no-build-hook", "disable use of the build hook mechanism", - &settings.useBuildHook, false); + &(bool&) settings.useBuildHook, false); mkFlag(0, "show-trace", "show Nix expression 
stack trace in evaluation errors", &settings.showTrace); @@ -218,7 +215,6 @@ void parseCmdLine(int argc, char * * argv, std::function parseArg) { LegacyArgs(baseNameOf(argv[0]), parseArg).parseCmdline(argvToStrings(argc, argv)); - settings.update(); } diff --git a/src/libstore/build.cc b/src/libstore/build.cc index b23447fa073..33c9e37047f 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -462,7 +462,7 @@ UserLock::UserLock() assert(settings.buildUsersGroup != ""); /* Get the members of the build-users-group. */ - struct group * gr = getgrnam(settings.buildUsersGroup.c_str()); + struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str()); if (!gr) throw Error(format("the group ‘%1%’ specified in ‘build-users-group’ does not exist") % settings.buildUsersGroup); @@ -1690,10 +1690,7 @@ void DerivationGoal::startBuilder() /* Are we doing a chroot build? */ { - string x = settings.useSandbox; - if (x != "true" && x != "false" && x != "relaxed") - throw Error("option ‘build-use-sandbox’ must be set to one of ‘true’, ‘false’ or ‘relaxed’"); - if (x == "true") { + if (settings.sandboxMode == smEnabled) { if (get(drv->env, "__noChroot") == "1") throw Error(format("derivation ‘%1%’ has ‘__noChroot’ set, " "but that's not allowed when ‘build-use-sandbox’ is ‘true’") % drvPath); @@ -1704,9 +1701,9 @@ void DerivationGoal::startBuilder() #endif useChroot = true; } - else if (x == "false") + else if (settings.sandboxMode == smDisabled) useChroot = false; - else if (x == "relaxed") + else if (settings.sandboxMode == smRelaxed) useChroot = !fixedOutput && get(drv->env, "__noChroot") != "1"; } diff --git a/src/libstore/crypto.cc b/src/libstore/crypto.cc index 9692dd83b4e..f56a6adab9c 100644 --- a/src/libstore/crypto.cc +++ b/src/libstore/crypto.cc @@ -105,12 +105,12 @@ PublicKeys getDefaultPublicKeys() // FIXME: filter duplicates - for (auto s : settings.binaryCachePublicKeys) { + for (auto s : settings.binaryCachePublicKeys.get()) { PublicKey key(s); publicKeys.emplace(key.name, key); } - for (auto secretKeyFile : settings.secretKeyFiles) { + for (auto secretKeyFile : settings.secretKeyFiles.get()) { try { SecretKey secretKey(readFile(secretKeyFile)); publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 95e6f7bace0..d073e870b4e 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -251,7 +251,7 @@ struct CurlDownloader : public Downloader /* If no file exist in the specified path, curl continues to work anyway as if netrc support was disabled. 
*/ - curl_easy_setopt(req, CURLOPT_NETRC_FILE, settings.netrcFile.c_str()); + curl_easy_setopt(req, CURLOPT_NETRC_FILE, settings.netrcFile.get().c_str()); curl_easy_setopt(req, CURLOPT_NETRC, CURL_NETRC_OPTIONAL); result.data = std::make_shared(); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index b9f4fada59f..bb61daa5164 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -1,6 +1,7 @@ #include "globals.hh" #include "util.hh" #include "archive.hh" +#include "args.hh" #include #include @@ -26,329 +27,89 @@ namespace nix { Settings settings; - Settings::Settings() + : Config({}) + , nixPrefix(NIX_PREFIX) + , nixStore(canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR)))) + , nixDataDir(canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR))) + , nixLogDir(canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR))) + , nixStateDir(canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR))) + , nixConfDir(canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR))) + , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR))) + , nixBinDir(canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR))) + , nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH)) { - deprecatedOptions = StringSet({ - "build-use-chroot", "build-chroot-dirs", "build-extra-chroot-dirs", - "this-option-never-existed-but-who-will-know" - }); - - nixPrefix = NIX_PREFIX; - nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR))); - nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR)); - nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR)); - nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR)); - nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR)); - nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR)); - nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR)); - nixDaemonSocketFile = canonPath(nixStateDir + DEFAULT_SOCKET_PATH); - - // should be set with the other config options, but depends on nixLibexecDir -#ifdef __APPLE__ - preBuildHook = nixLibexecDir + "/nix/resolve-system-dependencies"; -#endif - - keepFailed = false; - keepGoing = false; - tryFallback = false; - maxBuildJobs = 1; - buildCores = std::max(1U, std::thread::hardware_concurrency()); - readOnlyMode = false; - thisSystem = SYSTEM; - maxSilentTime = 0; - buildTimeout = 0; - useBuildHook = true; - reservedSize = 8 * 1024 * 1024; - fsyncMetadata = true; - useSQLiteWAL = true; - syncBeforeRegistering = false; - useSubstitutes = true; buildUsersGroup = getuid() == 0 ? 
"nixbld" : ""; - useSshSubstituter = true; - impersonateLinux26 = false; - keepLog = true; - compressLog = true; - maxLogSize = 0; - pollInterval = 5; - checkRootReachability = false; - gcKeepOutputs = false; - gcKeepDerivations = true; - autoOptimiseStore = false; - envKeepDerivations = false; lockCPU = getEnv("NIX_AFFINITY_HACK", "1") == "1"; - showTrace = false; - enableNativeCode = false; - netrcFile = fmt("%s/%s", nixConfDir, "netrc"); caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")); - enableImportFromDerivation = true; - useSandbox = "false"; // TODO: make into an enum #if __linux__ sandboxPaths = tokenizeString("/bin/sh=" BASH_PATH); #endif - restrictEval = false; - buildRepeat = 0; allowedImpureHostPrefixes = tokenizeString(DEFAULT_ALLOWED_IMPURE_PREFIXES); - sandboxShmSize = "50%"; - darwinLogSandboxViolations = false; - runDiffHook = false; - diffHook = ""; - enforceDeterminism = true; - binaryCachePublicKeys = Strings{"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="}; - secretKeyFiles = Strings(); - binaryCachesParallelConnections = 25; - enableHttp2 = true; - tarballTtl = 60 * 60; - signedBinaryCaches = ""; - substituters = Strings(); - binaryCaches = nixStore == "/nix/store" ? Strings{"https://cache.nixos.org/"} : Strings(); - extraBinaryCaches = Strings(); - trustedUsers = Strings({"root"}); - allowedUsers = Strings({"*"}); - printMissing = true; } - void Settings::loadConfFile() { - Path settingsFile = (format("%1%/%2%") % nixConfDir % "nix.conf").str(); - if (!pathExists(settingsFile)) return; - string contents = readFile(settingsFile); - - unsigned int pos = 0; - - while (pos < contents.size()) { - string line; - while (pos < contents.size() && contents[pos] != '\n') - line += contents[pos++]; - pos++; - - string::size_type hash = line.find('#'); - if (hash != string::npos) - line = string(line, 0, hash); - - vector tokens = tokenizeString >(line); - if (tokens.empty()) continue; - - if (tokens.size() < 2 || tokens[1] != "=") - throw Error(format("illegal configuration line ‘%1%’ in ‘%2%’") % line % settingsFile); - - string name = tokens[0]; - - vector::iterator i = tokens.begin(); - advance(i, 2); - settings[name] = concatStringsSep(" ", Strings(i, tokens.end())); // FIXME: slow - }; + applyConfigFile(nixConfDir + "/nix.conf"); } - void Settings::set(const string & name, const string & value) { - settings[name] = value; overrides[name] = value; + Config::set(name, value); } -void Settings::update() -{ - _get(tryFallback, "build-fallback"); - - std::string s = "1"; - _get(s, "build-max-jobs"); - if (s == "auto") - maxBuildJobs = std::max(1U, std::thread::hardware_concurrency()); - else - if (!string2Int(s, maxBuildJobs)) - throw Error("configuration setting ‘build-max-jobs’ should be ‘auto’ or an integer"); - - _get(buildCores, "build-cores"); - _get(thisSystem, "system"); - _get(maxSilentTime, "build-max-silent-time"); - _get(buildTimeout, "build-timeout"); - _get(reservedSize, "gc-reserved-space"); - _get(fsyncMetadata, "fsync-metadata"); - _get(useSQLiteWAL, "use-sqlite-wal"); - _get(syncBeforeRegistering, "sync-before-registering"); - _get(useSubstitutes, "build-use-substitutes"); - _get(buildUsersGroup, "build-users-group"); - _get(impersonateLinux26, "build-impersonate-linux-26"); - _get(keepLog, "build-keep-log"); - _get(compressLog, "build-compress-log"); - _get(maxLogSize, "build-max-log-size"); - _get(pollInterval, "build-poll-interval"); - _get(checkRootReachability, "gc-check-reachability"); - 
_get(gcKeepOutputs, "gc-keep-outputs"); - _get(gcKeepDerivations, "gc-keep-derivations"); - _get(autoOptimiseStore, "auto-optimise-store"); - _get(envKeepDerivations, "env-keep-derivations"); - _get(sshSubstituterHosts, "ssh-substituter-hosts"); - _get(useSshSubstituter, "use-ssh-substituter"); - _get(enableNativeCode, "allow-unsafe-native-code-during-evaluation"); - _get(useCaseHack, "use-case-hack"); - _get(preBuildHook, "pre-build-hook"); - _get(keepGoing, "keep-going"); - _get(keepFailed, "keep-failed"); - _get(netrcFile, "netrc-file"); - _get(enableImportFromDerivation, "allow-import-from-derivation"); - _get(useSandbox, "build-use-sandbox", "build-use-chroot"); - _get(sandboxPaths, "build-sandbox-paths", "build-chroot-dirs"); - _get(extraSandboxPaths, "build-extra-sandbox-paths", "build-extra-chroot-dirs"); - _get(restrictEval, "restrict-eval"); - _get(buildRepeat, "build-repeat"); - _get(allowedImpureHostPrefixes, "allowed-impure-host-deps"); - _get(sandboxShmSize, "sandbox-dev-shm-size"); - _get(darwinLogSandboxViolations, "darwin-log-sandbox-violations"); - _get(runDiffHook, "run-diff-hook"); - _get(diffHook, "diff-hook"); - _get(enforceDeterminism, "enforce-determinism"); - _get(binaryCachePublicKeys, "binary-cache-public-keys"); - _get(secretKeyFiles, "secret-key-files"); - _get(binaryCachesParallelConnections, "binary-caches-parallel-connections"); - _get(enableHttp2, "enable-http2"); - _get(tarballTtl, "tarball-ttl"); - _get(signedBinaryCaches, "signed-binary-caches"); - _get(substituters, "substituters"); - _get(binaryCaches, "binary-caches"); - _get(extraBinaryCaches, "extra-binary-caches"); - _get(trustedUsers, "trusted-users"); - _get(allowedUsers, "allowed-users"); - _get(printMissing, "print-missing"); - - /* Clear out any deprecated options that might be left, so users know we recognize the option - but aren't processing it anymore */ - for (auto &i : deprecatedOptions) { - if (settings.find(i) != settings.end()) { - printError(format("warning: deprecated option '%1%' is no longer supported and will be ignored") % i); - settings.erase(i); - } - } - - if (settings.size() != 0) { - string bad; - for (auto &i : settings) - bad += "'" + i.first + "', "; - bad.pop_back(); - bad.pop_back(); - throw Error(format("unrecognized options: %s") % bad); - } -} - -void Settings::checkDeprecated(const string & name) +StringMap Settings::getOverrides() { - if (deprecatedOptions.find(name) != deprecatedOptions.end()) - printError(format("warning: deprecated option '%1%' will soon be unsupported") % name); -} - -void Settings::_get(string & res, const string & name) -{ - SettingsMap::iterator i = settings.find(name); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - res = i->second; -} - -void Settings::_get(string & res, const string & name1, const string & name2) -{ - SettingsMap::iterator i = settings.find(name1); - if (i == settings.end()) i = settings.find(name2); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - res = i->second; + return overrides; } - -void Settings::_get(bool & res, const string & name) +unsigned int Settings::getDefaultCores() { - SettingsMap::iterator i = settings.find(name); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - if (i->second == "true") res = true; - else if (i->second == "false") res = false; - else throw Error(format("configuration option ‘%1%’ should be either ‘true’ or ‘false’, not ‘%2%’") - % name % i->second); + return std::max(1U, 
std::thread::hardware_concurrency()); } +const string nixVersion = PACKAGE_VERSION; -void Settings::_get(StringSet & res, const string & name) +template<> void Setting::set(const std::string & str) { - SettingsMap::iterator i = settings.find(name); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - res.clear(); - Strings ss = tokenizeString(i->second); - res.insert(ss.begin(), ss.end()); + if (str == "true") value = smEnabled; + else if (str == "relaxed") value = smRelaxed; + else if (str == "false") value = smDisabled; + else throw UsageError("option '%s' has invalid value '%s'", name, str); } -void Settings::_get(StringSet & res, const string & name1, const string & name2) +template<> std::string Setting::to_string() { - SettingsMap::iterator i = settings.find(name1); - if (i == settings.end()) i = settings.find(name2); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - res.clear(); - Strings ss = tokenizeString(i->second); - res.insert(ss.begin(), ss.end()); + if (value == smEnabled) return "true"; + else if (value == smRelaxed) return "relaxed"; + else if (value == smDisabled) return "false"; + else abort(); } -void Settings::_get(Strings & res, const string & name) +template<> void Setting::set(const std::string & str) { - SettingsMap::iterator i = settings.find(name); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - res = tokenizeString(i->second); + if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency()); + else if (!string2Int(str, value)) + throw UsageError("configuration setting ‘%s’ should be ‘auto’ or an integer", name); } - -template void Settings::_get(N & res, const string & name) +template<> std::string Setting::to_string() { - SettingsMap::iterator i = settings.find(name); - if (i == settings.end()) return; - checkDeprecated(i->first); - settings.erase(i); - if (!string2Int(i->second, res)) - throw Error(format("configuration setting ‘%1%’ should have an integer value") % name); + return std::to_string(value); } - -string Settings::pack() +template<> void Setting::set(const std::string & str) { - string s; - for (auto & i : settings) { - if (i.first.find('\n') != string::npos || - i.first.find('=') != string::npos || - i.second.find('\n') != string::npos) - throw Error("illegal option name/value"); - s += i.first; s += '='; s += i.second; s += '\n'; - } - return s; + value = parseBool(str); + nix::useCaseHack = true; } - -void Settings::unpack(const string & pack) { - Strings lines = tokenizeString(pack, "\n"); - for (auto & i : lines) { - string::size_type eq = i.find('='); - if (eq == string::npos) - throw Error("illegal option name/value"); - set(i.substr(0, eq), i.substr(eq + 1)); - } -} - - -Settings::SettingsMap Settings::getOverrides() +template<> std::string Setting::to_string() { - return overrides; + return printBool(value); } - -const string nixVersion = PACKAGE_VERSION; - - } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index d47fdb7c9de..95c8859cfa9 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -1,7 +1,7 @@ #pragma once #include "types.hh" -#include "logging.hh" +#include "config.hh" #include #include @@ -9,36 +9,33 @@ namespace nix { +typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode; -struct Settings { +extern bool useCaseHack; // FIXME - typedef std::map SettingsMap; +class Settings : public Config { - Settings(); + StringMap overrides; - void loadConfFile(); + unsigned int 
getDefaultCores(); - void set(const string & name, const string & value); +public: - void update(); + Settings(); - string pack(); + void loadConfFile(); - void unpack(const string & pack); + void set(const string & name, const string & value); - SettingsMap getOverrides(); + StringMap getOverrides(); - /* TODO: the comments below should be strings and exposed via a nice command-line UI or similar. - We should probably replace it with some sort of magic template or macro to minimize the amount - of duplication and pain here. */ + Path nixPrefix; /* The directory where we store sources and derived files. */ Path nixStore; Path nixDataDir; /* !!! fix */ - Path nixPrefix; - /* The directory where we log various operations. */ Path nixLogDir; @@ -57,17 +54,14 @@ struct Settings { /* File name of the socket the daemon listens to. */ Path nixDaemonSocketFile; - /* Whether to keep temporary directories of failed builds. */ - bool keepFailed; + Setting keepFailed{this, false, "keep-failed", + "Whether to keep temporary directories of failed builds."}; - /* Whether to keep building subgoals when a sibling (another - subgoal of the same goal) fails. */ - bool keepGoing; + Setting keepGoing{this, false, "keep-going", + "Whether to keep building derivations when another build fails."}; - /* Whether, if we cannot realise the known closure corresponding - to a derivation, we should try to normalise the derivation - instead. */ - bool tryFallback; + Setting tryFallback{this, tryFallback, "build-fallback", + "Whether to fall back to building when substitution fails."}; /* Whether to show build log output in real time. */ bool verboseBuild = true; @@ -76,206 +70,206 @@ struct Settings { the log to show if a build fails. */ size_t logLines = 10; - /* Maximum number of parallel build jobs. 0 means unlimited. */ - unsigned int maxBuildJobs; + struct MaxBuildJobsTag { }; + Setting maxBuildJobs{this, 1, "build-max-jobs", + "Maximum number of parallel build jobs. \"auto\" means use number of cores."}; - /* Number of CPU cores to utilize in parallel within a build, - i.e. by passing this number to Make via '-j'. 0 means that the - number of actual CPU cores on the local host ought to be - auto-detected. */ - unsigned int buildCores; + Setting buildCores{this, getDefaultCores(), "build-cores", + "Number of CPU cores to utilize in parallel within a build, " + "i.e. by passing this number to Make via '-j'. 0 means that the " + "number of actual CPU cores on the local host ought to be " + "auto-detected."}; /* Read-only mode. Don't copy stuff to the store, don't change the database. */ - bool readOnlyMode; - - /* The canonical system name, as returned by config.guess. */ - string thisSystem; - - /* The maximum time in seconds that a builer can go without - producing any output on stdout/stderr before it is killed. 0 - means infinity. */ - time_t maxSilentTime; + bool readOnlyMode = false; - /* The maximum duration in seconds that a builder can run. 0 - means infinity. */ - time_t buildTimeout; + Setting thisSystem{this, SYSTEM, "system", + "The canonical Nix system name."}; - /* Whether to use build hooks (for distributed builds). Sometimes - users want to disable this from the command-line. */ - bool useBuildHook; + Setting maxSilentTime{this, 0, "build-max-silent-time", + "The maximum time in seconds that a builer can go without " + "producing any output on stdout/stderr before it is killed. " + "0 means infinity."}; - /* Amount of reserved space for the garbage collector - (/nix/var/nix/db/reserved). 
*/ - off_t reservedSize; + Setting buildTimeout{this, 0, "build-timeout", + "The maximum duration in seconds that a builder can run. " + "0 means infinity."}; - /* Whether SQLite should use fsync. */ - bool fsyncMetadata; + Setting useBuildHook{this, true, "remote-builds", + "Whether to use build hooks (for distributed builds)."}; - /* Whether SQLite should use WAL mode. */ - bool useSQLiteWAL; + Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", + "Amount of reserved disk space for the garbage collector."}; - /* Whether to call sync() before registering a path as valid. */ - bool syncBeforeRegistering; + Setting fsyncMetadata{this, true, "fsync-metadata", + "Whether SQLite should use fsync()."}; - /* Whether to use substitutes. */ - bool useSubstitutes; + Setting useSQLiteWAL{this, true, "use-sqlite-wal", + "Whether SQLite should use WAL mode."}; - /* The Unix group that contains the build users. */ - string buildUsersGroup; + Setting syncBeforeRegistering{this, false, "sync-before-registering", + "Whether to call sync() before registering a path as valid."}; - /* Set of ssh connection strings for the ssh substituter */ - Strings sshSubstituterHosts; + Setting useSubstitutes{this, true, "build-use-substitutes", + "Whether to use substitutes."}; - /* Whether to use the ssh substituter at all */ - bool useSshSubstituter; + Setting buildUsersGroup{this, "", "build-users-group", + "The Unix group that contains the build users."}; - /* Whether to impersonate a Linux 2.6 machine on newer kernels. */ - bool impersonateLinux26; + Setting impersonateLinux26{this, false, "build-impersonate-linux-26", + "Whether to impersonate a Linux 2.6 machine on newer kernels."}; - /* Whether to store build logs. */ - bool keepLog; + Setting keepLog{this, true, "build-keep-log", + "Whether to store build logs."}; - /* Whether to compress logs. */ - bool compressLog; + Setting compressLog{this, true, "build-compress-log", + "Whether to compress logs."}; - /* Maximum number of bytes a builder can write to stdout/stderr - before being killed (0 means no limit). */ - unsigned long maxLogSize; + Setting maxLogSize{this, 0, "build-max-log-size", + "Maximum number of bytes a builder can write to stdout/stderr " + "before being killed (0 means no limit)."}; /* When build-repeat > 0 and verboseBuild == true, whether to print repeated builds (i.e. builds other than the first one) to stderr. Hack to prevent Hydra logs from being polluted. */ bool printRepeatedBuilds = true; - /* How often (in seconds) to poll for locks. */ - unsigned int pollInterval; + Setting pollInterval{this, 5, "build-poll-interval", + "How often (in seconds) to poll for locks."}; - /* Whether to check if new GC roots can in fact be found by the - garbage collector. */ - bool checkRootReachability; + Setting checkRootReachability{this, false, "gc-check-reachability", + "Whether to check if new GC roots can in fact be found by the " + "garbage collector."}; - /* Whether the garbage collector should keep outputs of live - derivations. */ - bool gcKeepOutputs; + Setting gcKeepOutputs{this, false, "gc-keep-outputs", + "Whether the garbage collector should keep outputs of live derivations."}; - /* Whether the garbage collector should keep derivers of live - paths. */ - bool gcKeepDerivations; + Setting gcKeepDerivations{this, true, "gc-keep-derivations", + "Whether the garbage collector should keep derivers of live paths."}; - /* Whether to automatically replace files with identical contents - with hard links. 
*/ - bool autoOptimiseStore; + Setting autoOptimiseStore{this, false, "auto-optimise-store", + "Whether to automatically replace files with identical contents with hard links."}; - /* Whether to add derivations as a dependency of user environments - (to prevent them from being GCed). */ - bool envKeepDerivations; + Setting envKeepDerivations{this, false, "env-keep-derivations", + "Whether to add derivations as a dependency of user environments " + "(to prevent them from being GCed)."}; /* Whether to lock the Nix client and worker to the same CPU. */ bool lockCPU; /* Whether to show a stack trace if Nix evaluation fails. */ - bool showTrace; + bool showTrace = false; - /* Whether native-code enabling primops should be enabled */ - bool enableNativeCode; + Setting enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation", + "Whether builtin functions that allow executing native code should be enabled."}; - /* Whether to enable sandboxed builds (string until we get an enum for true/false/relaxed) */ - string useSandbox; + Setting sandboxMode{this, smDisabled, "build-use-sandbox", + "Whether to enable sandboxed builds. Can be \"true\", \"false\" or \"relaxed\".", + {"build-use-chroot"}}; - /* The basic set of paths to expose in a sandbox */ - PathSet sandboxPaths; + Setting sandboxPaths{this, {}, "build-sandbox-paths", + "The paths to make available inside the build sandbox.", + {"build-chroot-dirs"}}; - /* Any extra sandbox paths to expose */ - PathSet extraSandboxPaths; + Setting extraSandboxPaths{this, {}, "build-extra-sandbox-paths", + "Additional paths to make available inside the build sandbox.", + {"build-extra-chroot-dirs"}}; - /* Whether to allow certain questionable operations (like fetching) during evaluation */ - bool restrictEval; + Setting restrictEval{this, false, "restrict-eval", + "Whether to restrict file system access to paths in $NIX_PATH, " + "and to disallow fetching files from the network."}; - /* The number of times to repeat a build to check for determinism */ - int buildRepeat; + Setting buildRepeat{this, 0, "build-repeat", + "The number of times to repeat a build in order to verify determinism."}; - /* Which prefixes to allow derivations to ask for access to (primarily for Darwin) */ - PathSet allowedImpureHostPrefixes; +#if __linux__ + Setting sandboxShmSize{this, "50%", "sandbox-dev-shm-size", + "The size of /dev/shm in the build sandbox."}; +#endif - /* The size of /dev/shm in the build sandbox (for Linux) */ - string sandboxShmSize; + Setting allowedImpureHostPrefixes{this, {}, "allowed-impure-host-deps", + "Which prefixes to allow derivations to ask for access to (primarily for Darwin)."}; - /* Whether to log Darwin sandbox access violations to the system log */ - bool darwinLogSandboxViolations; +#if __APPLE__ + Setting darwinLogSandboxViolations{this, false, "darwin-log-sandbox-violations", + "Whether to log Darwin sandbox access violations to the system log."}; +#endif - /* ??? */ - bool runDiffHook; + Setting runDiffHook{this, false, "run-diff-hook", + "Whether to run the program specified by the diff-hook setting " + "repeated builds produce a different result. Typically used to " + "plug in diffoscope."}; - /* ??? 
*/ - string diffHook; + PathSetting diffHook{this, true, "", "diff-hook", + "A program that prints out the differences between the two paths " + "specified on its command line."}; - /* Whether to fail if repeated builds produce different output */ - bool enforceDeterminism; + Setting enforceDeterminism{this, true, "enforce-determinism", + "Whether to fail if repeated builds produce different output."}; - /* The known public keys for a binary cache */ - Strings binaryCachePublicKeys; + Setting binaryCachePublicKeys{this, + {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="}, + "binary-cache-public-keys", + "Trusted public keys for secure substitution."}; - /* Secret keys to use for build output signing */ - Strings secretKeyFiles; + Setting secretKeyFiles{this, {}, "secret-key-files", + "Secret keys with which to sign local builds."}; - /* Number of parallel connections to hit a binary cache with when finding out if it contains hashes */ - int binaryCachesParallelConnections; + Setting binaryCachesParallelConnections{this, 25, "binary-caches-parallel-connections", + "Number of parallel connections to binary caches."}; - /* Whether to enable HTTP2 */ - bool enableHttp2; + Setting enableHttp2{this, true, "enable-http2", + "Whether to enable HTTP/2 support."}; - /* How soon to expire tarballs like builtins.fetchTarball and (ugh, bad name) builtins.fetchurl */ - int tarballTtl; + Setting tarballTtl{this, 60 * 60, "tarball-ttl", + "How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."}; - /* ??? */ - string signedBinaryCaches; + Setting signedBinaryCaches{this, "*", "signed-binary-caches", + "Obsolete."}; - /* ??? */ - Strings substituters; + Setting substituters{this, + nixStore == "/nix/store" ? Strings{"https://cache.nixos.org/"} : Strings(), + "substituters", + "The URIs of substituters (such as https://cache.nixos.org/).", + {"binary-caches"}}; - /* ??? */ - Strings binaryCaches; + // FIXME: provide a way to add to option values. + Setting extraSubstituters{this, {}, "extra-substituters", + "Additional URIs of substituters.", + {"extra-binary-caches"}}; - /* ??? */ - Strings extraBinaryCaches; - - /* Who we trust to ask the daemon to do unsafe things */ - Strings trustedUsers; + Setting trustedUsers{this, {"root"}, "trusted-users", + "Which users or groups are trusted to ask the daemon to do unsafe things."}; /* ?Who we trust to use the daemon in safe ways */ - Strings allowedUsers; + Setting allowedUsers{this, {"*"}, "allowed-users", + "Which users or groups are allowed to connect to the daemon."}; - /* ??? */ - bool printMissing; + Setting printMissing{this, true, "print-missing", + "Whether to print what paths need to be built or downloaded."}; - /* The hook to run just before a build to set derivation-specific - build settings */ - Path preBuildHook; + Setting preBuildHook{this, +#if __APPLE__ + nixLibexecDir + "/nix/resolve-system-dependencies", +#else + "", +#endif + "pre-build-hook", + "A program to run just before a build to set derivation-specific build settings."}; - /* Path to the netrc file used to obtain usernames/passwords for - downloads. 
*/ - Path netrcFile; + Setting netrcFile{this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file", + "Path to the netrc file used to obtain usernames/passwords for downloads."}; /* Path to the SSL CA file used */ Path caFile; - /* Whether we allow import-from-derivation */ - bool enableImportFromDerivation; - -private: - StringSet deprecatedOptions; - SettingsMap settings, overrides; - - void checkDeprecated(const string & name); + Setting enableImportFromDerivation{this, true, "allow-import-from-derivation", + "Whether the evaluator allows importing the result of a derivation."}; - void _get(string & res, const string & name); - void _get(string & res, const string & name1, const string & name2); - void _get(bool & res, const string & name); - void _get(StringSet & res, const string & name); - void _get(StringSet & res, const string & name1, const string & name2); - void _get(Strings & res, const string & name); - template void _get(N & res, const string & name); + struct CaseHackTag { }; + Setting useCaseHack{this, nix::useCaseHack, "use-case-hack", + "Whether to enable a Darwin-specific hack for dealing with file name collisions."}; }; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 9111a45f886..5a98454ab38 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -75,7 +75,7 @@ LocalStore::LocalStore(const Params & params) mode_t perm = 01775; - struct group * gr = getgrnam(settings.buildUsersGroup.c_str()); + struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str()); if (!gr) printError(format("warning: the group ‘%1%’ specified in ‘build-users-group’ does not exist") % settings.buildUsersGroup); @@ -1335,7 +1335,7 @@ void LocalStore::signPathInfo(ValidPathInfo & info) auto secretKeyFiles = settings.secretKeyFiles; - for (auto & secretKeyFile : secretKeyFiles) { + for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); info.sign(secretKey); } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index e1df137e4db..da3c8eb8d89 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -166,9 +166,7 @@ void RemoteStore::setOptions(Connection & conn) << settings.useSubstitutes; if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 12) { - Settings::SettingsMap overrides = settings.getOverrides(); - if (overrides["ssh-auth-sock"] == "") - overrides["ssh-auth-sock"] = getEnv("SSH_AUTH_SOCK"); + StringMap overrides = settings.getOverrides(); conn.to << overrides.size(); for (auto & i : overrides) conn.to << i.first << i.second; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 514d1c2ff8b..835bbb90e0b 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -719,7 +719,7 @@ ref openStore(const std::string & uri, const Store::Params & params) for (auto fun : *RegisterStoreImplementation::implementations) { auto store = fun(uri, params); if (store) { - store->warnUnused(); + store->warnUnknownSettings(); return ref(store); } } @@ -782,13 +782,10 @@ std::list> getDefaultSubstituters() state->stores.push_back(openStore(uri)); }; - for (auto uri : settings.substituters) + for (auto uri : settings.substituters.get()) addStore(uri); - for (auto uri : settings.binaryCaches) - addStore(uri); - - for (auto uri : settings.extraBinaryCaches) + for (auto uri : settings.extraSubstituters.get()) addStore(uri); state->done = true; diff --git a/src/libutil/config.cc b/src/libutil/config.cc index c05a3253bce..85e5ce330be 100644 --- 
a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -11,7 +11,7 @@ void Config::set(const std::string & name, const std::string & value) i->second.setting->set(value); } -void Config::add(AbstractSetting * setting) +void Config::addSetting(AbstractSetting * setting) { _settings.emplace(setting->name, Config::SettingData{false, setting}); for (auto & alias : setting->aliases) @@ -41,21 +41,59 @@ void Config::add(AbstractSetting * setting) } } -void Config::warnUnused() +void Config::warnUnknownSettings() { for (auto & i : initials) warn("unknown setting '%s'", i.first); } -std::string Config::dump() +StringMap Config::getSettings() { - std::string res; + StringMap res; for (auto & opt : _settings) if (!opt.second.isAlias) - res += opt.first + " = " + opt.second.setting->to_string() + "\n"; + res.emplace(opt.first, opt.second.setting->to_string()); return res; } +void Config::applyConfigFile(const Path & path, bool fatal) +{ + try { + string contents = readFile(path); + + unsigned int pos = 0; + + while (pos < contents.size()) { + string line; + while (pos < contents.size() && contents[pos] != '\n') + line += contents[pos++]; + pos++; + + string::size_type hash = line.find('#'); + if (hash != string::npos) + line = string(line, 0, hash); + + vector tokens = tokenizeString >(line); + if (tokens.empty()) continue; + + if (tokens.size() < 2 || tokens[1] != "=") + throw UsageError("illegal configuration line ‘%1%’ in ‘%2%’", line, path); + + string name = tokens[0]; + + vector::iterator i = tokens.begin(); + advance(i, 2); + + try { + set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow + } catch (UsageError & e) { + if (fatal) throw; + warn("in configuration file '%s': %s", path, e.what()); + } + }; + } catch (SysError &) { } +} + AbstractSetting::AbstractSetting( const std::string & name, const std::string & description, @@ -74,41 +112,65 @@ template<> std::string Setting::to_string() return value; } -template -void Setting::set(const std::string & str) +template +void Setting::set(const std::string & str) { static_assert(std::is_integral::value, "Integer required."); - try { - auto i = std::stoll(str); - if (i < std::numeric_limits::min() || - i > std::numeric_limits::max()) - throw UsageError("setting '%s' has out-of-range value %d", name, i); - value = i; - } catch (std::logic_error&) { + if (!string2Int(str, value)) throw UsageError("setting '%s' has invalid value '%s'", name, str); - } } -template -std::string Setting::to_string() +template +std::string Setting::to_string() { static_assert(std::is_integral::value, "Integer required."); return std::to_string(value); } -template<> void Setting::set(const std::string & str) +bool AbstractSetting::parseBool(const std::string & str) { if (str == "true" || str == "yes" || str == "1") - value = true; + return true; else if (str == "false" || str == "no" || str == "0") - value = false; + return false; else throw UsageError("Boolean setting '%s' has invalid value '%s'", name, str); } +template<> void Setting::set(const std::string & str) +{ + value = parseBool(str); +} + +std::string AbstractSetting::printBool(bool b) +{ + return b ? "true" : "false"; +} + + template<> std::string Setting::to_string() { - return value ? 
"true" : "false"; + return printBool(value); +} + +template<> void Setting::set(const std::string & str) +{ + value = tokenizeString(str); +} + +template<> std::string Setting::to_string() +{ + return concatStringsSep(" ", value); +} + +template<> void Setting::set(const std::string & str) +{ + value = tokenizeString(str); +} + +template<> std::string Setting::to_string() +{ + return concatStringsSep(" ", value); } template class Setting; diff --git a/src/libutil/config.hh b/src/libutil/config.hh index fb2d48e9c83..6c8612f675c 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -47,11 +47,13 @@ public: void set(const std::string & name, const std::string & value); - void add(AbstractSetting * setting); + void addSetting(AbstractSetting * setting); - void warnUnused(); + void warnUnknownSettings(); - std::string dump(); + StringMap getSettings(); + + void applyConfigFile(const Path & path, bool fatal = false); }; class AbstractSetting @@ -83,10 +85,15 @@ protected: virtual void set(const std::string & value) = 0; virtual std::string to_string() = 0; + + bool parseBool(const std::string & str); + std::string printBool(bool b); }; +struct DefaultSettingTag { }; + /* A setting of type T. */ -template +template class Setting : public AbstractSetting { protected: @@ -103,10 +110,12 @@ public: : AbstractSetting(name, description, aliases) , value(def) { - options->add(this); + options->addSetting(this); } operator const T &() const { return value; } + operator T &() { return value; } + const T & get() const { return value; } bool operator ==(const T & v2) const { return value == v2; } bool operator !=(const T & v2) const { return value != v2; } void operator =(const T & v) { value = v; } @@ -123,6 +132,9 @@ std::ostream & operator <<(std::ostream & str, const Setting & opt) return str; } +template +bool operator ==(const T & v1, const Setting & v2) { return v1 == (const T &) v2; } + /* A special setting for Paths. These are automatically canonicalised (e.g. "/foo//bar/" becomes "/foo/bar"). */ class PathSetting : public Setting diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index f4285693f7f..9fcb81dd5aa 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -436,30 +436,29 @@ static void performOp(ref store, bool trusted, unsigned int clientVe } case wopSetOptions: { - from >> settings.keepFailed; - from >> settings.keepGoing; - settings.set("build-fallback", readInt(from) ? "true" : "false"); + settings.keepFailed = readInt(from); + settings.keepGoing = readInt(from); + settings.tryFallback = readInt(from); verbosity = (Verbosity) readInt(from); settings.set("build-max-jobs", std::to_string(readInt(from))); - settings.set("build-max-silent-time", std::to_string(readInt(from))); + settings.maxSilentTime = readInt(from); settings.useBuildHook = readInt(from) != 0; settings.verboseBuild = lvlError == (Verbosity) readInt(from); readInt(from); // obsolete logType readInt(from); // obsolete printBuildTrace settings.set("build-cores", std::to_string(readInt(from))); - settings.set("build-use-substitutes", readInt(from) ? "true" : "false"); + settings.useSubstitutes = readInt(from); if (GET_PROTOCOL_MINOR(clientVersion) >= 12) { unsigned int n = readInt(from); for (unsigned int i = 0; i < n; i++) { string name = readString(from); string value = readString(from); - if (name == "build-timeout" || name == "use-ssh-substituter") + if (name == "build-timeout") settings.set(name, value); else settings.set(trusted ? 
name : "untrusted-" + name, value); } } - settings.update(); startWork(); stopWork(); break; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index a40cca9824e..9131b74dfb4 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -795,11 +795,11 @@ static void opServe(Strings opFlags, Strings opArgs) settings.maxSilentTime = readInt(in); settings.buildTimeout = readInt(in); if (GET_PROTOCOL_MINOR(clientVersion) >= 2) - in >> settings.maxLogSize; + settings.maxLogSize = readNum(in); if (GET_PROTOCOL_MINOR(clientVersion) >= 3) { - settings.set("build-repeat", std::to_string(readInt(in))); - settings.set("enforce-determinism", readInt(in) != 0 ? "true" : "false"); - settings.set("run-diff-hook", "true"); + settings.buildRepeat = readInt(in); + settings.enforceDeterminism = readInt(in); + settings.runDiffHook = readInt(in); } settings.printRepeatedBuilds = false; }; diff --git a/src/nix/main.cc b/src/nix/main.cc index fdb8f6e3a19..440ced97dfc 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -42,7 +42,6 @@ void mainWrapped(int argc, char * * argv) NixArgs args; args.parseCmdline(argvToStrings(argc, argv)); - settings.update(); assert(args.command); diff --git a/tests/shell.shebang.sh b/tests/shell.shebang.sh index 3dadd591572..c9a83aaf83d 100755 --- a/tests/shell.shebang.sh +++ b/tests/shell.shebang.sh @@ -1,4 +1,4 @@ #! @ENV_PROG@ nix-shell -#! nix-shell -I nixpkgs=shell.nix --option use-binary-caches false +#! nix-shell -I nixpkgs=shell.nix --option build-use-substitutes false #! nix-shell --pure -i bash -p foo bar echo "$(foo) $(bar) $@" From 872ba75d8b6212c3a5e399ec62cbda86e58d3680 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Apr 2017 20:59:38 +0200 Subject: [PATCH 0237/2196] Add "nix show-config" command This dumps the entire Nix configuration, including all options that have default values. --- src/nix/show-config.cc | 43 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 src/nix/show-config.cc diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc new file mode 100644 index 00000000000..ba39e2bb29b --- /dev/null +++ b/src/nix/show-config.cc @@ -0,0 +1,43 @@ +#include "command.hh" +#include "common-args.hh" +#include "installables.hh" +#include "shared.hh" +#include "store-api.hh" +#include "json.hh" + +using namespace nix; + +struct CmdShowConfig : Command +{ + bool json = false; + + CmdShowConfig() + { + mkFlag(0, "json", "produce JSON output", &json); + } + + std::string name() override + { + return "show-config"; + } + + std::string description() override + { + return "show the Nix configuration"; + } + + void run() override + { + if (json) { + // FIXME: use appropriate JSON types (bool, ints, etc). 
+ JSONObject jsonObj(std::cout, true); + for (auto & s : settings.getSettings()) + jsonObj.attr(s.first, s.second); + } else { + for (auto & s : settings.getSettings()) + std::cout << s.first + " = " + s.second + "\n"; + } + } +}; + +static RegisterCommand r1(make_ref()); From 1673c373c9ddd1fc7ce1e12c79621dafe13fe930 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 11:57:02 +0200 Subject: [PATCH 0238/2196] nix-daemon: Don't die if the user sends an unknown setting --- src/nix-daemon/nix-daemon.cc | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 9fcb81dd5aa..b4e753f5521 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -453,10 +453,14 @@ static void performOp(ref store, bool trusted, unsigned int clientVe for (unsigned int i = 0; i < n; i++) { string name = readString(from); string value = readString(from); - if (name == "build-timeout") - settings.set(name, value); - else - settings.set(trusted ? name : "untrusted-" + name, value); + try { + if (name == "build-timeout") + settings.set(name, value); + else + settings.set(trusted ? name : "untrusted-" + name, value); + } catch (UsageError & e) { + warn(e.what()); + } } } startWork(); From 01dcdfcf336a127438b35b81751a783cde063ab8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 11:59:51 +0200 Subject: [PATCH 0239/2196] nix-daemon: Don't set untrusted-* settings These are no longer used anywhere. --- src/nix-daemon/nix-daemon.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index b4e753f5521..90bfc67d877 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -454,10 +454,8 @@ static void performOp(ref store, bool trusted, unsigned int clientVe string name = readString(from); string value = readString(from); try { - if (name == "build-timeout") + if (trusted || name == "build-timeout") settings.set(name, value); - else - settings.set(trusted ? 
name : "untrusted-" + name, value); } catch (UsageError & e) { warn(e.what()); } From 3872371f25acb9250d34554192196caee855fcb4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 13:42:22 +0200 Subject: [PATCH 0240/2196] Minor cleanup --- src/libmain/shared.cc | 2 +- src/nix-daemon/nix-daemon.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 9d506d01923..4747b9bf9b4 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -159,7 +159,7 @@ struct LegacyArgs : public MixCommonArgs &(bool&) settings.keepGoing); mkFlag(0, "fallback", "build from source if substitution fails", []() { - settings.set("build-fallback", "true"); + settings.tryFallback = true; }); mkFlag1('j', "max-jobs", "jobs", "maximum number of parallel builds", [=](std::string s) { diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 90bfc67d877..1389353bb5d 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -440,13 +440,13 @@ static void performOp(ref store, bool trusted, unsigned int clientVe settings.keepGoing = readInt(from); settings.tryFallback = readInt(from); verbosity = (Verbosity) readInt(from); - settings.set("build-max-jobs", std::to_string(readInt(from))); + settings.maxBuildJobs = readInt(from); settings.maxSilentTime = readInt(from); settings.useBuildHook = readInt(from) != 0; settings.verboseBuild = lvlError == (Verbosity) readInt(from); readInt(from); // obsolete logType readInt(from); // obsolete printBuildTrace - settings.set("build-cores", std::to_string(readInt(from))); + settings.buildCores = readInt(from); settings.useSubstitutes = readInt(from); if (GET_PROTOCOL_MINOR(clientVersion) >= 12) { unsigned int n = readInt(from); From 3b36c64b15ac31f56c13591092c8a24aff4ee6c2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 13:47:10 +0200 Subject: [PATCH 0241/2196] Fix Perl bindings --- perl/lib/Nix/Store.xs | 1 - 1 file changed, 1 deletion(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 46b41f92392..1920942a4c0 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -26,7 +26,6 @@ static ref store() if (!_store) { try { settings.loadConfFile(); - settings.update(); settings.lockCPU = false; _store = openStore(); } catch (Error & e) { From 6520b757c58df15a2d5d5daf234ff7a7740049cd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 13:59:39 +0200 Subject: [PATCH 0242/2196] Fix 32-bit build http://hydra.nixos.org/build/51569816 --- src/libutil/config.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 85e5ce330be..e7a810cec4d 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -176,7 +176,9 @@ template<> std::string Setting::to_string() template class Setting; template class Setting; template class Setting; -template class Setting; +template class Setting; +template class Setting; +template class Setting; void PathSetting::set(const std::string & str) { From da76c72bc9b247092e68411f0bd91ca37c176d0a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 14:02:43 +0200 Subject: [PATCH 0243/2196] Build on aarch64-linux --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 6136f650d95..294af54cd61 100644 --- a/release.nix +++ b/release.nix @@ -7,7 +7,7 @@ let pkgs = import {}; - systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" ]; + 
systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]; jobs = rec { From dd3714f6eff44e0777c5e8afcc7a29e283986629 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 14:42:08 +0200 Subject: [PATCH 0244/2196] Doh --- src/libstore/globals.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 95c8859cfa9..f3a6038cafa 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -60,7 +60,7 @@ public: Setting keepGoing{this, false, "keep-going", "Whether to keep building derivations when another build fails."}; - Setting tryFallback{this, tryFallback, "build-fallback", + Setting tryFallback{this, false, "build-fallback", "Whether to fall back to building when substitution fails."}; /* Whether to show build log output in real time. */ From f8a2e8a55203f2cc16d70ad43afcc186adaab6b3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Apr 2017 14:42:20 +0200 Subject: [PATCH 0245/2196] Shut up some warnings --- local.mk | 3 +-- src/boost/format/feed_args.hpp | 7 +++++++ src/libexpr/eval.cc | 2 +- src/libexpr/local.mk | 2 -- src/libstore/build.cc | 14 +++++++------- src/libstore/download.cc | 2 +- 6 files changed, 17 insertions(+), 13 deletions(-) diff --git a/local.mk b/local.mk index dc10e6870a8..0a225423741 100644 --- a/local.mk +++ b/local.mk @@ -7,8 +7,7 @@ dist-files += configure config.h.in nix.spec perl/configure clean-files += Makefile.config -GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr \ - -Wno-unneeded-internal-declaration +GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr $(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644))) diff --git a/src/boost/format/feed_args.hpp b/src/boost/format/feed_args.hpp index 3d0b47b4a12..cdd57fdf2bf 100644 --- a/src/boost/format/feed_args.hpp +++ b/src/boost/format/feed_args.hpp @@ -37,6 +37,13 @@ namespace { os.str(emptyStr); } + void do_pad( std::string & s, + std::streamsize w, + const char c, + std::ios::fmtflags f, + bool center) + __attribute__ ((unused)); + void do_pad( std::string & s, std::streamsize w, const char c, diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f6cdedb3797..5e1ae63c482 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -202,7 +202,7 @@ void initGC() GC_INIT(); - GC_oom_fn = oomHandler; + GC_set_oom_fn(oomHandler); /* Set the initial heap size to something fairly big (25% of physical RAM, up to a maximum of 384 MiB) so that in most cases diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index 620050a13b0..daa3258f0d3 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -6,8 +6,6 @@ libexpr_DIR := $(d) libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc -libexpr_CXXFLAGS := -Wno-deprecated-register - libexpr_LIBS = libutil libstore libformat libexpr_LDFLAGS = diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 33c9e37047f..9bf1ab5aa58 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3376,10 +3376,10 @@ void SubstitutionGoal::tryToRun() trace("trying to run"); /* Make sure that we are allowed to start a build. Note that even - is maxBuildJobs == 0 (no local builds allowed), we still allow + if maxBuildJobs == 0 (no local builds allowed), we still allow a substituter to run. 
This is because substitutions cannot be distributed to another machine via the build hook. */ - if (worker.getNrLocalBuilds() >= (settings.maxBuildJobs == 0 ? 1 : settings.maxBuildJobs)) { + if (worker.getNrLocalBuilds() >= std::min(1U, (unsigned int) settings.maxBuildJobs)) { worker.waitForBuildSlot(shared_from_this()); return; } @@ -3660,7 +3660,7 @@ void Worker::run(const Goals & _topGoals) if (!children.empty() || !waitingForAWhile.empty()) waitForInput(); else { - if (awake.empty() && settings.maxBuildJobs == 0) throw Error( + if (awake.empty() && 0 == settings.maxBuildJobs) throw Error( "unable to start any build; either increase ‘--max-jobs’ " "or enable distributed builds"); assert(!awake.empty()); @@ -3697,9 +3697,9 @@ void Worker::waitForInput() auto nearest = steady_time_point::max(); // nearest deadline for (auto & i : children) { if (!i.respectTimeouts) continue; - if (settings.maxSilentTime != 0) + if (0 != settings.maxSilentTime) nearest = std::min(nearest, i.lastOutput + std::chrono::seconds(settings.maxSilentTime)); - if (settings.buildTimeout != 0) + if (0 != settings.buildTimeout) nearest = std::min(nearest, i.timeStarted + std::chrono::seconds(settings.buildTimeout)); } if (nearest != steady_time_point::max()) { @@ -3777,7 +3777,7 @@ void Worker::waitForInput() } if (goal->getExitCode() == Goal::ecBusy && - settings.maxSilentTime != 0 && + 0 != settings.maxSilentTime && j->respectTimeouts && after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime)) { @@ -3788,7 +3788,7 @@ void Worker::waitForInput() } else if (goal->getExitCode() == Goal::ecBusy && - settings.buildTimeout != 0 && + 0 != settings.buildTimeout && j->respectTimeouts && after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout)) { diff --git a/src/libstore/download.cc b/src/libstore/download.cc index d073e870b4e..42fd05bd80d 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -369,7 +369,7 @@ struct CurlDownloader : public Downloader curl_multi_setopt(curlm, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX); #endif curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS, - settings.binaryCachesParallelConnections); + settings.binaryCachesParallelConnections.get()); enableHttp2 = settings.enableHttp2; From 9622d00afa6e94ef3ca026f9e15a7d122566e7d0 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Mon, 17 Apr 2017 10:33:53 +0100 Subject: [PATCH 0246/2196] Manual: document tryEval --- doc/manual/expressions/builtins.xml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index e9baff65961..8a18b71008c 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -1163,6 +1163,19 @@ stdenv.mkDerivation (rec { + builtins.tryEval + e + + Try to evaluate e. + Return a set containing the attributes success + (true if e evaluated + successfully, false if an error was thrown) and + value, equalling e + if successful and false otherwise. + + + + builtins.typeOf e From b0cb11722626e906a73f10dd9a0c9eea29faf43a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Apr 2017 14:18:26 +0200 Subject: [PATCH 0247/2196] getDerivations(): Filter out packages with bad derivation names In particular, this disallows attribute names containing dots or starting with dots. Hydra already disallowed these. 
This affects the following packages in Nixpkgs master: 2048-in-terminal 2bwm 389-ds-base 90secondportraits lispPackages.3bmd lispPackages.hu.dwim.asdf lispPackages.hu.dwim.def Closes #1342. --- src/libexpr/get-drvs.cc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 5342739c53c..ae9fb0e5ec3 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -3,6 +3,7 @@ #include "eval-inline.hh" #include +#include namespace nix { @@ -262,6 +263,9 @@ static string addToPath(const string & s1, const string & s2) } +static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*"); + + static void getDerivations(EvalState & state, Value & vIn, const string & pathPrefix, Bindings & autoArgs, DrvInfos & drvs, Done & done, @@ -286,6 +290,8 @@ static void getDerivations(EvalState & state, Value & vIn, precedence). */ for (auto & i : v.attrs->lexicographicOrder()) { Activity act(*logger, lvlDebug, format("evaluating attribute ‘%1%’") % i->name); + if (!std::regex_match(std::string(i->name), attrRegex)) + continue; string pathPrefix2 = addToPath(pathPrefix, i->name); if (combineChannels) getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); From 9cc8047f44b3d333c2c55c140165bfd507f4d41e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Apr 2017 14:54:52 +0200 Subject: [PATCH 0248/2196] Reimplement connect-timeout Fixes #1339. --- src/libstore/download.cc | 2 ++ src/libstore/globals.hh | 3 +++ 2 files changed, 5 insertions(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 42fd05bd80d..4d502219ed8 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -249,6 +249,8 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); } + curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, settings.connectTimeout.get()); + /* If no file exist in the specified path, curl continues to work anyway as if netrc support was disabled. */ curl_easy_setopt(req, CURLOPT_NETRC_FILE, settings.netrcFile.get().c_str()); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index f3a6038cafa..72863920de9 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -270,6 +270,9 @@ public: struct CaseHackTag { }; Setting useCaseHack{this, nix::useCaseHack, "use-case-hack", "Whether to enable a Darwin-specific hack for dealing with file name collisions."}; + + Setting connectTimeout{this, 0, "connect-timeout", + "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."}; }; From 00b6c6d0c3047e6e0b2ab6ea8fef9bef94586ce4 Mon Sep 17 00:00:00 2001 From: kballou Date: Wed, 19 Apr 2017 09:27:28 -0600 Subject: [PATCH 0249/2196] Fix small grammar issue about page Fix subject-verb agreement issue in introduction/about. --- doc/manual/introduction/about-nix.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/introduction/about-nix.xml b/doc/manual/introduction/about-nix.xml index 0c58984ac48..e6dfb7a5a56 100644 --- a/doc/manual/introduction/about-nix.xml +++ b/doc/manual/introduction/about-nix.xml @@ -93,7 +93,7 @@ time window in which the package has some files from the old version and some files from the new version — which would be bad because a program might well crash if it’s started during that period. -And since package aren’t overwritten, the old versions are still +And since packages aren’t overwritten, the old versions are still there after an upgrade. 
This means that you can roll back to the old version: From 1559c596f6301628bbf04468cb897ea0a0010c1e Mon Sep 17 00:00:00 2001 From: Armijn Hemel Date: Wed, 19 Apr 2017 19:10:12 +0200 Subject: [PATCH 0250/2196] document option --- doc/manual/command-ref/nix-env.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/command-ref/nix-env.xml b/doc/manual/command-ref/nix-env.xml index 85f10e0760b..8462cf8a027 100644 --- a/doc/manual/command-ref/nix-env.xml +++ b/doc/manual/command-ref/nix-env.xml @@ -1136,7 +1136,7 @@ user environment elements, etc. --> Print all of the meta-attributes of the derivation. This option is only available with - . + or . From 76cb3c702cc1769438940b65b83971f483cca062 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 13:20:49 +0200 Subject: [PATCH 0251/2196] Reimplement trusted-substituters (aka trusted-binary-caches) --- src/libstore/globals.hh | 4 +++ src/nix-daemon/nix-daemon.cc | 48 +++++++++++++++++++++++++++++++----- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 72863920de9..a5d5a3f5057 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -239,6 +239,10 @@ public: "Additional URIs of substituters.", {"extra-binary-caches"}}; + Setting trustedSubstituters{this, {}, "trusted-substituters", + "Disabled substituters that may be enabled via the substituters option by untrusted users.", + {"trusted-binary-caches"}}; + Setting trustedUsers{this, {"root"}, "trusted-users", "Which users or groups are trusted to ask the daemon to do unsafe things."}; diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 1389353bb5d..5c2641eac6f 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -448,20 +448,56 @@ static void performOp(ref store, bool trusted, unsigned int clientVe readInt(from); // obsolete printBuildTrace settings.buildCores = readInt(from); settings.useSubstitutes = readInt(from); + + StringMap overrides; if (GET_PROTOCOL_MINOR(clientVersion) >= 12) { unsigned int n = readInt(from); for (unsigned int i = 0; i < n; i++) { string name = readString(from); string value = readString(from); - try { - if (trusted || name == "build-timeout") - settings.set(name, value); - } catch (UsageError & e) { - warn(e.what()); - } + overrides.emplace(name, value); } } + startWork(); + + for (auto & i : overrides) { + auto & name(i.first); + auto & value(i.second); + + auto setSubstituters = [&](Setting & res) { + if (name != res.name && res.aliases.count(name) == 0) + return false; + StringSet trusted = settings.trustedSubstituters; + for (auto & s : settings.substituters.get()) + trusted.insert(s); + Strings subs; + auto ss = tokenizeString(value); + for (auto & s : ss) + if (trusted.count(s)) + subs.push_back(s); + else + warn("ignoring untrusted substituter '%s'", s); + res = subs; + return true; + }; + + try { + if (trusted + || name == settings.buildTimeout.name + || name == settings.connectTimeout.name) + settings.set(name, value); + else if (setSubstituters(settings.substituters)) + ; + else if (setSubstituters(settings.extraSubstituters)) + ; + else + debug("ignoring untrusted setting '%s'", name); + } catch (UsageError & e) { + warn(e.what()); + } + } + stopWork(); break; } From 4222402219ca3a356d23e05e0474f8648b111d91 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 13:39:01 +0200 Subject: [PATCH 0252/2196] nix.conf man page: binary-caches -> substituters --- 
doc/manual/command-ref/conf-file.xml | 34 +++++++++++----------------- 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 3de9647aa4e..73e0017ccef 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -334,15 +334,16 @@ flag, e.g. --option gc-keep-outputs false. - binary-caches + substituters - A list of URLs of binary caches, separated by + A list of URLs of substituters, separated by whitespace. The default is https://cache.nixos.org. + - trusted-binary-caches + trusted-substituters - A list of URLs of binary caches, separated by + A list of URLs of substituters, separated by whitespace. These are not used by default, but can be enabled by users of the Nix daemon by specifying --option - binary-caches urls on the + substituters urls on the command line. Unprivileged users are only allowed to pass a - subset of the URLs listed in binary-caches and - trusted-binary-caches. + subset of the URLs listed in substituters and + trusted-substituters. - extra-binary-caches + extra-substituters Additional binary caches appended to those - specified in and - . When used by unprivileged - users, untrusted binary caches (i.e. those not listed in - ) are silently + specified in . When used by + unprivileged users, untrusted substituters (i.e. those not listed + in ) are silently ignored. @@ -412,15 +413,6 @@ flag, e.g. --option gc-keep-outputs false. - verify-https-binary-caches - - Whether HTTPS binary caches are required to have a - certificate that can be verified. Defaults to - true. - - - - netrc-file If set to an absolute path to a netrc From 562585e901d9e5f2ef62be11c2e74badcacb1f50 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 14:04:00 +0200 Subject: [PATCH 0253/2196] binary-caches-parallel-connections -> http-connections --- doc/manual/command-ref/conf-file.xml | 2 +- src/libstore/globals.hh | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 73e0017ccef..79e18de9b17 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -404,7 +404,7 @@ flag, e.g. --option gc-keep-outputs false. - binary-caches-parallel-connections + http-connections The maximum number of parallel TCP connections used to fetch files from binary caches and by other downloads. 
It diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index a5d5a3f5057..de64e982659 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -216,8 +216,9 @@ public: Setting secretKeyFiles{this, {}, "secret-key-files", "Secret keys with which to sign local builds."}; - Setting binaryCachesParallelConnections{this, 25, "binary-caches-parallel-connections", - "Number of parallel connections to binary caches."}; + Setting binaryCachesParallelConnections{this, 25, "http-connections", + "Number of parallel HTTP connections.", + {"binary-caches-parallel-connections"}}; Setting enableHttp2{this, true, "enable-http2", "Whether to enable HTTP/2 support."}; From f05d5f89ff4ec52ed2f6d576b2b2323b5292f815 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 14:58:16 +0200 Subject: [PATCH 0254/2196] Read per-user settings from ~/.config/nix/nix.conf --- doc/manual/command-ref/conf-file.xml | 36 +++++++++++++++++++++------- src/libstore/globals.cc | 12 +++++----- src/libstore/globals.hh | 4 ---- src/libstore/remote-store.cc | 2 +- src/libutil/config.cc | 13 ++++++++-- src/libutil/config.hh | 10 ++++++-- src/libutil/util.cc | 12 ++++++++++ src/libutil/util.hh | 5 +++- 8 files changed, 70 insertions(+), 24 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 79e18de9b17..616983bc7f0 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -17,13 +17,32 @@ Description -A number of persistent settings of Nix are stored in the file -sysconfdir/nix/nix.conf or -$NIX_CONF_DIR/nix.conf if NIX_CONF_DIR is set. -This file is a list of name = +Nix reads settings from two configuration files: + + + + + The system-wide configuration file + sysconfdir/nix/nix.conf + (i.e. /etc/nix/nix.conf on most systems), or + $NIX_CONF_DIR/nix.conf if + NIX_CONF_DIR is set. + + + + The user configuration file + $XDG_CONFIG_HOME/nix/nix.conf, or + ~/.config/nix/nix.conf if + XDG_CONFIG_HOME is not set. + + + + +The configuration files consist of +name = value pairs, one per line. -Comments start with a # character. Here is an example -configuration file: +Comments start with a # character. Here is an +example configuration file: gc-keep-outputs = true # Nice for developers @@ -31,8 +50,9 @@ gc-keep-derivations = true # Idem env-keep-derivations = false -You can override settings using the -flag, e.g. --option gc-keep-outputs false. +You can override settings on the command line using the + flag, e.g. --option gc-keep-outputs +false. The following settings are currently available: diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index bb61daa5164..6b9d077469c 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -53,19 +53,19 @@ Settings::Settings() void Settings::loadConfFile() { applyConfigFile(nixConfDir + "/nix.conf"); + + /* We only want to send overrides to the daemon, i.e. stuff from + ~/.nix/nix.conf or the command line. 
*/ + resetOverriden(); + + applyConfigFile(getConfigDir() + "/nix/nix.conf"); } void Settings::set(const string & name, const string & value) { - overrides[name] = value; Config::set(name, value); } -StringMap Settings::getOverrides() -{ - return overrides; -} - unsigned int Settings::getDefaultCores() { return std::max(1U, std::thread::hardware_concurrency()); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index de64e982659..d3ecaadb639 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -15,8 +15,6 @@ extern bool useCaseHack; // FIXME class Settings : public Config { - StringMap overrides; - unsigned int getDefaultCores(); public: @@ -27,8 +25,6 @@ public: void set(const string & name, const string & value); - StringMap getOverrides(); - Path nixPrefix; /* The directory where we store sources and derived files. */ diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index da3c8eb8d89..bc9ef3d47c5 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -166,7 +166,7 @@ void RemoteStore::setOptions(Connection & conn) << settings.useSubstitutes; if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 12) { - StringMap overrides = settings.getOverrides(); + auto overrides = settings.getSettings(true); conn.to << overrides.size(); for (auto & i : overrides) conn.to << i.first << i.second; diff --git a/src/libutil/config.cc b/src/libutil/config.cc index e7a810cec4d..bf137299718 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -9,6 +9,7 @@ void Config::set(const std::string & name, const std::string & value) if (i == _settings.end()) throw UsageError("unknown setting '%s'", name); i->second.setting->set(value); + i->second.setting->overriden = true; } void Config::addSetting(AbstractSetting * setting) @@ -22,6 +23,7 @@ void Config::addSetting(AbstractSetting * setting) auto i = initials.find(setting->name); if (i != initials.end()) { setting->set(i->second); + setting->overriden = true; initials.erase(i); set = true; } @@ -34,6 +36,7 @@ void Config::addSetting(AbstractSetting * setting) alias, setting->name); else { setting->set(i->second); + setting->overriden = true; initials.erase(i); set = true; } @@ -47,11 +50,11 @@ void Config::warnUnknownSettings() warn("unknown setting '%s'", i.first); } -StringMap Config::getSettings() +StringMap Config::getSettings(bool overridenOnly) { StringMap res; for (auto & opt : _settings) - if (!opt.second.isAlias) + if (!opt.second.isAlias && (!overridenOnly || opt.second.setting->overriden)) res.emplace(opt.first, opt.second.setting->to_string()); return res; } @@ -94,6 +97,12 @@ void Config::applyConfigFile(const Path & path, bool fatal) } catch (SysError &) { } } +void Config::resetOverriden() +{ + for (auto & s : _settings) + s.second.setting->overriden = false; +} + AbstractSetting::AbstractSetting( const std::string & name, const std::string & description, diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 6c8612f675c..952bf04b8a2 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -51,9 +51,11 @@ public: void warnUnknownSettings(); - StringMap getSettings(); + StringMap getSettings(bool overridenOnly = false); void applyConfigFile(const Path & path, bool fatal = false); + + void resetOverriden(); }; class AbstractSetting @@ -68,6 +70,8 @@ public: int created = 123; + bool overriden = false; + protected: AbstractSetting( @@ -78,7 +82,7 @@ protected: virtual ~AbstractSetting() { // Check against a gcc miscompilation causing our constructor - 
// not to run. + // not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431). assert(created == 123); } @@ -88,6 +92,8 @@ protected: bool parseBool(const std::string & str); std::string printBool(bool b); + + bool isOverriden() { return overriden; } }; struct DefaultSettingTag { }; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 0bd51afd1a9..37dfccd0669 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -429,6 +429,18 @@ Path getCacheDir() } +Path getConfigDir() +{ + Path configDir = getEnv("XDG_CONFIG_HOME"); + if (configDir.empty()) { + Path homeDir = getEnv("HOME"); + if (homeDir.empty()) throw Error("$XDG_CONFIG_HOME and $HOME are not set"); + configDir = homeDir + "/.config"; + } + return configDir; +} + + Paths createDirs(const Path & path) { Paths created; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 0e6941e4a8d..121423cdaa3 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -110,9 +110,12 @@ void deletePath(const Path & path, unsigned long long & bytesFreed); Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix", bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755); -/* Return the path to $XDG_CACHE_HOME/.cache. */ +/* Return $XDG_CACHE_HOME or $HOME/.cache. */ Path getCacheDir(); +/* Return $XDG_CONFIG_HOME or $HOME/.config. */ +Path getConfigDir(); + /* Create a directory and all its parents, if necessary. Returns the list of created directories, in order of creation. */ Paths createDirs(const Path & path); From 4410e9d995bcd53a7a4cff0bbee3917375adcba3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 16:52:53 +0200 Subject: [PATCH 0255/2196] Setting: Remove "Tag" template argument --- src/libstore/globals.cc | 22 ++------------- src/libstore/globals.hh | 39 +++++++++++++++++++++++--- src/libutil/config.cc | 53 ++++++++++++++---------------------- src/libutil/config.hh | 48 ++++++++++++++++++++------------ src/nix-daemon/nix-daemon.cc | 2 +- 5 files changed, 90 insertions(+), 74 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 6b9d077469c..3242ef9d635 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -73,7 +73,7 @@ unsigned int Settings::getDefaultCores() const string nixVersion = PACKAGE_VERSION; -template<> void Setting::set(const std::string & str) +template<> void BaseSetting::set(const std::string & str) { if (str == "true") value = smEnabled; else if (str == "relaxed") value = smRelaxed; @@ -81,7 +81,7 @@ template<> void Setting::set(const std::string & str) else throw UsageError("option '%s' has invalid value '%s'", name, str); } -template<> std::string Setting::to_string() +template<> std::string BaseSetting::to_string() { if (value == smEnabled) return "true"; else if (value == smRelaxed) return "relaxed"; @@ -89,27 +89,11 @@ template<> std::string Setting::to_string() else abort(); } -template<> void Setting::set(const std::string & str) +void MaxBuildJobsSetting::set(const std::string & str) { if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency()); else if (!string2Int(str, value)) throw UsageError("configuration setting ‘%s’ should be ‘auto’ or an integer", name); } -template<> std::string Setting::to_string() -{ - return std::to_string(value); -} - -template<> void Setting::set(const std::string & str) -{ - value = parseBool(str); - nix::useCaseHack = true; -} - -template<> std::string Setting::to_string() -{ - return printBool(value); -} - } diff --git a/src/libstore/globals.hh 
b/src/libstore/globals.hh index d3ecaadb639..b4f44de2e65 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -13,6 +13,39 @@ typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode; extern bool useCaseHack; // FIXME +struct CaseHackSetting : public BaseSetting +{ + CaseHackSetting(Config * options, + const std::string & name, + const std::string & description, + const std::set & aliases = {}) + : BaseSetting(useCaseHack, name, description, aliases) + { + options->addSetting(this); + } + + void set(const std::string & str) override + { + BaseSetting::set(str); + nix::useCaseHack = true; + } +}; + +struct MaxBuildJobsSetting : public BaseSetting +{ + MaxBuildJobsSetting(Config * options, + unsigned int def, + const std::string & name, + const std::string & description, + const std::set & aliases = {}) + : BaseSetting(def, name, description, aliases) + { + options->addSetting(this); + } + + void set(const std::string & str) override; +}; + class Settings : public Config { unsigned int getDefaultCores(); @@ -66,8 +99,7 @@ public: the log to show if a build fails. */ size_t logLines = 10; - struct MaxBuildJobsTag { }; - Setting maxBuildJobs{this, 1, "build-max-jobs", + MaxBuildJobsSetting maxBuildJobs{this, 1, "build-max-jobs", "Maximum number of parallel build jobs. \"auto\" means use number of cores."}; Setting buildCores{this, getDefaultCores(), "build-cores", @@ -268,8 +300,7 @@ public: Setting enableImportFromDerivation{this, true, "allow-import-from-derivation", "Whether the evaluator allows importing the result of a derivation."}; - struct CaseHackTag { }; - Setting useCaseHack{this, nix::useCaseHack, "use-case-hack", + CaseHackSetting useCaseHack{this, "use-case-hack", "Whether to enable a Darwin-specific hack for dealing with file name collisions."}; Setting connectTimeout{this, 0, "connect-timeout", diff --git a/src/libutil/config.cc b/src/libutil/config.cc index bf137299718..72b6cf80684 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -111,83 +111,72 @@ AbstractSetting::AbstractSetting( { } -template<> void Setting::set(const std::string & str) +template<> void BaseSetting::set(const std::string & str) { value = str; } -template<> std::string Setting::to_string() +template<> std::string BaseSetting::to_string() { return value; } -template -void Setting::set(const std::string & str) +template +void BaseSetting::set(const std::string & str) { static_assert(std::is_integral::value, "Integer required."); if (!string2Int(str, value)) throw UsageError("setting '%s' has invalid value '%s'", name, str); } -template -std::string Setting::to_string() +template +std::string BaseSetting::to_string() { static_assert(std::is_integral::value, "Integer required."); return std::to_string(value); } -bool AbstractSetting::parseBool(const std::string & str) +template<> void BaseSetting::set(const std::string & str) { if (str == "true" || str == "yes" || str == "1") - return true; + value = true; else if (str == "false" || str == "no" || str == "0") - return false; + value = false; else throw UsageError("Boolean setting '%s' has invalid value '%s'", name, str); } -template<> void Setting::set(const std::string & str) +template<> std::string BaseSetting::to_string() { - value = parseBool(str); + return value ? "true" : "false"; } -std::string AbstractSetting::printBool(bool b) -{ - return b ? 
"true" : "false"; -} - - -template<> std::string Setting::to_string() -{ - return printBool(value); -} - -template<> void Setting::set(const std::string & str) +template<> void BaseSetting::set(const std::string & str) { value = tokenizeString(str); } -template<> std::string Setting::to_string() +template<> std::string BaseSetting::to_string() { return concatStringsSep(" ", value); } -template<> void Setting::set(const std::string & str) +template<> void BaseSetting::set(const std::string & str) { value = tokenizeString(str); } -template<> std::string Setting::to_string() +template<> std::string BaseSetting::to_string() { return concatStringsSep(" ", value); } -template class Setting; -template class Setting; -template class Setting; -template class Setting; -template class Setting; -template class Setting; +template class BaseSetting; +template class BaseSetting; +template class BaseSetting; +template class BaseSetting; +template class BaseSetting; +template class BaseSetting; void PathSetting::set(const std::string & str) { diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 952bf04b8a2..130f59e2bd8 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -90,17 +90,12 @@ protected: virtual std::string to_string() = 0; - bool parseBool(const std::string & str); - std::string printBool(bool b); - bool isOverriden() { return overriden; } }; -struct DefaultSettingTag { }; - /* A setting of type T. */ -template -class Setting : public AbstractSetting +template +class BaseSetting : public AbstractSetting { protected: @@ -108,23 +103,21 @@ protected: public: - Setting(Config * options, - const T & def, + BaseSetting(const T & def, const std::string & name, const std::string & description, const std::set & aliases = {}) : AbstractSetting(name, description, aliases) , value(def) - { - options->addSetting(this); - } + { } operator const T &() const { return value; } operator T &() { return value; } const T & get() const { return value; } bool operator ==(const T & v2) const { return value == v2; } bool operator !=(const T & v2) const { return value != v2; } - void operator =(const T & v) { value = v; } + void operator =(const T & v) { assign(v); } + virtual void assign(const T & v) { value = v; } void set(const std::string & str) override; @@ -132,18 +125,35 @@ public: }; template -std::ostream & operator <<(std::ostream & str, const Setting & opt) +std::ostream & operator <<(std::ostream & str, const BaseSetting & opt) { str << (const T &) opt; return str; } template -bool operator ==(const T & v1, const Setting & v2) { return v1 == (const T &) v2; } +bool operator ==(const T & v1, const BaseSetting & v2) { return v1 == (const T &) v2; } + +template +class Setting : public BaseSetting +{ +public: + Setting(Config * options, + const T & def, + const std::string & name, + const std::string & description, + const std::set & aliases = {}) + : BaseSetting(def, name, description, aliases) + { + options->addSetting(this); + } + + void operator =(const T & v) { this->assign(v); } +}; /* A special setting for Paths. These are automatically canonicalised (e.g. "/foo//bar/" becomes "/foo/bar"). 
*/ -class PathSetting : public Setting +class PathSetting : public BaseSetting { bool allowEmpty; @@ -155,15 +165,17 @@ public: const std::string & name, const std::string & description, const std::set & aliases = {}) - : Setting(options, def, name, description, aliases) + : BaseSetting(def, name, description, aliases) , allowEmpty(allowEmpty) { - set(value); + options->addSetting(this); } void set(const std::string & str) override; Path operator +(const char * p) const { return value + p; } + + void operator =(const Path & v) { this->assign(v); } }; } diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 5c2641eac6f..07ad0b45b3e 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -440,7 +440,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe settings.keepGoing = readInt(from); settings.tryFallback = readInt(from); verbosity = (Verbosity) readInt(from); - settings.maxBuildJobs = readInt(from); + settings.maxBuildJobs.assign(readInt(from)); settings.maxSilentTime = readInt(from); settings.useBuildHook = readInt(from) != 0; settings.verboseBuild = lvlError == (Verbosity) readInt(from); From efa4bdbfcd1489527bcf6f20a49c9a3bca8bbf6b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 17:34:47 +0200 Subject: [PATCH 0256/2196] Improve nix show-config --json In particular, show descriptions. This could be used for manpage generation etc. --- src/libstore/globals.cc | 5 +++++ src/libutil/config.cc | 38 ++++++++++++++++++++++++++++++++++++ src/libutil/config.hh | 8 ++++++++ src/libutil/json.cc | 43 +++++++++++++---------------------------- src/libutil/json.hh | 11 +++-------- src/nix/show-config.cc | 3 +-- 6 files changed, 68 insertions(+), 40 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 3242ef9d635..953bf6aaaa0 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -89,6 +89,11 @@ template<> std::string BaseSetting::to_string() else abort(); } +template<> void BaseSetting::toJSON(JSONPlaceholder & out) +{ + AbstractSetting::toJSON(out); +} + void MaxBuildJobsSetting::set(const std::string & str) { if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency()); diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 72b6cf80684..62c6433c741 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -1,5 +1,6 @@ #include "config.hh" #include "args.hh" +#include "json.hh" namespace nix { @@ -103,6 +104,17 @@ void Config::resetOverriden() s.second.setting->overriden = false; } +void Config::toJSON(JSONObject & out) +{ + for (auto & s : _settings) + if (!s.second.isAlias) { + JSONObject out2(out.object(s.first)); + out2.attr("description", s.second.setting->description); + JSONPlaceholder out3(out2.placeholder("value")); + s.second.setting->toJSON(out3); + } +} + AbstractSetting::AbstractSetting( const std::string & name, const std::string & description, @@ -111,6 +123,17 @@ AbstractSetting::AbstractSetting( { } +void AbstractSetting::toJSON(JSONPlaceholder & out) +{ + out.write(to_string()); +} + +template +void BaseSetting::toJSON(JSONPlaceholder & out) +{ + out.write(value); +} + template<> void BaseSetting::set(const std::string & str) { value = str; @@ -161,6 +184,13 @@ template<> std::string BaseSetting::to_string() return concatStringsSep(" ", value); } +template<> void BaseSetting::toJSON(JSONPlaceholder & out) +{ + JSONList list(out.list()); + for (auto & s : value) + list.elem(s); +} + template<> void 
BaseSetting::set(const std::string & str) { value = tokenizeString(str); @@ -171,12 +201,20 @@ template<> std::string BaseSetting::to_string() return concatStringsSep(" ", value); } +template<> void BaseSetting::toJSON(JSONPlaceholder & out) +{ + JSONList list(out.list()); + for (auto & s : value) + list.elem(s); +} + template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting; +template class BaseSetting; void PathSetting::set(const std::string & str) { diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 130f59e2bd8..91962109100 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -9,6 +9,8 @@ namespace nix { class Args; class AbstractSetting; +class JSONPlaceholder; +class JSONObject; /* A class to simplify providing configuration settings. The typical use is to inherit Config and add Setting members: @@ -56,6 +58,8 @@ public: void applyConfigFile(const Path & path, bool fatal = false); void resetOverriden(); + + void toJSON(JSONObject & out); }; class AbstractSetting @@ -90,6 +94,8 @@ protected: virtual std::string to_string() = 0; + virtual void toJSON(JSONPlaceholder & out); + bool isOverriden() { return overriden; } }; @@ -122,6 +128,8 @@ public: void set(const std::string & str) override; std::string to_string() override; + + void toJSON(JSONPlaceholder & out) override; }; template diff --git a/src/libutil/json.cc b/src/libutil/json.cc index 6023d1d4fb8..b8b8ef9c8cc 100644 --- a/src/libutil/json.cc +++ b/src/libutil/json.cc @@ -19,49 +19,32 @@ void toJSON(std::ostream & str, const char * start, const char * end) str << '"'; } -void toJSON(std::ostream & str, const std::string & s) -{ - toJSON(str, s.c_str(), s.c_str() + s.size()); -} - void toJSON(std::ostream & str, const char * s) { if (!s) str << "null"; else toJSON(str, s, s + strlen(s)); } -void toJSON(std::ostream & str, unsigned long long n) -{ - str << n; -} - -void toJSON(std::ostream & str, unsigned long n) -{ - str << n; -} - -void toJSON(std::ostream & str, long n) -{ - str << n; -} +template<> void toJSON(std::ostream & str, const int & n) { str << n; } +template<> void toJSON(std::ostream & str, const unsigned int & n) { str << n; } +template<> void toJSON(std::ostream & str, const long & n) { str << n; } +template<> void toJSON(std::ostream & str, const unsigned long & n) { str << n; } +template<> void toJSON(std::ostream & str, const long long & n) { str << n; } +template<> void toJSON(std::ostream & str, const unsigned long long & n) { str << n; } +template<> void toJSON(std::ostream & str, const float & n) { str << n; } -void toJSON(std::ostream & str, unsigned int n) +template<> void toJSON(std::ostream & str, const std::string & s) { - str << n; -} - -void toJSON(std::ostream & str, int n) -{ - str << n; + toJSON(str, s.c_str(), s.c_str() + s.size()); } -void toJSON(std::ostream & str, double f) +template<> void toJSON(std::ostream & str, const bool & b) { - str << f; + str << (b ? "true" : "false"); } -void toJSON(std::ostream & str, bool b) +template<> void toJSON(std::ostream & str, const std::nullptr_t & b) { - str << (b ? 
"true" : "false"); + str << "null"; } JSONWriter::JSONWriter(std::ostream & str, bool indent) diff --git a/src/libutil/json.hh b/src/libutil/json.hh index 03eecb73258..595e9bbe349 100644 --- a/src/libutil/json.hh +++ b/src/libutil/json.hh @@ -7,15 +7,10 @@ namespace nix { void toJSON(std::ostream & str, const char * start, const char * end); -void toJSON(std::ostream & str, const std::string & s); void toJSON(std::ostream & str, const char * s); -void toJSON(std::ostream & str, unsigned long long n); -void toJSON(std::ostream & str, unsigned long n); -void toJSON(std::ostream & str, long n); -void toJSON(std::ostream & str, unsigned int n); -void toJSON(std::ostream & str, int n); -void toJSON(std::ostream & str, double f); -void toJSON(std::ostream & str, bool b); + +template +void toJSON(std::ostream & str, const T & n); class JSONWriter { diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc index ba39e2bb29b..aade2adeace 100644 --- a/src/nix/show-config.cc +++ b/src/nix/show-config.cc @@ -31,8 +31,7 @@ struct CmdShowConfig : Command if (json) { // FIXME: use appropriate JSON types (bool, ints, etc). JSONObject jsonObj(std::cout, true); - for (auto & s : settings.getSettings()) - jsonObj.attr(s.first, s.second); + settings.toJSON(jsonObj); } else { for (auto & s : settings.getSettings()) std::cout << s.first + " = " + s.second + "\n"; From 749696e71c9d637a36ab09a1368dff01b2b1bda8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Apr 2017 19:11:45 +0200 Subject: [PATCH 0257/2196] Detect lsof Also, don't use lsof on Linux since it's not needed. Fixes #1328. --- Makefile.config.in | 1 + configure.ac | 1 + src/libstore/gc.cc | 11 ++++++++--- src/libstore/local.mk | 3 ++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index 53dca1fcf10..6948dad5a60 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -15,6 +15,7 @@ SQLITE3_LIBS = @SQLITE3_LIBS@ bash = @bash@ bindir = @bindir@ bro = @bro@ +lsof = @lsof@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ diff --git a/configure.ac b/configure.ac index 3e6a894e3b1..c7026cf954d 100644 --- a/configure.ac +++ b/configure.ac @@ -128,6 +128,7 @@ NEED_PROG(xz, xz) AC_PATH_PROG(dot, dot) AC_PATH_PROG(pv, pv, pv) AC_PATH_PROG(bro, bro, bro) +AC_PATH_PROG(lsof, lsof, lsof) NEED_PROG(cat, cat) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 0b03d61a789..b6d462d2bff 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -426,22 +426,27 @@ void LocalStore::findRuntimeRoots(PathSet & roots) throw SysError("iterating /proc"); } +#if !defined(__linux__) try { - auto lsofRegex = std::regex(R"(^n(/.*)$)"); + printError("RUN LSOF %s", LSOF); + std::regex lsofRegex(R"(^n(/.*)$)"); auto lsofLines = - tokenizeString>(runProgram("lsof", true, { "-n", "-w", "-F", "n" }), "\n"); + tokenizeString>(runProgram(LSOF, true, { "-n", "-w", "-F", "n" }), "\n"); for (const auto & line : lsofLines) { - auto match = std::smatch{}; + std::smatch match; if (std::regex_match(line, match, lsofRegex)) paths.emplace(match[1]); } } catch (ExecError & e) { /* lsof not installed, lsof failed */ } +#endif +#if defined(__linux__) readFileRoots("/proc/sys/kernel/modprobe", paths); readFileRoots("/proc/sys/kernel/fbsplash", paths); readFileRoots("/proc/sys/kernel/poweroff_cmd", paths); +#endif for (auto & i : paths) if (isInStore(i)) { diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 9d5c04dca0c..4da20330cf3 100644 --- a/src/libstore/local.mk +++ 
b/src/libstore/local.mk @@ -27,7 +27,8 @@ libstore_CXXFLAGS = \ -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ -DNIX_BIN_DIR=\"$(bindir)\" \ - -DBASH_PATH="\"$(bash)\"" + -DBASH_PATH="\"$(bash)\"" \ + -DLSOF=\"$(lsof)\" $(d)/local-store.cc: $(d)/schema.sql.hh From 111d347237d7f313ab9acfafc0de859715c31564 Mon Sep 17 00:00:00 2001 From: James Earl Douglas Date: Thu, 20 Apr 2017 18:07:23 -0600 Subject: [PATCH 0258/2196] Drop misleading ellipses This portion of the quick start guide may lead to confusion for newcomers to Nix. This change clarifies the example to one that can be copied in its entirety. --- doc/manual/introduction/quick-start.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/introduction/quick-start.xml b/doc/manual/introduction/quick-start.xml index 5ae9f6ad543..aa239b7538b 100644 --- a/doc/manual/introduction/quick-start.xml +++ b/doc/manual/introduction/quick-start.xml @@ -40,7 +40,7 @@ libxslt-1.1.28 Install some packages from the channel: -$ nix-env -i hello ... +$ nix-env -i hello This should download pre-built packages; it should not build them locally (if it does, something went wrong). From d35231ec6017d72492a23b882b9275a60e1737a6 Mon Sep 17 00:00:00 2001 From: David McFarland Date: Fri, 21 Apr 2017 11:27:27 -0300 Subject: [PATCH 0259/2196] set _GNU_SOURCE on cygwin this is needed for pipe2() --- mk/lib.mk | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mk/lib.mk b/mk/lib.mk index bb82801d3b4..1da51d87973 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -53,8 +53,8 @@ BUILD_SHARED_LIBS ?= 1 ifeq ($(BUILD_SHARED_LIBS), 1) ifeq (CYGWIN,$(findstring CYGWIN,$(OS))) - GLOBAL_CFLAGS += -U__STRICT_ANSI__ - GLOBAL_CXXFLAGS += -U__STRICT_ANSI__ + GLOBAL_CFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE + GLOBAL_CXXFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE else GLOBAL_CFLAGS += -fPIC GLOBAL_CXXFLAGS += -fPIC From 804ac524891ead6aa70e27c093686b71391ea232 Mon Sep 17 00:00:00 2001 From: David McFarland Date: Fri, 21 Apr 2017 11:28:10 -0300 Subject: [PATCH 0260/2196] add helper function to set 'interruptThrown' this fixes a linker failure on cygwin 64 due to some bad interaction between tls and shared libraries. see: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=64697 --- src/libmain/shared.cc | 2 +- src/libutil/util.cc | 7 ++++++- src/libutil/util.hh | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 4747b9bf9b4..d6c1c0c9cb4 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -259,7 +259,7 @@ int handleExceptions(const string & programName, std::function fun) condition is discharged before we reach printMsg() below, since otherwise it will throw an (uncaught) exception. 
*/ - interruptThrown = true; + setInterruptThrown(); throw; } } catch (Exit & e) { diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 37dfccd0669..88a2f752c02 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -946,7 +946,12 @@ void closeOnExec(int fd) bool _isInterrupted = false; -thread_local bool interruptThrown = false; +static thread_local bool interruptThrown = false; + +void setInterruptThrown() +{ + interruptThrown = true; +} void _interrupted() { diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 121423cdaa3..ae40dcd4cd2 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -267,7 +267,7 @@ void closeOnExec(int fd); extern bool _isInterrupted; -extern thread_local bool interruptThrown; +void setInterruptThrown(); void _interrupted(); From 9b63bb88c8873d192b8b01608e5d230817dd3375 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Apr 2017 12:04:01 +0200 Subject: [PATCH 0261/2196] nix-shell -p: Use runCommandCC This restores pre-17.03 behaviour by making gcc available. --- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index b4206033cf5..bb031d51513 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -325,7 +325,7 @@ int main(int argc, char ** argv) if (packages) { instArgs.push_back("--expr"); std::ostringstream joined; - joined << "with import { }; runCommand \"shell\" { buildInputs = [ "; + joined << "with import { }; (pkgs.runCommandCC or pkgs.runCommand) \"shell\" { buildInputs = [ "; for (const auto & i : exprs) joined << '(' << i << ") "; joined << "]; } \"\""; From 66577a1c64ac5d9f07aa2c207c96e13077576a4e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Apr 2017 14:21:36 +0200 Subject: [PATCH 0262/2196] Factor out --json --- src/libmain/common-args.hh | 12 +++++++++++- src/nix/path-info.cc | 5 ++--- src/nix/show-config.cc | 5 +---- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/src/libmain/common-args.hh b/src/libmain/common-args.hh index 2c0d71edd81..a4de3dccf0a 100644 --- a/src/libmain/common-args.hh +++ b/src/libmain/common-args.hh @@ -12,7 +12,7 @@ struct MixCommonArgs : virtual Args struct MixDryRun : virtual Args { - bool dryRun; + bool dryRun = false; MixDryRun() { @@ -20,4 +20,14 @@ struct MixDryRun : virtual Args } }; +struct MixJSON : virtual Args +{ + bool json = false; + + MixJSON() + { + mkFlag(0, "json", "produce JSON output", &json); + } +}; + } diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 0f9a1125f2e..30b193798f6 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -2,25 +2,24 @@ #include "shared.hh" #include "store-api.hh" #include "json.hh" +#include "common-args.hh" #include #include using namespace nix; -struct CmdPathInfo : StorePathsCommand +struct CmdPathInfo : StorePathsCommand, MixJSON { bool showSize = false; bool showClosureSize = false; bool showSigs = false; - bool json = false; CmdPathInfo() { mkFlag('s', "size", "print size of the NAR dump of each path", &showSize); mkFlag('S', "closure-size", "print sum size of the NAR dumps of the closure of each path", &showClosureSize); mkFlag(0, "sigs", "show signatures", &showSigs); - mkFlag(0, "json", "produce JSON output", &json); } std::string name() override diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc index aade2adeace..e354891a82e 100644 --- a/src/nix/show-config.cc +++ b/src/nix/show-config.cc @@ -7,13 +7,10 @@ using namespace nix; -struct CmdShowConfig : 
Command +struct CmdShowConfig : Command, MixJSON { - bool json = false; - CmdShowConfig() { - mkFlag(0, "json", "produce JSON output", &json); } std::string name() override From 1bb87c0487ba2a10f20c07dfd828b5d043249e31 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Apr 2017 15:01:28 +0200 Subject: [PATCH 0263/2196] Remove debug statement --- src/libstore/gc.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index b6d462d2bff..3e7e42cbc96 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -428,7 +428,6 @@ void LocalStore::findRuntimeRoots(PathSet & roots) #if !defined(__linux__) try { - printError("RUN LSOF %s", LSOF); std::regex lsofRegex(R"(^n(/.*)$)"); auto lsofLines = tokenizeString>(runProgram(LSOF, true, { "-n", "-w", "-F", "n" }), "\n"); From bcecc990071fd36bb88c8fd29cb009ed4c04d6a2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 11:20:37 +0200 Subject: [PATCH 0264/2196] Restructure installables handling in the "nix" command --- src/libexpr/value.hh | 8 ++ src/nix/build.cc | 19 +--- src/nix/command.cc | 7 ++ src/nix/command.hh | 4 + src/nix/installables.cc | 233 +++++++++++++++++++++++++++++++--------- src/nix/installables.hh | 55 ++++++---- src/nix/log.cc | 35 +++--- src/nix/path-info.cc | 1 - src/nix/run.cc | 19 +--- 9 files changed, 257 insertions(+), 124 deletions(-) diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 802e8ed2ee7..9df516f062e 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -220,6 +220,14 @@ static inline void mkApp(Value & v, Value & left, Value & right) } +static inline void mkPrimOpApp(Value & v, Value & left, Value & right) +{ + v.type = tPrimOpApp; + v.app.left = &left; + v.app.right = &right; +} + + static inline void mkStringNoCopy(Value & v, const char * s) { v.type = tString; diff --git a/src/nix/build.cc b/src/nix/build.cc index 812464d7582..0a34c68f881 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -6,7 +6,7 @@ using namespace nix; -struct CmdBuild : StoreCommand, MixDryRun, MixInstallables +struct CmdBuild : MixDryRun, MixInstallables { CmdBuild() { @@ -24,22 +24,9 @@ struct CmdBuild : StoreCommand, MixDryRun, MixInstallables void run(ref store) override { - auto elems = evalInstallables(store); + auto paths = buildInstallables(store, dryRun); - PathSet pathsToBuild; - - for (auto & elem : elems) { - if (elem.isDrv) - pathsToBuild.insert(elem.drvPath); - else - pathsToBuild.insert(elem.outPaths.begin(), elem.outPaths.end()); - } - - printMissing(store, pathsToBuild); - - if (dryRun) return; - - store->buildPaths(pathsToBuild); + printInfo("build result: %s", showPaths(paths)); } }; diff --git a/src/nix/command.cc b/src/nix/command.cc index a1b2c120a5d..4034de96c16 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -79,6 +79,13 @@ StoreCommand::StoreCommand() mkFlag(0, "store", "store-uri", "URI of the Nix store to use", &storeUri); } +ref StoreCommand::getStore() +{ + if (!_store) + _store = createStore(); + return ref(_store); +} + ref StoreCommand::createStore() { return openStore(storeUri); diff --git a/src/nix/command.hh b/src/nix/command.hh index fa6c21abf8a..bb667ee325c 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -33,8 +33,12 @@ struct StoreCommand : virtual Command std::string storeUri; StoreCommand(); void run() override; + ref getStore(); virtual ref createStore(); virtual void run(ref) = 0; + +private: + std::shared_ptr _store; }; /* A command that operates on zero or more store paths. 
*/ diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 8341bbc5a3a..70007d62a29 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -6,16 +6,21 @@ #include "get-drvs.hh" #include "installables.hh" #include "store-api.hh" +#include "shared.hh" + +#include namespace nix { -Value * MixInstallables::buildSourceExpr(EvalState & state) +Value * MixInstallables::getSourceExpr(EvalState & state) { - Value * vRoot = state.allocValue(); + if (vSourceExpr) return vSourceExpr; + + vSourceExpr = state.allocValue(); if (file != "") { Expr * e = state.parseExprFromFile(resolveExprPath(lookupFileArg(state, file))); - state.eval(e, *vRoot); + state.eval(e, *vSourceExpr); } else { @@ -24,7 +29,7 @@ Value * MixInstallables::buildSourceExpr(EvalState & state) auto searchPath = state.getSearchPath(); - state.mkAttrs(*vRoot, searchPath.size()); + state.mkAttrs(*vSourceExpr, searchPath.size()); std::unordered_set seen; @@ -32,76 +37,208 @@ Value * MixInstallables::buildSourceExpr(EvalState & state) if (i.first == "") continue; if (seen.count(i.first)) continue; seen.insert(i.first); - if (!pathExists(i.second)) continue; - mkApp(*state.allocAttr(*vRoot, state.symbols.create(i.first)), +#if 0 + auto res = state.resolveSearchPathElem(i); + if (!res.first) continue; + if (!pathExists(res.second)) continue; + mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(i.first)), state.getBuiltin("import"), - mkString(*state.allocValue(), i.second)); + mkString(*state.allocValue(), res.second)); +#endif + Value * v1 = state.allocValue(); + mkPrimOpApp(*v1, state.getBuiltin("findFile"), state.getBuiltin("nixPath")); + Value * v2 = state.allocValue(); + mkApp(*v2, *v1, mkString(*state.allocValue(), i.first)); + mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(i.first)), + state.getBuiltin("import"), *v2); } - vRoot->attrs->sort(); + vSourceExpr->attrs->sort(); } - return vRoot; + return vSourceExpr; } -UserEnvElems MixInstallables::evalInstallables(ref store) +struct InstallableStoreDrv : Installable +{ + Path storePath; + + InstallableStoreDrv(const Path & storePath) : storePath(storePath) { } + + std::string what() override { return storePath; } + + PathSet toBuildable() override + { + return {storePath}; + } +}; + +struct InstallableStorePath : Installable { - UserEnvElems res; + Path storePath; + + InstallableStorePath(const Path & storePath) : storePath(storePath) { } + + std::string what() override { return storePath; } + + PathSet toBuildable() override + { + return {storePath}; + } +}; + +struct InstallableExpr : Installable +{ + MixInstallables & installables; + std::string text; + + InstallableExpr(MixInstallables & installables, const std::string & text) + : installables(installables), text(text) { } + + std::string what() override { return text; } + + PathSet toBuildable() override + { + auto state = installables.getEvalState(); + + auto v = toValue(*state); + + // FIXME + std::map autoArgs_; + Bindings & autoArgs(*evalAutoArgs(*state, autoArgs_)); + + DrvInfos drvs; + getDerivations(*state, *v, "", autoArgs, drvs, false); + + PathSet res; + + for (auto & i : drvs) + res.insert(i.queryDrvPath()); + + return res; + } + + Value * toValue(EvalState & state) override + { + auto v = state.allocValue(); + state.eval(state.parseExprFromString(text, absPath(".")), *v); + return v; + } +}; + +struct InstallableAttrPath : Installable +{ + MixInstallables & installables; + std::string attrPath; + + InstallableAttrPath(MixInstallables & installables, const std::string & 
attrPath) + : installables(installables), attrPath(attrPath) + { } + + std::string what() override { return attrPath; } + + PathSet toBuildable() override + { + auto state = installables.getEvalState(); + + auto v = toValue(*state); + + // FIXME + std::map autoArgs_; + Bindings & autoArgs(*evalAutoArgs(*state, autoArgs_)); + + DrvInfos drvs; + getDerivations(*state, *v, "", autoArgs, drvs, false); + + PathSet res; + + for (auto & i : drvs) + res.insert(i.queryDrvPath()); + + return res; + } + + Value * toValue(EvalState & state) override + { + auto source = installables.getSourceExpr(state); + + // FIXME + std::map autoArgs_; + Bindings & autoArgs(*evalAutoArgs(state, autoArgs_)); + + Value * v = findAlongAttrPath(state, attrPath, autoArgs, *source); + state.forceValue(*v); + + return v; + } +}; + +// FIXME: extend +std::string attrRegex = R"([A-Za-z_][A-Za-z0-9-_+]*)"; +static std::regex attrPathRegex(fmt(R"(%1%(\.%1%)*)", attrRegex)); + +std::vector> MixInstallables::parseInstallables(ref store, Strings installables) +{ + std::vector> result; for (auto & installable : installables) { if (std::string(installable, 0, 1) == "/") { if (store->isStorePath(installable)) { + if (isDerivation(installable)) + result.push_back(std::make_shared(installable)); + else + result.push_back(std::make_shared(installable)); + } - if (isDerivation(installable)) { - UserEnvElem elem; - // FIXME: handle empty case, drop version - elem.attrPath = {storePathToName(installable)}; - elem.isDrv = true; - elem.drvPath = installable; - res.push_back(elem); - } - - else { - UserEnvElem elem; - // FIXME: handle empty case, drop version - elem.attrPath = {storePathToName(installable)}; - elem.isDrv = false; - elem.outPaths = {installable}; - res.push_back(elem); - } + else { + result.push_back(std::make_shared( + store->toStorePath(store->followLinksToStore(installable)))); } - else - throw UsageError(format("don't know what to do with ‘%1%’") % installable); } - else { - - EvalState state({}, store); + else if (installable.compare(0, 1, "(") == 0) + result.push_back(std::make_shared(*this, installable)); - auto vRoot = buildSourceExpr(state); + else if (std::regex_match(installable, attrPathRegex)) + result.push_back(std::make_shared(*this, installable)); - std::map autoArgs_; - Bindings & autoArgs(*evalAutoArgs(state, autoArgs_)); + else + throw UsageError("don't know what to do with argument ‘%s’", installable); + } - Value & v(*findAlongAttrPath(state, installable, autoArgs, *vRoot)); - state.forceValue(v); + return result; +} - DrvInfos drvs; - getDerivations(state, v, "", autoArgs, drvs, false); +PathSet MixInstallables::buildInstallables(ref store, bool dryRun) +{ + PathSet buildables; - for (auto & i : drvs) { - UserEnvElem elem; - elem.isDrv = true; - elem.drvPath = i.queryDrvPath(); - res.push_back(elem); - } - } + for (auto & i : installables) { + auto b = i->toBuildable(); + buildables.insert(b.begin(), b.end()); } - return res; + printMissing(store, buildables); + + if (!dryRun) + store->buildPaths(buildables); + + return buildables; +} + +ref MixInstallables::getEvalState() +{ + if (!evalState) + evalState = std::make_shared(Strings{}, getStore()); + return ref(evalState); +} + +void MixInstallables::prepare() +{ + installables = parseInstallables(getStore(), _installables); } } diff --git a/src/nix/installables.hh b/src/nix/installables.hh index a58f7dc59bb..5f0b0a29224 100644 --- a/src/nix/installables.hh +++ b/src/nix/installables.hh @@ -1,48 +1,61 @@ #pragma once #include "args.hh" +#include 
"command.hh" namespace nix { -struct UserEnvElem -{ - Strings attrPath; +struct Value; +class EvalState; +class Expr; - // FIXME: should use boost::variant or so. - bool isDrv; +struct Installable +{ + virtual std::string what() = 0; - // Derivation case: - Path drvPath; - StringSet outputNames; + virtual PathSet toBuildable() + { + throw Error("argument ‘%s’ cannot be built", what()); + } - // Non-derivation case: - PathSet outPaths; + virtual Value * toValue(EvalState & state) + { + throw Error("argument ‘%s’ cannot be evaluated", what()); + } }; -typedef std::vector UserEnvElems; - -struct Value; -class EvalState; - -struct MixInstallables : virtual Args +struct MixInstallables : virtual Args, StoreCommand { - Strings installables; + std::vector> installables; Path file; MixInstallables() { mkFlag('f', "file", "file", "evaluate FILE rather than the default", &file); - expectArgs("installables", &installables); + expectArgs("installables", &_installables); } - UserEnvElems evalInstallables(ref store); - /* Return a value representing the Nix expression from which we are installing. This is either the file specified by ‘--file’, or an attribute set constructed from $NIX_PATH, e.g. ‘{ nixpkgs = import ...; bla = import ...; }’. */ - Value * buildSourceExpr(EvalState & state); + Value * getSourceExpr(EvalState & state); + + std::vector> parseInstallables(ref store, Strings installables); + + PathSet buildInstallables(ref store, bool dryRun); + + ref getEvalState(); + + void prepare() override; + +private: + + Strings _installables; + + std::shared_ptr evalState; + Value * vSourceExpr = 0; }; } diff --git a/src/nix/log.cc b/src/nix/log.cc index d8a3830e91c..75f3c1ab0d6 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -6,7 +6,7 @@ using namespace nix; -struct CmdLog : StoreCommand, MixInstallables +struct CmdLog : MixInstallables { CmdLog() { @@ -24,32 +24,23 @@ struct CmdLog : StoreCommand, MixInstallables void run(ref store) override { - auto elems = evalInstallables(store); - - PathSet paths; - - for (auto & elem : elems) { - if (elem.isDrv) - paths.insert(elem.drvPath); - else - paths.insert(elem.outPaths.begin(), elem.outPaths.end()); - } - auto subs = getDefaultSubstituters(); subs.push_front(store); - for (auto & path : paths) { - bool found = false; - for (auto & sub : subs) { - auto log = sub->getBuildLog(path); - if (!log) continue; - std::cout << *log; - found = true; - break; + for (auto & inst : installables) { + for (auto & path : inst->toBuildable()) { + bool found = false; + for (auto & sub : subs) { + auto log = sub->getBuildLog(path); + if (!log) continue; + std::cout << *log; + found = true; + break; + } + if (!found) + throw Error("build log of path ‘%s’ is not available", path); } - if (!found) - throw Error("build log of path ‘%s’ is not available", path); } } }; diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 30b193798f6..f1620923861 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -99,7 +99,6 @@ struct CmdPathInfo : StorePathsCommand, MixJSON } } - } }; diff --git a/src/nix/run.cc b/src/nix/run.cc index a30031ad07b..f3333b77780 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -13,7 +13,7 @@ using namespace nix; -struct CmdRun : StoreCommand, MixInstallables +struct CmdRun : MixInstallables { CmdRun() { @@ -31,20 +31,7 @@ struct CmdRun : StoreCommand, MixInstallables void run(ref store) override { - auto elems = evalInstallables(store); - - PathSet pathsToBuild; - - for (auto & elem : elems) { - if (elem.isDrv) - 
pathsToBuild.insert(elem.drvPath); - else - pathsToBuild.insert(elem.outPaths.begin(), elem.outPaths.end()); - } - - printMissing(store, pathsToBuild); - - store->buildPaths(pathsToBuild); + auto paths = buildInstallables(store, false); auto store2 = store.dynamic_pointer_cast(); @@ -104,7 +91,7 @@ struct CmdRun : StoreCommand, MixInstallables } PathSet outPaths; - for (auto & path : pathsToBuild) + for (auto & path : paths) if (isDerivation(path)) { Derivation drv = store->derivationFromPath(path); for (auto & output : drv.outputs) From 6267d748891b3c6e6a41b5bd1f6684ae8b88f31c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 11:23:47 +0200 Subject: [PATCH 0265/2196] Add "nix eval" command This replaces "nix-instantiate --eval". The result is evaluated strictly since this seems more useful. --- src/nix/eval.cc | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 src/nix/eval.cc diff --git a/src/nix/eval.cc b/src/nix/eval.cc new file mode 100644 index 00000000000..7a6bf07c353 --- /dev/null +++ b/src/nix/eval.cc @@ -0,0 +1,44 @@ +#include "command.hh" +#include "common-args.hh" +#include "installables.hh" +#include "shared.hh" +#include "store-api.hh" +#include "eval.hh" +#include "json.hh" +#include "value-to-json.hh" + +using namespace nix; + +struct CmdEval : MixJSON, MixInstallables +{ + std::string name() override + { + return "eval"; + } + + std::string description() override + { + return "evaluate a Nix expression"; + } + + void run(ref store) override + { + auto state = getEvalState(); + + auto jsonOut = json ? std::make_unique(std::cout) : nullptr; + + for (auto & i : installables) { + auto v = i->toValue(*state); + if (json) { + PathSet context; + auto jsonElem = jsonOut->placeholder(); + printValueAsJSON(*state, true, *v, jsonElem, context); + } else { + state->forceValueDeep(*v); + std::cout << *v << "\n"; + } + } + } +}; + +static RegisterCommand r1(make_ref()); From c769841bc4ecb9dd3d8456931fec78e102c3832f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 12:06:32 +0200 Subject: [PATCH 0266/2196] Move code around --- src/nix/build.cc | 3 +- src/nix/command.hh | 54 ++++++++++++++++++++++++++++++++++++ src/nix/eval.cc | 3 +- src/nix/installables.cc | 20 +++++++------- src/nix/installables.hh | 61 ----------------------------------------- src/nix/log.cc | 3 +- src/nix/run.cc | 3 +- src/nix/show-config.cc | 1 - 8 files changed, 68 insertions(+), 80 deletions(-) delete mode 100644 src/nix/installables.hh diff --git a/src/nix/build.cc b/src/nix/build.cc index 0a34c68f881..00bda1fd104 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -1,12 +1,11 @@ #include "command.hh" #include "common-args.hh" -#include "installables.hh" #include "shared.hh" #include "store-api.hh" using namespace nix; -struct CmdBuild : MixDryRun, MixInstallables +struct CmdBuild : MixDryRun, InstallablesCommand { CmdBuild() { diff --git a/src/nix/command.hh b/src/nix/command.hh index bb667ee325c..ee9485e5dd1 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -4,6 +4,9 @@ namespace nix { +struct Value; +class EvalState; + /* A command is an argument parser that can be executed by calling its run() method. 
*/ struct Command : virtual Args @@ -61,6 +64,57 @@ public: void run(ref store) override; }; +struct Installable +{ + virtual std::string what() = 0; + + virtual PathSet toBuildable() + { + throw Error("argument ‘%s’ cannot be built", what()); + } + + virtual Value * toValue(EvalState & state) + { + throw Error("argument ‘%s’ cannot be evaluated", what()); + } +}; + +/* A command that operates on a list of "installables", which can be + store paths, attribute paths, Nix expressions, etc. */ +struct InstallablesCommand : virtual Args, StoreCommand +{ + std::vector> installables; + Path file; + + InstallablesCommand() + { + mkFlag('f', "file", "file", "evaluate FILE rather than the default", &file); + expectArgs("installables", &_installables); + } + + /* Return a value representing the Nix expression from which we + are installing. This is either the file specified by ‘--file’, + or an attribute set constructed from $NIX_PATH, e.g. ‘{ nixpkgs + = import ...; bla = import ...; }’. */ + Value * getSourceExpr(EvalState & state); + + std::vector> parseInstallables(ref store, Strings installables); + + PathSet buildInstallables(ref store, bool dryRun); + + ref getEvalState(); + + void prepare() override; + +private: + + Strings _installables; + + std::shared_ptr evalState; + + Value * vSourceExpr = 0; +}; + typedef std::map> Commands; /* An argument parser that supports multiple subcommands, diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 7a6bf07c353..eb2b13a2dcd 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -1,6 +1,5 @@ #include "command.hh" #include "common-args.hh" -#include "installables.hh" #include "shared.hh" #include "store-api.hh" #include "eval.hh" @@ -9,7 +8,7 @@ using namespace nix; -struct CmdEval : MixJSON, MixInstallables +struct CmdEval : MixJSON, InstallablesCommand { std::string name() override { diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 70007d62a29..3cf4a6f8d32 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -1,10 +1,10 @@ +#include "command.hh" #include "attr-path.hh" #include "common-opts.hh" #include "derivations.hh" #include "eval-inline.hh" #include "eval.hh" #include "get-drvs.hh" -#include "installables.hh" #include "store-api.hh" #include "shared.hh" @@ -12,7 +12,7 @@ namespace nix { -Value * MixInstallables::getSourceExpr(EvalState & state) +Value * InstallablesCommand::getSourceExpr(EvalState & state) { if (vSourceExpr) return vSourceExpr; @@ -89,10 +89,10 @@ struct InstallableStorePath : Installable struct InstallableExpr : Installable { - MixInstallables & installables; + InstallablesCommand & installables; std::string text; - InstallableExpr(MixInstallables & installables, const std::string & text) + InstallableExpr(InstallablesCommand & installables, const std::string & text) : installables(installables), text(text) { } std::string what() override { return text; } @@ -128,10 +128,10 @@ struct InstallableExpr : Installable struct InstallableAttrPath : Installable { - MixInstallables & installables; + InstallablesCommand & installables; std::string attrPath; - InstallableAttrPath(MixInstallables & installables, const std::string & attrPath) + InstallableAttrPath(InstallablesCommand & installables, const std::string & attrPath) : installables(installables), attrPath(attrPath) { } @@ -177,7 +177,7 @@ struct InstallableAttrPath : Installable std::string attrRegex = R"([A-Za-z_][A-Za-z0-9-_+]*)"; static std::regex attrPathRegex(fmt(R"(%1%(\.%1%)*)", attrRegex)); -std::vector> 
MixInstallables::parseInstallables(ref store, Strings installables) +std::vector> InstallablesCommand::parseInstallables(ref store, Strings installables) { std::vector> result; @@ -212,7 +212,7 @@ std::vector> MixInstallables::parseInstallables(ref return result; } -PathSet MixInstallables::buildInstallables(ref store, bool dryRun) +PathSet InstallablesCommand::buildInstallables(ref store, bool dryRun) { PathSet buildables; @@ -229,14 +229,14 @@ PathSet MixInstallables::buildInstallables(ref store, bool dryRun) return buildables; } -ref MixInstallables::getEvalState() +ref InstallablesCommand::getEvalState() { if (!evalState) evalState = std::make_shared(Strings{}, getStore()); return ref(evalState); } -void MixInstallables::prepare() +void InstallablesCommand::prepare() { installables = parseInstallables(getStore(), _installables); } diff --git a/src/nix/installables.hh b/src/nix/installables.hh deleted file mode 100644 index 5f0b0a29224..00000000000 --- a/src/nix/installables.hh +++ /dev/null @@ -1,61 +0,0 @@ -#pragma once - -#include "args.hh" -#include "command.hh" - -namespace nix { - -struct Value; -class EvalState; -class Expr; - -struct Installable -{ - virtual std::string what() = 0; - - virtual PathSet toBuildable() - { - throw Error("argument ‘%s’ cannot be built", what()); - } - - virtual Value * toValue(EvalState & state) - { - throw Error("argument ‘%s’ cannot be evaluated", what()); - } -}; - -struct MixInstallables : virtual Args, StoreCommand -{ - std::vector> installables; - Path file; - - MixInstallables() - { - mkFlag('f', "file", "file", "evaluate FILE rather than the default", &file); - expectArgs("installables", &_installables); - } - - /* Return a value representing the Nix expression from which we - are installing. This is either the file specified by ‘--file’, - or an attribute set constructed from $NIX_PATH, e.g. ‘{ nixpkgs - = import ...; bla = import ...; }’. 
*/ - Value * getSourceExpr(EvalState & state); - - std::vector> parseInstallables(ref store, Strings installables); - - PathSet buildInstallables(ref store, bool dryRun); - - ref getEvalState(); - - void prepare() override; - -private: - - Strings _installables; - - std::shared_ptr evalState; - - Value * vSourceExpr = 0; -}; - -} diff --git a/src/nix/log.cc b/src/nix/log.cc index 75f3c1ab0d6..ed610261d1c 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -1,12 +1,11 @@ #include "command.hh" #include "common-args.hh" -#include "installables.hh" #include "shared.hh" #include "store-api.hh" using namespace nix; -struct CmdLog : MixInstallables +struct CmdLog : InstallablesCommand { CmdLog() { diff --git a/src/nix/run.cc b/src/nix/run.cc index f3333b77780..a0ce56134b0 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -1,6 +1,5 @@ #include "command.hh" #include "common-args.hh" -#include "installables.hh" #include "shared.hh" #include "store-api.hh" #include "derivations.hh" @@ -13,7 +12,7 @@ using namespace nix; -struct CmdRun : MixInstallables +struct CmdRun : InstallablesCommand { CmdRun() { diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc index e354891a82e..c628c2898d7 100644 --- a/src/nix/show-config.cc +++ b/src/nix/show-config.cc @@ -1,6 +1,5 @@ #include "command.hh" #include "common-args.hh" -#include "installables.hh" #include "shared.hh" #include "store-api.hh" #include "json.hh" From 7ee81f3887353cae2feecb98b482badf57a1fd5b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 13:20:26 +0200 Subject: [PATCH 0267/2196] Make StorePathsCommand a subclass of InstallablesCommand This allows commands like 'nix path-info', 'nix copy', 'nix verify' etc. to work on arbitrary installables. E.g. to copy geeqie to a binary cache: $ nix copy -r --to file:///tmp/binary-cache nixpkgs.geeqie Or to get the closure size of thunderbird: $ nix path-info -S nixpkgs.thunderbird --- src/nix/command.cc | 18 ++++++++++++++---- src/nix/command.hh | 39 +++++++++++++++++++-------------------- 2 files changed, 33 insertions(+), 24 deletions(-) diff --git a/src/nix/command.cc b/src/nix/command.cc index 4034de96c16..2809a9b4f28 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -1,5 +1,6 @@ #include "command.hh" #include "store-api.hh" +#include "derivations.hh" namespace nix { @@ -98,23 +99,32 @@ void StoreCommand::run() StorePathsCommand::StorePathsCommand() { - expectArgs("paths", &storePaths); mkFlag('r', "recursive", "apply operation to closure of the specified paths", &recursive); mkFlag(0, "all", "apply operation to the entire store", &all); } void StorePathsCommand::run(ref store) { + Paths storePaths; + if (all) { - if (storePaths.size()) + if (installables.size()) throw UsageError("‘--all’ does not expect arguments"); for (auto & p : store->queryAllValidPaths()) storePaths.push_back(p); } else { - for (auto & storePath : storePaths) - storePath = store->followLinksToStorePath(storePath); + for (auto & i : installables) { + for (auto & path : i->toBuildable()) { + if (isDerivation(path)) { + Derivation drv = store->derivationFromPath(path); + for (auto & output : drv.outputs) + storePaths.push_back(output.second.path); + } else + storePaths.push_back(path); + } + } if (recursive) { PathSet closure; diff --git a/src/nix/command.hh b/src/nix/command.hh index ee9485e5dd1..dc7b2637d66 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -44,26 +44,6 @@ private: std::shared_ptr _store; }; -/* A command that operates on zero or more store paths. 
*/ -struct StorePathsCommand : public StoreCommand -{ -private: - - Paths storePaths; - bool recursive = false; - bool all = false; - -public: - - StorePathsCommand(); - - using StoreCommand::run; - - virtual void run(ref store, Paths storePaths) = 0; - - void run(ref store) override; -}; - struct Installable { virtual std::string what() = 0; @@ -115,6 +95,25 @@ private: Value * vSourceExpr = 0; }; +/* A command that operates on zero or more store paths. */ +struct StorePathsCommand : public InstallablesCommand +{ +private: + + bool recursive = false; + bool all = false; + +public: + + StorePathsCommand(); + + using StoreCommand::run; + + virtual void run(ref store, Paths storePaths) = 0; + + void run(ref store) override; +}; + typedef std::map> Commands; /* An argument parser that supports multiple subcommands, From 0b6220fbd6869ec9f48094b85350ef2ea0429b14 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 14:09:01 +0200 Subject: [PATCH 0268/2196] Interpret any installable containing a slash as a path So "nix path-info ./result" now works. --- src/nix/installables.cc | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 3cf4a6f8d32..45517443ca3 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -183,20 +183,16 @@ std::vector> InstallablesCommand::parseInstallables for (auto & installable : installables) { - if (std::string(installable, 0, 1) == "/") { + if (installable.find("/") != std::string::npos) { - if (store->isStorePath(installable)) { - if (isDerivation(installable)) - result.push_back(std::make_shared(installable)); - else - result.push_back(std::make_shared(installable)); - } + auto path = store->toStorePath(store->followLinksToStore(installable)); - else { - result.push_back(std::make_shared( - store->toStorePath(store->followLinksToStore(installable)))); + if (store->isStorePath(path)) { + if (isDerivation(path)) + result.push_back(std::make_shared(path)); + else + result.push_back(std::make_shared(path)); } - } else if (installable.compare(0, 1, "(") == 0) From d48c973ece20875391bebde3c167d6e0cc1e666e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 15:18:05 +0200 Subject: [PATCH 0269/2196] Set default installable Thus $ nix build -f foo.nix will build foo.nix. And $ nix build will build default.nix. However, this may not be a good idea because it's kind of inconsistent, given that "nix build foo" will build the "foo" attribute from the default installation source (i.e. the synthesis of $NIX_PATH), rather than ./default.nix. So I may revert this. 
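(Spelling out the three behaviours described in the message above — "foo" and "foo.nix" are placeholder names, and the exact invocation syntax is assumed from the surrounding patches, not taken from this commit:

$ nix build -f foo.nix    # builds what foo.nix evaluates to
$ nix build               # no installable given: falls back to ./default.nix
$ nix build foo           # attribute 'foo' from the $NIX_PATH synthesis, not ./default.nix

The inconsistency noted above is between the last two invocations: the bare command consults the current directory, while a named attribute does not.)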
--- src/nix/installables.cc | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 45517443ca3..ff345c45d8d 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -181,6 +181,12 @@ std::vector> InstallablesCommand::parseInstallables { std::vector> result; + if (installables.empty()) { + if (file == "") + file = "."; + installables = Strings{""}; + } + for (auto & installable : installables) { if (installable.find("/") != std::string::npos) { @@ -198,7 +204,7 @@ std::vector> InstallablesCommand::parseInstallables else if (installable.compare(0, 1, "(") == 0) result.push_back(std::make_shared(*this, installable)); - else if (std::regex_match(installable, attrPathRegex)) + else if (installable == "" || std::regex_match(installable, attrPathRegex)) result.push_back(std::make_shared(*this, installable)); else From c30330df6f67c81986dfb124631bc756c8e58c0d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 16:19:22 +0200 Subject: [PATCH 0270/2196] StorePathCommands: Build installables So for instance "nix copy --to ... nixpkgs.hello" will build nixpkgs.hello first. It's debatable whether this is a good idea. It seems desirable for commands like "nix copy" but maybe not for commands like "nix path-info". --- src/nix/command.cc | 12 ++---------- src/nix/installables.cc | 11 ++++++++++- src/nix/run.cc | 10 +--------- 3 files changed, 13 insertions(+), 20 deletions(-) diff --git a/src/nix/command.cc b/src/nix/command.cc index 2809a9b4f28..a45f2888bfb 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -115,16 +115,8 @@ void StorePathsCommand::run(ref store) } else { - for (auto & i : installables) { - for (auto & path : i->toBuildable()) { - if (isDerivation(path)) { - Derivation drv = store->derivationFromPath(path); - for (auto & output : drv.outputs) - storePaths.push_back(output.second.path); - } else - storePaths.push_back(path); - } - } + for (auto & p : buildInstallables(store, false)) + storePaths.push_back(p); if (recursive) { PathSet closure; diff --git a/src/nix/installables.cc b/src/nix/installables.cc index ff345c45d8d..57580049f25 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -228,7 +228,16 @@ PathSet InstallablesCommand::buildInstallables(ref store, bool dryRun) if (!dryRun) store->buildPaths(buildables); - return buildables; + PathSet outPaths; + for (auto & path : buildables) + if (isDerivation(path)) { + Derivation drv = store->derivationFromPath(path); + for (auto & output : drv.outputs) + outPaths.insert(output.second.path); + } else + outPaths.insert(path); + + return outPaths; } ref InstallablesCommand::getEvalState() diff --git a/src/nix/run.cc b/src/nix/run.cc index a0ce56134b0..bcfa74eb5f5 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -30,7 +30,7 @@ struct CmdRun : InstallablesCommand void run(ref store) override { - auto paths = buildInstallables(store, false); + auto outPaths = buildInstallables(store, false); auto store2 = store.dynamic_pointer_cast(); @@ -89,14 +89,6 @@ struct CmdRun : InstallablesCommand #endif } - PathSet outPaths; - for (auto & path : paths) - if (isDerivation(path)) { - Derivation drv = store->derivationFromPath(path); - for (auto & output : drv.outputs) - outPaths.insert(output.second.path); - } else - outPaths.insert(path); auto unixPath = tokenizeString(getEnv("PATH"), ":"); for (auto & path : outPaths) From 2dff9556a4131af8a50647f23fe03bfc3c295e12 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 
Apr 2017 16:55:03 +0200 Subject: [PATCH 0271/2196] Fix build --- default.nix | 8 ++++---- nix-repl.cc | 4 +++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/default.nix b/default.nix index 8690325e528..919082981e1 100644 --- a/default.nix +++ b/default.nix @@ -4,13 +4,13 @@ with import nixpkgs { inherit system; }; let nix = nixUnstable; in -runCommand "nix-repl" - { buildInputs = [ readline nix boehmgc ]; } +runCommandCC "nix-repl" + { buildInputs = [ pkgconfig readline nix boehmgc ]; } '' mkdir -p $out/bin - g++ -O3 -Wall -std=c++0x \ + g++ -O3 -Wall -std=c++14 \ -o $out/bin/nix-repl ${./nix-repl.cc} \ - -I${nix}/include/nix \ + $(pkg-config --cflags nix-main) \ -lnixformat -lnixutil -lnixstore -lnixexpr -lnixmain -lreadline -lgc \ -DNIX_VERSION=\"${(builtins.parseDrvName nix.name).version}\" '' diff --git a/nix-repl.cc b/nix-repl.cc index 0e8c67cf7f2..0c50f468330 100644 --- a/nix-repl.cc +++ b/nix-repl.cc @@ -1,3 +1,5 @@ +#include + #include #include @@ -291,7 +293,7 @@ static int runProgram(const string & program, const Strings & args) _exit(1); } - return pid.wait(true); + return pid.wait(); } From 40daf0d800d6a248a57bae0fff8c3989d4814840 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 18:13:23 +0200 Subject: [PATCH 0272/2196] Cleanup in preparation of merging nix-repl repo into nix repo --- COPYING | 674 --------------------------------- README.md | 108 ------ default.nix | 16 - nix-repl.cc => src/nix/repl.cc | 0 4 files changed, 798 deletions(-) delete mode 100644 COPYING delete mode 100644 README.md delete mode 100644 default.nix rename nix-repl.cc => src/nix/repl.cc (100%) diff --git a/COPYING b/COPYING deleted file mode 100644 index 94a9ed024d3..00000000000 --- a/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. 
You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. 
- - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. 
You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. 
In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/README.md b/README.md deleted file mode 100644 index ac5ad98cf53..00000000000 --- a/README.md +++ /dev/null @@ -1,108 +0,0 @@ -nix-repl -======== - -`nix-repl` is a simple read–eval–print loop (REPL) for the Nix package -manager. - -Installation ------------- - -Assuming you have Nix installed, just do - - $ git clone https://github.com/edolstra/nix-repl.git - $ cd nix-repl - $ nix-env -f . -i nix-repl - -Example -------- - -Here is a typical `nix-repl` session: - - $ nix-repl - Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. - - nix-repl> 3 * 4 - 12 - - nix-repl> :l - Added 3337 variables. - - nix-repl> lib.range 1 5 - [ 1 2 3 4 5 ] - - nix-repl> :a lib - Added 299 variables. - - nix-repl> range 1 5 - [ 1 2 3 4 5 ] - - nix-repl> xs = range 1 5 - - nix-repl> map (x: x * x) xs - [ 1 4 9 16 25 ] - - nix-repl> :l - Added 7 variables. - - nix-repl> config.services.dhcpd - { configFile = null; enable = false; extraConfig = ""; interfaces = [ ... ]; machines = [ ... 
]; } - - nix-repl> :p config.services.dhcpd - { configFile = null; enable = false; extraConfig = ""; interfaces = [ "eth0" ]; machines = [ ]; } - - nix-repl> config.fileSystems - { "/" = { ... }; "/boot" = { ... }; } - - nix-repl> mapAttrsToList (n: v: v.device) config.fileSystems - [ "/dev/disk/by-label/nixos" "/dev/disk/by-label/boot" ] - - nix-repl> :b libjson - these derivations will be built: - /nix/store/h910xqb36pysxcxkayb1zkr1zcvvk1zy-libjson_7.6.1.zip.drv - /nix/store/iv0rdx08di0fg704zyxklkvdz6i96lm8-libjson-7.6.1.drv - ... - this derivation produced the following outputs: - out -> /nix/store/ys6bvgfia81rjwqxjlgkwnx6jhsml8h9-libjson-7.6.1 - - nix-repl> :t makeFontsConf - a function - - nix-repl> :b makeFontsConf { fontDirectories = [ "${freefont_ttf}/share/fonts/truetype" ]; } - ... - this derivation produced the following outputs: - out -> /nix/store/jkw848xj0gkbhmvxi0hwpnhzn2716v3c-fonts.conf - - nix-repl> :s pan - # Builds dependencies of the ‘pan’ derivation, then starts a shell - # in which the environment variables of the derivation are set - - [nix-shell:/tmp/nix-repl]$ echo $src - /nix/store/0ibx15r02nnkwiclmfbpzrzjm2y204fh-pan-0.139.tar.bz2 - - [nix-shell:/tmp/nix-repl]$ exit - - nix-repl> - -Tab completion works on variables in scope and on attribute sets. For -example: - - $ nix-repl '' '' - Welcome to Nix version 1.6pre3215_2c1ecf8. Type :? for help. - - nix-repl> thunder => thunderbird - - nix-repl> - Display all 3634 possibilities? (y or n) - - nix-repl> lib - Display all 291 possibilities? (y or n) - - nix-repl> xorg.libX - xorg.libXdamage xorg.libXdmcp - - nix-repl> config.networking.use - config.networking.useDHCP config.networking.usePredictableInterfaceNames - -Input history is preserved by readline in ~/.nix-repl-history -The readline "application name" is nix-repl. This allows for nix-repl specific -settings in ~/.inputrc diff --git a/default.nix b/default.nix deleted file mode 100644 index 919082981e1..00000000000 --- a/default.nix +++ /dev/null @@ -1,16 +0,0 @@ -{ nixpkgs ? , system ? 
builtins.currentSystem }: - -with import nixpkgs { inherit system; }; - -let nix = nixUnstable; in - -runCommandCC "nix-repl" - { buildInputs = [ pkgconfig readline nix boehmgc ]; } - '' - mkdir -p $out/bin - g++ -O3 -Wall -std=c++14 \ - -o $out/bin/nix-repl ${./nix-repl.cc} \ - $(pkg-config --cflags nix-main) \ - -lnixformat -lnixutil -lnixstore -lnixexpr -lnixmain -lreadline -lgc \ - -DNIX_VERSION=\"${(builtins.parseDrvName nix.name).version}\" - '' diff --git a/nix-repl.cc b/src/nix/repl.cc similarity index 100% rename from nix-repl.cc rename to src/nix/repl.cc From 921a2aeb0537f34bc2b41e98e67a1c829321ee81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 18:48:40 +0200 Subject: [PATCH 0273/2196] Make "nix repl" build --- release.nix | 3 ++- shell.nix | 1 + src/nix/local.mk | 2 ++ src/nix/repl.cc | 66 +++++++++++++++++++++++++----------------------- 4 files changed, 39 insertions(+), 33 deletions(-) diff --git a/release.nix b/release.nix index 294af54cd61..534c218c112 100644 --- a/release.nix +++ b/release.nix @@ -73,7 +73,8 @@ let buildInputs = [ curl bzip2 xz brotli - openssl pkgconfig sqlite boehmgc + openssl pkgconfig sqlite boehmgc readline + ] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) diff --git a/shell.nix b/shell.nix index 425eb0a191f..3c57826d1a1 100644 --- a/shell.nix +++ b/shell.nix @@ -16,6 +16,7 @@ with import {}; customMemoryManagement = false; }) autoreconfHook + readline ]; configureFlags = diff --git a/src/nix/local.mk b/src/nix/local.mk index f6e7073b6e7..21f190e476f 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -6,4 +6,6 @@ nix_SOURCES := $(wildcard $(d)/*.cc) nix_LIBS = libexpr libmain libstore libutil libformat +nix_LDFLAGS = -lreadline + $(eval $(call install-symlink, nix, $(bindir)/nix-hash)) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 71790eb481a..54e48e405b8 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -1,5 +1,3 @@ -#include - #include #include @@ -17,9 +15,11 @@ #include "derivations.hh" #include "affinity.hh" #include "globals.hh" +#include "command.hh" + +namespace nix { using namespace std; -using namespace nix; #define ESC_RED "\033[31m" #define ESC_GRE "\033[32m" @@ -49,6 +49,7 @@ struct NixRepl StringSet::iterator curCompletion; NixRepl(const Strings & searchPath, nix::ref store); + ~NixRepl(); void mainLoop(const Strings & files); void completePrefix(string prefix); bool getLine(string & input, const char * prompt); @@ -119,10 +120,16 @@ NixRepl::NixRepl(const Strings & searchPath, nix::ref store) } +NixRepl::~NixRepl() +{ + write_history(historyFile.c_str()); +} + + void NixRepl::mainLoop(const Strings & files) { string error = ANSI_RED "error:" ANSI_NORMAL " "; - std::cout << "Welcome to Nix version " << NIX_VERSION << ". Type :? for help." << std::endl << std::endl; + std::cout << "Welcome to Nix version " << nixVersion << ". Type :? for help." 
<< std::endl << std::endl; for (auto & i : files) loadedFiles.push_back(i); @@ -685,35 +692,30 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m return str; } - -int main(int argc, char * * argv) +struct CmdRepl : StoreCommand { - return handleExceptions(argv[0], [&]() { - initNix(); - initGC(); - - Strings files, searchPath; - - parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { - if (*arg == "--version") - printVersion("nix-repl"); - else if (*arg == "--help") { - printHelp(); - // exit with 0 since user asked for help - _exit(0); - } - else if (parseSearchPathArg(arg, end, searchPath)) - ; - else if (*arg != "" && arg->at(0) == '-') - return false; - else - files.push_back(*arg); - return true; - }); - - NixRepl repl(searchPath, openStore()); + Strings files; + + CmdRepl() + { + expectArgs("files", &files); + } + + std::string name() override { return "repl"; } + + std::string description() override + { + return "start an interactive environment for evaluating Nix expressions"; + } + + void run(ref store) override + { + // FIXME: pass searchPath + NixRepl repl({}, openStore()); repl.mainLoop(files); + } +}; + +static RegisterCommand r1(make_ref()); - write_history(historyFile.c_str()); - }); } From 5bd8795e1fa9255836fbbeab0a22f5038d92d53c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 18:56:29 +0200 Subject: [PATCH 0274/2196] nix repl: Use $XDG_DATA_HOME for the readline history --- src/libutil/util.cc | 12 ++++++++++++ src/libutil/util.hh | 3 +++ src/nix/repl.cc | 7 ++++--- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 88a2f752c02..026e493514e 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -441,6 +441,18 @@ Path getConfigDir() } +Path getDataDir() +{ + Path dataDir = getEnv("XDG_DATA_HOME"); + if (dataDir.empty()) { + Path homeDir = getEnv("HOME"); + if (homeDir.empty()) throw Error("$XDG_DATA_HOME and $HOME are not set"); + dataDir = homeDir + "/.local/share"; + } + return dataDir; +} + + Paths createDirs(const Path & path) { Paths created; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index ae40dcd4cd2..a9950f830c5 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -116,6 +116,9 @@ Path getCacheDir(); /* Return $XDG_CONFIG_HOME or $HOME/.config. */ Path getConfigDir(); +/* Return $XDG_DATA_HOME or $HOME/.local/share. */ +Path getDataDir(); + /* Create a directory and all its parents, if necessary. Returns the list of created directories, in order of creation. 
*/ Paths createDirs(const Path & path); diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 54e48e405b8..78d973ba832 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -29,9 +29,6 @@ using namespace std; #define ESC_CYA "\033[36m" #define ESC_END "\033[0m" -string programId = "nix-repl"; -const string historyFile = string(getenv("HOME")) + "/.nix-repl-history"; - struct NixRepl { string curDir; @@ -45,6 +42,8 @@ struct NixRepl int displ; StringSet varNames; + const Path historyFile; + StringSet completions; StringSet::iterator curCompletion; @@ -115,6 +114,7 @@ string removeWhitespace(string s) NixRepl::NixRepl(const Strings & searchPath, nix::ref store) : state(searchPath, store) , staticEnv(false, &state.staticBaseEnv) + , historyFile(getDataDir() + "/nix/repl-history") { curDir = absPath("."); } @@ -140,6 +140,7 @@ void NixRepl::mainLoop(const Strings & files) // Allow nix-repl specific settings in .inputrc rl_readline_name = "nix-repl"; using_history(); + createDirs(dirOf(historyFile)); read_history(historyFile.c_str()); string input; From 536f06176519072011aebe0d9b4b5b75e1691f67 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 18:58:02 +0200 Subject: [PATCH 0275/2196] "using namespace std" considered harmful --- src/nix/repl.cc | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 78d973ba832..964bb85eb6f 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -19,8 +19,6 @@ namespace nix { -using namespace std; - #define ESC_RED "\033[31m" #define ESC_GRE "\033[32m" #define ESC_YEL "\033[33m" @@ -70,7 +68,8 @@ struct NixRepl void printHelp() { - cout << "Usage: nix-repl [--help] [--version] [-I path] paths...\n" + std::cout + << "Usage: nix-repl [--help] [--version] [-I path] paths...\n" << "\n" << "nix-repl is a simple read-eval-print loop (REPL) for the Nix package manager.\n" << "\n" @@ -97,8 +96,7 @@ void printHelp() << " If an element of paths starts with http:// or https://, it is interpreted\n" << " as the URL of a tarball that will be downloaded and unpacked to a temporary\n" << " location. The tarball must include a single top-level directory containing\n" - << " at least a file named default.nix.\n" - << flush; + << " at least a file named default.nix.\n"; } @@ -351,7 +349,8 @@ bool NixRepl::processLine(string line) } if (command == ":?" 
|| command == ":help") { - cout << "The following commands are available:\n" + std::cout + << "The following commands are available:\n" << "\n" << " Evaluate and print expression\n" << " = Bind expression to variable\n" From 4c95ef3768f8415579d9dd8bda69407021b72017 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 18:59:18 +0200 Subject: [PATCH 0276/2196] Fix nix-shell test --- tests/shell.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/shell.nix b/tests/shell.nix index 1a092913b3b..5845d36fc16 100644 --- a/tests/shell.nix +++ b/tests/shell.nix @@ -2,7 +2,7 @@ with import ./config.nix; -rec { +let pkgs = rec { setupSh = builtins.toFile "setup" '' export VAR_FROM_STDENV_SETUP=foo for pkg in $buildInputs; do @@ -44,4 +44,6 @@ rec { ''; bash = shell; -} + + inherit pkgs; +}; in pkgs From 23aa1619daace5db30233a53183911adb42322d9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 19:10:47 +0200 Subject: [PATCH 0277/2196] Minor cleanup --- src/nix/repl.cc | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 964bb85eb6f..ae30502641a 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -289,18 +289,15 @@ void NixRepl::completePrefix(string prefix) static int runProgram(const string & program, const Strings & args) { - std::vector cargs; /* careful with c_str()! */ - cargs.push_back(program.c_str()); - for (Strings::const_iterator i = args.begin(); i != args.end(); ++i) - cargs.push_back(i->c_str()); - cargs.push_back(0); + Strings args2(args); + args2.push_front(program); Pid pid; pid = fork(); if (pid == -1) throw SysError("forking"); if (pid == 0) { restoreAffinity(); - execvp(program.c_str(), (char * *) &cargs[0]); + execvp(program.c_str(), stringsToCharPtrs(args2).data()); _exit(1); } @@ -394,7 +391,7 @@ bool NixRepl::processLine(string line) state.callFunction(f, v, result, Pos()); Path drvPath = getDerivationPath(result); - runProgram("nix-shell", Strings{drvPath}); + runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPath}); } else if (command == ":b" || command == ":i" || command == ":s") { @@ -406,16 +403,16 @@ bool NixRepl::processLine(string line) /* We could do the build in this process using buildPaths(), but doing it in a child makes it easier to recover from problems / SIGINT. 
*/ - if (runProgram("nix-store", Strings{"-r", drvPath}) == 0) { + if (runProgram(settings.nixBinDir + "/nix-store", Strings{"-r", drvPath}) == 0) { Derivation drv = readDerivation(drvPath); std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; for (auto & i : drv.outputs) std::cout << format(" %1% -> %2%") % i.first % i.second.path << std::endl; } } else if (command == ":i") { - runProgram("nix-env", Strings{"-i", drvPath}); + runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath}); } else { - runProgram("nix-shell", Strings{drvPath}); + runProgram(settings.nixBinDir + "/nix-shell", Strings{drvPath}); } } From 6734c18c99f8fa33a50a3045a8dd915bbf084255 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Apr 2017 19:19:15 +0200 Subject: [PATCH 0278/2196] nix repl: Fix Ctrl-C --- src/nix/repl.cc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index ae30502641a..17203d3c299 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -217,6 +217,13 @@ bool NixRepl::getLine(string & input, const char * prompt) if (sigaction(SIGINT, &act, &old)) throw SysError("installing handler for SIGINT"); + static sigset_t savedSignalMask, set; + sigemptyset(&set); + sigaddset(&set, SIGINT); + + if (sigprocmask(SIG_UNBLOCK, &set, &savedSignalMask)) + throw SysError("unblocking SIGINT"); + if (sigsetjmp(sigintJmpBuf, 1)) { input.clear(); } else { @@ -236,6 +243,9 @@ bool NixRepl::getLine(string & input, const char * prompt) _isInterrupted = 0; + if (sigprocmask(SIG_SETMASK, &savedSignalMask, nullptr)) + throw SysError("restoring signals"); + if (sigaction(SIGINT, &old, 0)) throw SysError("restoring handler for SIGINT"); From 98a2adb1359ec35ac8da7a52754d2290531f8cef Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Apr 2017 17:04:45 +0200 Subject: [PATCH 0279/2196] Simplify building nix-perl in nix-shell --- perl/Makefile | 2 +- perl/configure.ac | 2 +- perl/lib/Nix/Store.xs | 2 ++ perl/local.mk | 6 +++--- shell.nix | 9 ++++++++- 5 files changed, 15 insertions(+), 6 deletions(-) diff --git a/perl/Makefile b/perl/Makefile index cf655ae3d65..684a37e8121 100644 --- a/perl/Makefile +++ b/perl/Makefile @@ -1,6 +1,6 @@ makefiles = local.mk -GLOBAL_CXXFLAGS += -std=c++14 -g -Wall -include nix/config.h +GLOBAL_CXXFLAGS += -std=c++14 -g -Wall -include Makefile.config diff --git a/perl/configure.ac b/perl/configure.ac index 7a6b28be23e..80bcdb8ff5b 100644 --- a/perl/configure.ac +++ b/perl/configure.ac @@ -98,7 +98,7 @@ for name in $ac_subst_vars; do done rm -f Makefile.config -ln -s ../mk mk +ln -sfn ../mk mk AC_CONFIG_FILES([]) AC_OUTPUT diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 1920942a4c0..aa14bfa6270 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -1,3 +1,5 @@ +#include "config.h" + #include "EXTERN.h" #include "perl.h" #include "XSUB.h" diff --git a/perl/local.mk b/perl/local.mk index 35113bd960d..b13d4c0d639 100644 --- a/perl/local.mk +++ b/perl/local.mk @@ -20,11 +20,11 @@ Store_DIR := lib/Nix Store_SOURCES := $(Store_DIR)/Store.cc Store_CXXFLAGS = \ - -I$(shell $(perl) -e 'use Config; print $$Config{archlibexp};')/CORE \ + $(NIX_CFLAGS) \ + -I$(shell perl -e 'use Config; print $$Config{archlibexp};')/CORE \ -D_FILE_OFFSET_BITS=64 \ -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \ - -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion \ - $(NIX_CFLAGS) + -Wno-reserved-user-defined-literal 
-Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS) diff --git a/shell.nix b/shell.nix index 3c57826d1a1..37a936fd2ef 100644 --- a/shell.nix +++ b/shell.nix @@ -17,6 +17,10 @@ with import {}; }) autoreconfHook readline + + # For nix-perl + perl + perlPackages.DBDSQLite ]; configureFlags = @@ -30,6 +34,9 @@ with import {}; shellHook = '' - configureFlags+=" --prefix=$(pwd)/inst" + export prefix=$(pwd)/inst + configureFlags+=" --prefix=prefix" + PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH + PATH=$prefix/bin:$PATH ''; } From 45ce2c7413bf8c87aa7d5bc70a5a1c920c40a3b8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Apr 2017 17:58:09 +0200 Subject: [PATCH 0280/2196] Doh --- src/nix-store/nix-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 9131b74dfb4..950222812e2 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -799,7 +799,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (GET_PROTOCOL_MINOR(clientVersion) >= 3) { settings.buildRepeat = readInt(in); settings.enforceDeterminism = readInt(in); - settings.runDiffHook = readInt(in); + settings.runDiffHook = true; } settings.printRepeatedBuilds = false; }; From 4bc00760f9bc36d5b4e8bba7de9bd71a30d7f31a Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Wed, 26 Apr 2017 12:38:16 -0400 Subject: [PATCH 0281/2196] Add Store nesting to fix import-from-derivation within filterSource --- src/libstore/remote-store.cc | 2 ++ src/libutil/pool.hh | 16 ++++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index bc9ef3d47c5..af59d51106f 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -414,7 +414,9 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath, try { conn->to.written = 0; conn->to.warn = true; + connections->incCapacity(); dumpPath(srcPath, conn->to, filter); + connections->decCapacity(); conn->to.warn = false; conn->processStderr(); } catch (SysError & e) { diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh index 20df2194884..7033090020e 100644 --- a/src/libutil/pool.hh +++ b/src/libutil/pool.hh @@ -68,6 +68,22 @@ public: state_->max = max; } + void incCapacity() + { + auto state_(state.lock()); + state_->max++; + /* we could wakeup here, but this is only used when we're + * about to nest Pool usages, and we want to save the slot for + * the nested use if we can + */ + } + + void decCapacity() + { + auto state_(state.lock()); + state_->max--; + } + ~Pool() { auto state_(state.lock()); From 41c4558afe04d1cad0d0ef3b18a1a8155b40b06e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Apr 2017 15:10:29 +0200 Subject: [PATCH 0282/2196] Fix hash computation when importing NARs greater than 4 GiB This caused "nix-store --import" to compute an incorrect hash on NARs that don't fit in an unsigned int. The import would succeed, but "nix-store --verify-path" or subsequent exports would detect an incorrect hash. A deeper issue is that the export/import format does not contain a hash, so we can't detect such issues early. Also, I learned that -Wall does not warn about this. 
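To make the failure mode concrete, here is a minimal standalone sketch — not the Nix code itself; updateNarrow() and narSize are invented for illustration — of the truncation described above: a byte count larger than 4 GiB passed through an "unsigned int" parameter wraps modulo 2^32, so only a fraction of the input is ever accounted for, and -Wall stays silent about the implicit conversion.

    // Minimal sketch, assuming a 64-bit size_t; updateNarrow() stands in for
    // any helper that takes its length as 'unsigned int'.
    #include <cstddef>
    #include <cstdio>

    static void updateNarrow(const unsigned char * /* bytes */, unsigned int len)
    {
        // Only the low 32 bits of the caller's length arrive here.
        std::printf("hashing %u bytes\n", len);
    }

    int main()
    {
        // A hypothetical 5 GiB NAR.
        size_t narSize = 5ULL * 1024 * 1024 * 1024;

        // Implicit size_t -> unsigned int conversion: 5 GiB wraps to 1 GiB,
        // so most of the data would never reach the hash context.
        updateNarrow(nullptr, narSize);                    // prints "hashing 1073741824 bytes"
        std::printf("actual size: %zu bytes\n", narSize);  // 5368709120
        return 0;
    }

Widening the length parameter to size_t, as the change below does, removes the wrap; only stricter flags such as -Wconversion (not -Wall) would have flagged the implicit narrowing.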
--- src/libutil/hash.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 9f4afd93c2f..fa1bb5d9718 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -224,7 +224,7 @@ static void start(HashType ht, Ctx & ctx) static void update(HashType ht, Ctx & ctx, - const unsigned char * bytes, unsigned int len) + const unsigned char * bytes, size_t len) { if (ht == htMD5) MD5_Update(&ctx.md5, bytes, len); else if (ht == htSHA1) SHA1_Update(&ctx.sha1, bytes, len); From a1a5e63e1456b5905b73065f635a324f3c309a5d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Apr 2017 16:20:46 +0200 Subject: [PATCH 0283/2196] Fix brainfart --- src/libstore/build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 9bf1ab5aa58..01a3203dd90 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3379,7 +3379,7 @@ void SubstitutionGoal::tryToRun() if maxBuildJobs == 0 (no local builds allowed), we still allow a substituter to run. This is because substitutions cannot be distributed to another machine via the build hook. */ - if (worker.getNrLocalBuilds() >= std::min(1U, (unsigned int) settings.maxBuildJobs)) { + if (worker.getNrLocalBuilds() >= std::max(1U, (unsigned int) settings.maxBuildJobs)) { worker.waitForBuildSlot(shared_from_this()); return; } From 73bba12d8ba1bff2b2c0266a7fdbe1ee8ab0ad6a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Apr 2017 16:53:56 +0200 Subject: [PATCH 0284/2196] Check for libreadline --- Makefile.config.in | 1 + configure.ac | 8 ++++++++ nix.spec.in | 2 ++ release.nix | 6 +++--- shell.nix | 2 +- src/nix/local.mk | 4 +++- src/nix/repl.cc | 4 ++++ 7 files changed, 22 insertions(+), 5 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index 6948dad5a60..3cae30d487d 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -5,6 +5,7 @@ CXX = @CXX@ CXXFLAGS = @CXXFLAGS@ ENABLE_S3 = @ENABLE_S3@ HAVE_SODIUM = @HAVE_SODIUM@ +HAVE_READLINE = @HAVE_READLINE@ LIBCURL_LIBS = @LIBCURL_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ diff --git a/configure.ac b/configure.ac index c7026cf954d..ac37456ae5f 100644 --- a/configure.ac +++ b/configure.ac @@ -196,6 +196,14 @@ if test "$gc" = yes; then fi +# Check for readline, needed by "nix repl". +AX_LIB_READLINE +if test "$ax_cv_lib_readline" != "no"; then + have_readline=1 +fi +AC_SUBST(HAVE_READLINE, [$have_readline]) + + AC_ARG_ENABLE(init-state, AC_HELP_STRING([--disable-init-state], [do not initialise DB etc. in `make install']), init_state=$enableval, init_state=yes) diff --git a/nix.spec.in b/nix.spec.in index 390893d64dc..3ba2dfc94b4 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -20,9 +20,11 @@ Requires: curl Requires: bzip2 Requires: gzip Requires: xz +Requires: readline BuildRequires: bzip2-devel BuildRequires: sqlite-devel BuildRequires: libcurl-devel +BuildRequires: readline-devel # Hack to make that shitty RPM scanning hack shut up. 
Provides: perl(Nix::SSH) diff --git a/release.nix b/release.nix index 534c218c112..7adc87386f9 100644 --- a/release.nix +++ b/release.nix @@ -299,7 +299,7 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] + [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" "readline-devel" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; @@ -321,14 +321,14 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] + [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" "libreadline-dev" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; debRequires = - [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" ] + [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" "libreadline6" ] ++ extraDebPackages; debMaintainer = "Eelco Dolstra "; doInstallCheck = true; diff --git a/shell.nix b/shell.nix index 37a936fd2ef..bbce68564b9 100644 --- a/shell.nix +++ b/shell.nix @@ -35,7 +35,7 @@ with import {}; shellHook = '' export prefix=$(pwd)/inst - configureFlags+=" --prefix=prefix" + configureFlags+=" --prefix=$prefix" PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH PATH=$prefix/bin:$PATH ''; diff --git a/src/nix/local.mk b/src/nix/local.mk index 21f190e476f..e71cf16fabf 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -6,6 +6,8 @@ nix_SOURCES := $(wildcard $(d)/*.cc) nix_LIBS = libexpr libmain libstore libutil libformat -nix_LDFLAGS = -lreadline +ifeq ($(HAVE_READLINE), 1) + nix_LDFLAGS += -lreadline +endif $(eval $(call install-symlink, nix, $(bindir)/nix-hash)) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 17203d3c299..13488bf1dbd 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -1,3 +1,5 @@ +#if HAVE_LIBREADLINE + #include #include @@ -726,3 +728,5 @@ struct CmdRepl : StoreCommand static RegisterCommand r1(make_ref()); } + +#endif From 895f00c37267086cc6fcd8326c8ecad2818223e8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Apr 2017 16:55:52 +0200 Subject: [PATCH 0285/2196] Suppress warning about ssh-auth-sock --- src/nix-daemon/nix-daemon.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 07ad0b45b3e..1b90fad165a 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -483,7 +483,9 @@ static void performOp(ref store, bool trusted, unsigned int clientVe }; try { - if (trusted + if (name == "ssh-auth-sock") // obsolete + ; + else if (trusted || name == settings.buildTimeout.name || name == settings.connectTimeout.name) settings.set(name, value); From 2f21d522c28b1e902bd7f0b5b9e7523975102d81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Apr 2017 17:13:55 +0200 Subject: [PATCH 0286/2196] Hopefully fix the Darwin build http://hydra.nixos.org/build/52080911 --- src/libutil/config.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 62c6433c741..497afaa1fed 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -215,6 +215,7 @@ template class BaseSetting; template class BaseSetting; template class 
BaseSetting; template class BaseSetting; +template class BaseSetting; void PathSetting::set(const std::string & str) { From b3f55fdf621b6433967fd501423bedcb6ab2e4ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sat, 29 Apr 2017 11:33:55 +0200 Subject: [PATCH 0287/2196] nix-daemon.service: set XDG_CONFIG_HOME MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Otherwise starting nix-daemon fails ● nix-daemon.service - Nix Daemon Loaded: loaded (/nix/store/mnf00a6gc55xl47smk0b32gmi7xpvlfp-nix-1.12pre5308_2f21d522/lib/systemd/system/nix-daemon.service; enabled; vendor preset: enabled) Drop-In: /nix/store/m2rgjp71n4kyp8j5fxgbrlv13scd5vvv-system-units/nix-daemon.service.d └─overrides.conf Active: failed (Result: exit-code) since Sat 2017-04-29 11:29:21 CEST; 9s ago Process: 7299 ExecStart=nix-daemon --daemon (code=exited, status=1/FAILURE) Main PID: 7299 (code=exited, status=1/FAILURE) CPU: 19ms ... systemd[1]: Started Nix Daemon. ... nix-daemon[7299]: error: $XDG_CONFIG_HOME and $HOME are not set ... systemd[1]: nix-daemon.service: Main process exited, code=exited, status=1/FAILURE ... systemd[1]: nix-daemon.service: Unit entered failed state. ... systemd[1]: nix-daemon.service: Failed with result 'exit-code'. ... systemd[1]: nix-daemon.service: Start request repeated too quickly. ... systemd[1]: Failed to start Nix Daemon. ... systemd[1]: nix-daemon.service: Failed with result 'exit-code'. --- misc/systemd/nix-daemon.service.in | 1 + 1 file changed, 1 insertion(+) diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in index fcd799e177d..9bfb00e306b 100644 --- a/misc/systemd/nix-daemon.service.in +++ b/misc/systemd/nix-daemon.service.in @@ -8,3 +8,4 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket ExecStart=@@bindir@/nix-daemon nix-daemon --daemon KillMode=process Environment=XDG_CACHE_HOME=/root/.cache +Environment=XDG_CONFIG_HOME=/root/.config From a4744254250e170f1b858e46a8b1c7904a030a2b Mon Sep 17 00:00:00 2001 From: Guillaume Maudoux Date: Mon, 1 May 2017 01:05:41 +0200 Subject: [PATCH 0288/2196] Fix lexer to support `$'` in multiline strings. --- src/libexpr/lexer.l | 3 ++- tests/lang/eval-okay-ind-string.exp | 2 +- tests/lang/eval-okay-ind-string.nix | 10 +++++++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 5b1ff0350cd..d4fae2d7da1 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -160,7 +160,8 @@ or { return OR_KW; } yylval->e = new ExprIndStr(yytext); return IND_STR; } -\'\'\$ { +\'\'\$ | +\$ { yylval->e = new ExprIndStr("$"); return IND_STR; } diff --git a/tests/lang/eval-okay-ind-string.exp b/tests/lang/eval-okay-ind-string.exp index 886219dcf65..9cf4bd2ee78 100644 --- a/tests/lang/eval-okay-ind-string.exp +++ b/tests/lang/eval-okay-ind-string.exp @@ -1 +1 @@ -"This is an indented multi-line string\nliteral. An amount of whitespace at\nthe start of each line matching the minimum\nindentation of all lines in the string\nliteral together will be removed. Thus,\nin this case four spaces will be\nstripped from each line, even though\n THIS LINE is indented six spaces.\n\nAlso, empty lines don't count in the\ndetermination of the indentation level (the\nprevious empty line has indentation 0, but\nit doesn't matter).\nIf the string starts with whitespace\n followed by a newline, it's stripped, but\n that's not the case here. Two spaces are\n stripped because of the \" \" at the start. 
\nThis line is indented\na bit further.\nAnti-quotations, like so, are\nalso allowed.\n The \\ is not special here.\n' can be followed by any character except another ', e.g. 'x'.\nLikewise for $, e.g. $$ or $varName.\nBut ' followed by ' is special, as is $ followed by {.\nIf you want them, use anti-quotations: '', ${.\n Tabs are not interpreted as whitespace (since we can't guess\n what tab settings are intended), so don't use them.\n\tThis line starts with a space and a tab, so only one\n space will be stripped from each line.\nAlso note that if the last line (just before the closing ' ')\nconsists only of whitespace, it's ignored. But here there is\nsome non-whitespace stuff, so the line isn't removed. \nThis shows a hacky way to preserve an empty line after the start.\nBut there's no reason to do so: you could just repeat the empty\nline.\n Similarly you can force an indentation level,\n in this case to 2 spaces. This works because the anti-quote\n is significant (not whitespace).\nstart on network-interfaces\n\nstart script\n\n rm -f /var/run/opengl-driver\n ln -sf 123 /var/run/opengl-driver\n\n rm -f /var/log/slim.log\n \nend script\n\nenv SLIM_CFGFILE=abc\nenv SLIM_THEMESDIR=def\nenv FONTCONFIG_FILE=/etc/fonts/fonts.conf \t\t\t\t# !!! cleanup\nenv XKB_BINDIR=foo/bin \t\t\t\t# Needed for the Xkb extension.\nenv LD_LIBRARY_PATH=libX11/lib:libXext/lib:/usr/lib/ # related to xorg-sys-opengl - needed to load libglx for (AI)GLX support (for compiz)\n\nenv XORG_DRI_DRIVER_PATH=nvidiaDrivers/X11R6/lib/modules/drivers/ \n\nexec slim/bin/slim\nEscaping of ' followed by ': ''\nEscaping of $ followed by {: ${\nAnd finally to interpret \\n etc. as in a string: \n, \r, \t.\nfoo\n'bla'\nbar\n" +"This is an indented multi-line string\nliteral. An amount of whitespace at\nthe start of each line matching the minimum\nindentation of all lines in the string\nliteral together will be removed. Thus,\nin this case four spaces will be\nstripped from each line, even though\n THIS LINE is indented six spaces.\n\nAlso, empty lines don't count in the\ndetermination of the indentation level (the\nprevious empty line has indentation 0, but\nit doesn't matter).\nIf the string starts with whitespace\n followed by a newline, it's stripped, but\n that's not the case here. Two spaces are\n stripped because of the \" \" at the start. \nThis line is indented\na bit further.\nAnti-quotations, like so, are\nalso allowed.\n The \\ is not special here.\n' can be followed by any character except another ', e.g. 'x'.\nLikewise for $, e.g. $$ or $varName.\nBut ' followed by ' is special, as is $ followed by {.\nIf you want them, use anti-quotations: '', ${.\n Tabs are not interpreted as whitespace (since we can't guess\n what tab settings are intended), so don't use them.\n\tThis line starts with a space and a tab, so only one\n space will be stripped from each line.\nAlso note that if the last line (just before the closing ' ')\nconsists only of whitespace, it's ignored. But here there is\nsome non-whitespace stuff, so the line isn't removed. \nThis shows a hacky way to preserve an empty line after the start.\nBut there's no reason to do so: you could just repeat the empty\nline.\n Similarly you can force an indentation level,\n in this case to 2 spaces. 
This works because the anti-quote\n is significant (not whitespace).\nstart on network-interfaces\n\nstart script\n\n rm -f /var/run/opengl-driver\n ln -sf 123 /var/run/opengl-driver\n\n rm -f /var/log/slim.log\n \nend script\n\nenv SLIM_CFGFILE=abc\nenv SLIM_THEMESDIR=def\nenv FONTCONFIG_FILE=/etc/fonts/fonts.conf \t\t\t\t# !!! cleanup\nenv XKB_BINDIR=foo/bin \t\t\t\t# Needed for the Xkb extension.\nenv LD_LIBRARY_PATH=libX11/lib:libXext/lib:/usr/lib/ # related to xorg-sys-opengl - needed to load libglx for (AI)GLX support (for compiz)\n\nenv XORG_DRI_DRIVER_PATH=nvidiaDrivers/X11R6/lib/modules/drivers/ \n\nexec slim/bin/slim\nEscaping of ' followed by ': ''\nEscaping of $ followed by {: ${\nAnd finally to interpret \\n etc. as in a string: \n, \r, \t.\nfoo\n'bla'\nbar\ncut -d $'\\t' -f 1\nending dollar $$\n" diff --git a/tests/lang/eval-okay-ind-string.nix b/tests/lang/eval-okay-ind-string.nix index 1556aae9f54..1669dc0648e 100644 --- a/tests/lang/eval-okay-ind-string.nix +++ b/tests/lang/eval-okay-ind-string.nix @@ -117,4 +117,12 @@ let bar ''; -in s1 + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9 + s10 + s11 + s12 + s13 + s14 + s15 + # Regression test: accept $'. + s16 = '' + cut -d $'\t' -f 1 + ''; + + # Accept dollars at end of strings + s17 = ''ending dollar $'' + ''$'' + "\n"; + +in s1 + s2 + s3 + s4 + s5 + s6 + s7 + s8 + s9 + s10 + s11 + s12 + s13 + s14 + s15 + s16 + s17 From a143014d738758b5558efb73fee9f351cd00cbda Mon Sep 17 00:00:00 2001 From: Guillaume Maudoux Date: Mon, 1 May 2017 01:07:33 +0200 Subject: [PATCH 0289/2196] lexer: remove catch-all rules hiding real errors With catch-all rules, we hide potential errors. It turns out that a4744254 made one cath-all useless. Flex detected that is was impossible to reach. The other is more subtle, as it can only trigger on unfinished escapes in unfinished strings, which only occurs at EOF. --- src/libexpr/lexer.l | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index d4fae2d7da1..40ca7725803 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -142,18 +142,26 @@ or { return OR_KW; } \{ { return '{'; } \{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return '{'; } -\" { PUSH_STATE(STRING); return '"'; } +\" { + PUSH_STATE(STRING); return '"'; + } ([^\$\"\\]|\$[^\{\"\\]|\\.|\$\\.)*\$/\" | ([^\$\"\\]|\$[^\{\"\\]|\\.|\$\\.)+ { - /* It is impossible to match strings ending with '$' with one - regex because trailing contexts are only valid at the end - of a rule. (A sane but undocumented limitation.) */ - yylval->e = unescapeStr(data->symbols, yytext); - return STR; - } + /* It is impossible to match strings ending with '$' with one + regex because trailing contexts are only valid at the end + of a rule. (A sane but undocumented limitation.) */ + yylval->e = unescapeStr(data->symbols, yytext); + return STR; + } \$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; } -\" { POP_STATE(); return '"'; } -. return yytext[0]; /* just in case: shouldn't be reached */ +\" { POP_STATE(); return '"'; } +\$|\\|\$\\ { + /* This can only occur when we reach EOF, otherwise the above + (...|\$[^\{\"\\]|\\.|\$\\.)+ would have triggered. + This is technically invalid, but we leave the problem to the + parser who fails with exact location. */ + return STR; + } \'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } ([^\$\']|\$[^\{\']|\'[^\'\$])+ { @@ -179,7 +187,6 @@ or { return OR_KW; } yylval->e = new ExprIndStr("'"); return IND_STR; } -. 
return yytext[0]; /* just in case: shouldn't be reached */ { From 0dddcf867a65723ddf96343dd694f970f2f9538a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 13:43:34 +0200 Subject: [PATCH 0290/2196] Add a dummy Store::buildPaths() method This default implementation of buildPaths() does nothing if all requested paths are already valid, and throws an "unsupported operation" error otherwise. This fixes a regression introduced by c30330df6f67c81986dfb124631bc756c8e58c0d in binary cache and legacy SSH stores. --- src/libstore/binary-cache-store.cc | 5 ----- src/libstore/binary-cache-store.hh | 29 ++++++++++++----------------- src/libstore/legacy-ssh-store.cc | 9 --------- src/libstore/store-api.cc | 11 +++++++++++ src/libstore/store-api.hh | 20 +++++++++++++------- 5 files changed, 36 insertions(+), 38 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index b536c6c0004..46c5aa21b2e 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -114,11 +114,6 @@ void BinaryCacheStore::init() } } -void BinaryCacheStore::notImpl() -{ - throw Error("operation not implemented for binary cache stores"); -} - std::shared_ptr BinaryCacheStore::getFile(const std::string & path) { std::promise> promise; diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 5c2d0acfdbb..87d4aa43838 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -27,8 +27,6 @@ protected: BinaryCacheStore(const Params & params); - [[noreturn]] void notImpl(); - public: virtual bool fileExists(const std::string & path) = 0; @@ -65,7 +63,7 @@ public: bool isValidPathUncached(const Path & path) override; PathSet queryAllValidPaths() override - { notImpl(); } + { unsupported(); } void queryPathInfoUncached(const Path & path, std::function)> success, @@ -73,16 +71,16 @@ public: void queryReferrers(const Path & path, PathSet & referrers) override - { notImpl(); } + { unsupported(); } PathSet queryDerivationOutputs(const Path & path) override - { notImpl(); } + { unsupported(); } StringSet queryDerivationOutputNames(const Path & path) override - { notImpl(); } + { unsupported(); } Path queryPathFromHashPart(const string & hashPart) override - { notImpl(); } + { unsupported(); } bool wantMassQuery() override { return wantMassQuery_; } @@ -99,32 +97,29 @@ public: void narFromPath(const Path & path, Sink & sink) override; - void buildPaths(const PathSet & paths, BuildMode buildMode) override - { notImpl(); } - BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, BuildMode buildMode) override - { notImpl(); } + { unsupported(); } void ensurePath(const Path & path) override - { notImpl(); } + { unsupported(); } void addTempRoot(const Path & path) override - { notImpl(); } + { unsupported(); } void addIndirectRoot(const Path & path) override - { notImpl(); } + { unsupported(); } Roots findRoots() override - { notImpl(); } + { unsupported(); } void collectGarbage(const GCOptions & options, GCResults & results) override - { notImpl(); } + { unsupported(); } ref getFSAccessor() override; void addSignatures(const Path & storePath, const StringSet & sigs) override - { notImpl(); } + { unsupported(); } std::shared_ptr getBuildLog(const Path & path) override; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index befc560bfce..de0562aef49 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ 
-148,12 +148,6 @@ struct LegacySSHStore : public Store sink(*savedNAR.data); } - /* Unsupported methods. */ - [[noreturn]] void unsupported() - { - throw Error("operation not supported on SSH stores"); - } - PathSet queryAllValidPaths() override { unsupported(); } void queryReferrers(const Path & path, PathSet & referrers) override @@ -177,9 +171,6 @@ struct LegacySSHStore : public Store const PathSet & references, bool repair) override { unsupported(); } - void buildPaths(const PathSet & paths, BuildMode buildMode) override - { unsupported(); } - BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, BuildMode buildMode) override { unsupported(); } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 835bbb90e0b..850ea211dd5 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -523,6 +523,17 @@ const Store::Stats & Store::getStats() } +void Store::buildPaths(const PathSet & paths, BuildMode buildMode) +{ + for (auto & path : paths) + if (isDerivation(path)) + unsupported(); + + if (queryValidPaths(paths).size() != paths.size()) + unsupported(); +} + + void copyStorePath(ref srcStore, ref dstStore, const Path & storePath, bool repair, bool dontCheckSigs) { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 067309c9e95..b763849ade9 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -18,6 +18,12 @@ namespace nix { +MakeError(SubstError, Error) +MakeError(BuildError, Error) /* denotes a permanent build failure */ +MakeError(InvalidPath, Error) +MakeError(Unsupported, Error) + + struct BasicDerivation; struct Derivation; class FSAccessor; @@ -414,7 +420,7 @@ public: output paths can be created by running the builder, after recursively building any sub-derivations. For inputs that are not derivations, substitute them. */ - virtual void buildPaths(const PathSet & paths, BuildMode buildMode = bmNormal) = 0; + virtual void buildPaths(const PathSet & paths, BuildMode buildMode = bmNormal); /* Build a single non-materialized derivation (i.e. not from an on-disk .drv file). Note that ‘drvPath’ is only used for @@ -584,6 +590,12 @@ protected: Stats stats; + /* Unsupported methods. */ + [[noreturn]] void unsupported() + { + throw Unsupported("requested operation is not supported by store ‘%s’", getUri()); + } + }; @@ -720,10 +732,4 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, for paths created by makeFixedOutputPath() / addToStore(). */ std::string makeFixedOutputCA(bool recursive, const Hash & hash); - -MakeError(SubstError, Error) -MakeError(BuildError, Error) /* denotes a permanent build failure */ -MakeError(InvalidPath, Error) - - } From 227a48f86f8c26c495783c5946bb75c2819bb7ac Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 14:36:56 +0200 Subject: [PATCH 0291/2196] Reduce severity of EMLINK warnings Fixes #1357. --- src/libstore/optimise-store.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index cf234e35d37..d354812e3da 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -220,8 +220,7 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path, InodeHa rather than on the original link. (Probably it temporarily increases the st_nlink field before decreasing it again.) 
*/ - if (st.st_size) - printInfo(format("‘%1%’ has maximum number of links") % linkPath); + debug("‘%s’ has reached maximum number of links", linkPath); return; } throw SysError(format("cannot rename ‘%1%’ to ‘%2%’") % tempLink % path); From b986c7f8b14c1270e012f22183737ebbaa33173d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 14:43:14 +0200 Subject: [PATCH 0292/2196] Pass verbosity level to build hook --- src/build-remote/build-remote.cc | 9 ++++++--- src/libstore/build.cc | 7 ++++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index d7aee288670..f3195f6317d 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -130,14 +130,15 @@ int main (int argc, char * * argv) putenv(ssh_env) == -1) throw SysError("setting SSH env vars"); - if (argc != 4) + if (argc != 5) throw UsageError("called without required arguments"); auto store = openStore(); auto localSystem = argv[1]; - settings.maxSilentTime = stoull(string(argv[2])); - settings.buildTimeout = stoull(string(argv[3])); + settings.maxSilentTime = std::stoll(argv[2]); + settings.buildTimeout = std::stoll(argv[3]); + verbosity = (Verbosity) std::stoll(argv[4]); currentLoad = getEnv("NIX_CURRENT_LOAD", "/run/nix/current-load"); @@ -145,6 +146,8 @@ int main (int argc, char * * argv) AutoCloseFD bestSlotLock; auto machines = readConf(); + debug("got %d remote builders", machines.size()); + string drvPath; string hostName; for (string line; getline(cin, line);) { diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 01a3203dd90..8b869063d5e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -614,10 +614,11 @@ HookInstance::HookInstance() throw SysError("dupping builder's stdout/stderr"); Strings args = { - baseNameOf(buildHook), + baseNameOf(settings.buildHook), settings.thisSystem, - (format("%1%") % settings.maxSilentTime).str(), - (format("%1%") % settings.buildTimeout).str() + std::to_string(settings.maxSilentTime), + std::to_string(settings.buildTimeout), + std::to_string(verbosity) }; execv(buildHook.c_str(), stringsToCharPtrs(args).data()); From ca9f589a93309ca548d772f1634169007568d6a0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 15:00:39 +0200 Subject: [PATCH 0293/2196] build-remote: Don't copy the .drv closure Since build-remote uses buildDerivation() now, we don't need to copy the .drv file anymore. This greatly reduces the set of input paths copied to the remote side (e.g. from 392 to 51 store paths for GNU hello on x86_64-darwin). 
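For illustration, the hook-side flow after this change, condensed from the build-remote.cc hunk below: only the input sources are uploaded, and the remote side builds from an in-memory BasicDerivation instead of a copied .drv file.

    copyPaths(store, ref<Store>(sshStore), inputs);   // upload only the build inputs
    BasicDerivation drv(readDerivation(drvPath));     // parse the .drv locally
    drv.inputSrcs = inputs;                           // the .drv closure itself stays local
    sshStore->buildDerivation(drvPath, drv);          // remote builds from the in-memory derivation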
--- src/build-remote/build-remote.cc | 7 +++++-- src/libstore/build.cc | 17 +++-------------- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index f3195f6317d..1ee8a625b6b 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -269,8 +269,11 @@ int main (int argc, char * * argv) copyPaths(store, ref(sshStore), inputs); uploadLock = -1; - printError("building ‘%s’ on ‘%s’", drvPath, hostName); - sshStore->buildDerivation(drvPath, readDerivation(drvPath)); + BasicDerivation drv(readDerivation(drvPath)); + drv.inputSrcs = inputs; + + printError("building ‘%s’ on ‘%s’", drvPath, storeUri); + sshStore->buildDerivation(drvPath, drv); PathSet missing; for (auto & path : outputs) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 8b869063d5e..a0efd880400 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1622,23 +1622,12 @@ HookReply DerivationGoal::tryBuildHook() hook = std::move(worker.hook); /* Tell the hook all the inputs that have to be copied to the - remote system. This unfortunately has to contain the entire - derivation closure to ensure that the validity invariant holds - on the remote system. (I.e., it's unfortunate that we have to - list it since the remote system *probably* already has it.) */ - PathSet allInputs; - allInputs.insert(inputPaths.begin(), inputPaths.end()); - worker.store.computeFSClosure(drvPath, allInputs); - - string s; - for (auto & i : allInputs) { s += i; s += ' '; } - writeLine(hook->toHook.writeSide.get(), s); + remote system. */ + writeLine(hook->toHook.writeSide.get(), concatStringsSep(" ", inputPaths)); /* Tell the hooks the missing outputs that have to be copied back from the remote system. */ - s = ""; - for (auto & i : missingPaths) { s += i; s += ' '; } - writeLine(hook->toHook.writeSide.get(), s); + writeLine(hook->toHook.writeSide.get(), concatStringsSep(" ", missingPaths)); hook->toHook.writeSide = -1; From d7653dfc6dea076ecbe00520c6137977e0fced35 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 15:46:47 +0200 Subject: [PATCH 0294/2196] Remove $NIX_BUILD_HOOK and $NIX_CURRENT_LOAD This is to simplify remote build configuration. These environment variables predate nix.conf. The build hook now has a sensible default (namely build-remote). The current load is kept in the Nix state directory now. --- .../advanced-topics/distributed-builds.xml | 15 +- doc/manual/command-ref/env-common.xml | 133 ------------------ src/build-remote/build-remote.cc | 4 +- src/libstore/build.cc | 12 +- src/libstore/globals.hh | 3 + tests/remote-builds.nix | 1 - 6 files changed, 12 insertions(+), 156 deletions(-) diff --git a/doc/manual/advanced-topics/distributed-builds.xml b/doc/manual/advanced-topics/distributed-builds.xml index d5bc1c59255..1957e1105e6 100644 --- a/doc/manual/advanced-topics/distributed-builds.xml +++ b/doc/manual/advanced-topics/distributed-builds.xml @@ -22,10 +22,7 @@ will call whenever it wants to build a derivation. The build hook will perform it in the usual way if possible, or it can accept it, in which case it is responsible for somehow getting the inputs of the build to another machine, doing the build there, and getting the -results back. The details of the build hook protocol are described in -the documentation of the NIX_BUILD_HOOK -variable. +results back. 
Remote machine configuration: <filename>remote-systems.conf</filename> @@ -103,14 +100,6 @@ requiredSystemFeatures = [ "kvm" ]; -You should also set up the environment variable -NIX_CURRENT_LOAD to point at a directory (e.g., -/var/run/nix/current-load) that -build-remote uses to remember how many builds -it is currently executing remotely. It doesn't look at the actual -load on the remote machine, so if you have multiple instances of Nix -running, they should use the same NIX_CURRENT_LOAD -file. Maybe in the future build-remote will -look at the actual remote load. + diff --git a/doc/manual/command-ref/env-common.xml b/doc/manual/command-ref/env-common.xml index c757cb17bd1..a83aeaf2e57 100644 --- a/doc/manual/command-ref/env-common.xml +++ b/doc/manual/command-ref/env-common.xml @@ -148,139 +148,6 @@ $ mount -o bind /mnt/otherdisk/nix /nix -NIX_BUILD_HOOK - - - - Specifies the location of the build hook, - which is a program (typically some script) that Nix will call - whenever it wants to build a derivation. This is used to implement - distributed builds (see ). - - - - - - - - - NIX_REMOTE This variable should be set to diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 1ee8a625b6b..c41383bcf27 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -140,7 +140,9 @@ int main (int argc, char * * argv) settings.buildTimeout = std::stoll(argv[3]); verbosity = (Verbosity) std::stoll(argv[4]); - currentLoad = getEnv("NIX_CURRENT_LOAD", "/run/nix/current-load"); + /* It would be more appropriate to use $XDG_RUNTIME_DIR, since + that gets cleared on reboot, but it wouldn't work on OS X. */ + currentLoad = settings.nixStateDir + "/current-load"; std::shared_ptr sshStore; AutoCloseFD bestSlotLock; diff --git a/src/libstore/build.cc b/src/libstore/build.cc index a0efd880400..9b3f799b9f2 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -583,11 +583,7 @@ struct HookInstance HookInstance::HookInstance() { - debug("starting build hook"); - - Path buildHook = getEnv("NIX_BUILD_HOOK"); - if (string(buildHook, 0, 1) != "/") buildHook = settings.nixLibexecDir + "/nix/" + buildHook; - buildHook = canonPath(buildHook); + debug("starting build hook ‘%s’", settings.buildHook); /* Create a pipe to get the output of the child. 
*/ fromHook.create(); @@ -621,9 +617,9 @@ HookInstance::HookInstance() std::to_string(verbosity) }; - execv(buildHook.c_str(), stringsToCharPtrs(args).data()); + execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data()); - throw SysError(format("executing ‘%1%’") % buildHook); + throw SysError("executing ‘%s’", settings.buildHook); }); pid.setSeparatePG(true); @@ -1569,7 +1565,7 @@ void DerivationGoal::buildDone() HookReply DerivationGoal::tryBuildHook() { - if (!settings.useBuildHook || getEnv("NIX_BUILD_HOOK") == "" || !useDerivation) return rpDecline; + if (!settings.useBuildHook || !useDerivation) return rpDecline; if (!worker.hook) worker.hook = std::make_unique(); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index b4f44de2e65..25cc3e068ee 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -127,6 +127,9 @@ public: Setting useBuildHook{this, true, "remote-builds", "Whether to use build hooks (for distributed builds)."}; + PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook", + "The path of the helper program that executes builds to remote machines."}; + Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", "Amount of reserved disk space for the garbage collector."}; diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index 63aaa4d88f5..39bd090e43e 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -43,7 +43,6 @@ in { config, pkgs, ... }: { nix.maxJobs = 0; # force remote building nix.distributedBuilds = true; - nix.envVars = pkgs.lib.mkAfter { NIX_BUILD_HOOK = "${nix}/libexec/nix/build-remote"; }; nix.buildMachines = [ { hostName = "slave1"; sshUser = "root"; From 3f5b98e65a86abd31f97bd763ae5cb41ff4aeda8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 15:59:43 +0200 Subject: [PATCH 0295/2196] Chomp log output from the build hook --- src/libstore/build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 9b3f799b9f2..ec368463252 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3091,7 +3091,7 @@ void DerivationGoal::handleChildOutput(int fd, const string & data) } if (hook && fd == hook->fromHook.readSide.get()) - printError(data); // FIXME? + printError(chomp(data)); } From deac171925bf2e3960d2f837d95b71c0427d26dd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 16:08:13 +0200 Subject: [PATCH 0296/2196] Implement LegacySSHStore::buildDerivation() This makes LegacySSHStore usable by build-remote and hydra-queue-runner. 
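A rough usage sketch (not part of this patch): the helper name, the example store URI and the use of BuildResult's success() helper are illustrative assumptions; the rest follows the openStore()/readDerivation()/buildDerivation() API visible elsewhere in this series.

    // Hypothetical helper for illustration only; host URI is made up.
    static void buildOnLegacySSH(const Path & drvPath)
    {
        auto store = openStore("ssh://builder.example.org", {{"max-connections", "1"}});
        BasicDerivation drv(readDerivation(drvPath));   // read the derivation locally
        auto res = store->buildDerivation(drvPath, drv);
        if (!res.success())                             // assumes BuildResult's success() helper
            printError("remote build of ‘%s’ failed: %s", drvPath, res.errorMsg);
    }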
--- src/libstore/legacy-ssh-store.cc | 37 +++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index de0562aef49..d6b70b99297 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -5,6 +5,7 @@ #include "store-api.hh" #include "worker-protocol.hh" #include "ssh.hh" +#include "derivations.hh" namespace nix { @@ -21,6 +22,7 @@ struct LegacySSHStore : public Store std::unique_ptr sshConn; FdSink to; FdSource from; + int remoteVersion; }; std::string host; @@ -53,8 +55,6 @@ struct LegacySSHStore : public Store conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); - int remoteVersion; - try { conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; conn->to.flush(); @@ -62,8 +62,8 @@ struct LegacySSHStore : public Store unsigned int magic = readInt(conn->from); if (magic != SERVE_MAGIC_2) throw Error("protocol mismatch with ‘nix-store --serve’ on ‘%s’", host); - remoteVersion = readInt(conn->from); - if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200) + conn->remoteVersion = readInt(conn->from); + if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) throw Error("unsupported ‘nix-store --serve’ protocol version on ‘%s’", host); } catch (EndOfFile & e) { @@ -173,7 +173,34 @@ struct LegacySSHStore : public Store BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, BuildMode buildMode) override - { unsupported(); } + { + auto conn(connections->get()); + + conn->to + << cmdBuildDerivation + << drvPath + << drv + << settings.maxSilentTime + << settings.buildTimeout; + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2) + conn->to + << settings.maxLogSize; + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3) + conn->to + << settings.buildRepeat + << settings.enforceDeterminism; + + conn->to.flush(); + + BuildResult status; + status.status = (BuildResult::Status) readInt(conn->from); + conn->from >> status.errorMsg; + + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3) + conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime; + + return status; + } void ensurePath(const Path & path) override { unsupported(); } From 3e4bdfedee747868a32b8a9c7a89a6f860415be8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 17:28:19 +0200 Subject: [PATCH 0297/2196] Minor cleanup --- src/libutil/util.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 026e493514e..98c0aff1e72 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1078,9 +1078,9 @@ bool statusOk(int status) } -bool hasPrefix(const string & s, const string & suffix) +bool hasPrefix(const string & s, const string & prefix) { - return s.compare(0, suffix.size(), suffix) == 0; + return s.compare(0, prefix.size(), prefix) == 0; } From 031d70e5009fcce58afabc9113d5a5de4a16b19a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 17:35:30 +0200 Subject: [PATCH 0298/2196] Support arbitrary store URIs in nix.machines For backwards compatibility, if the URI is just a hostname, ssh:// (i.e. LegacySSHStore) is prepended automatically. Also, all fields except the URI are now optional. For example, this is a valid nix.machines file: local?root=/tmp/nix This is useful for testing the remote build machinery since you don't have to mess around with ssh. 
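A few illustrative nix.machines lines accepted by the new parser (host names and key paths here are made up). The columns after the URI are, in order: system types, SSH key, max jobs, speed factor, supported features and mandatory features, all optional.

    # Hostname form (backwards compatible): ssh:// is prepended automatically.
    root@builder1.example.org  x86_64-linux  /root/.ssh/id_builder  4  2  kvm
    # Full store URI; every column after the URI may be omitted.
    ssh://root@builder2.example.org  x86_64-darwin
    # Local store rooted elsewhere, handy for testing (from the commit message above).
    local?root=/tmp/nix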
--- src/build-remote/build-remote.cc | 77 ++++++++++++++++++-------------- src/libstore/build.cc | 1 + src/libstore/store-api.cc | 11 ++--- src/libstore/store-api.hh | 23 +++++++--- 4 files changed, 66 insertions(+), 46 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index c41383bcf27..a19dac241d0 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -27,12 +27,12 @@ class Machine { const std::set mandatoryFeatures; public: - const string hostName; + const string storeUri; const std::vector systemTypes; const string sshKey; const unsigned int maxJobs; const unsigned int speedFactor; - bool enabled; + bool enabled = true; bool allSupported(const std::set & features) const { return std::all_of(features.begin(), features.end(), @@ -49,7 +49,7 @@ class Machine { }); } - Machine(decltype(hostName) hostName, + Machine(decltype(storeUri) storeUri, decltype(systemTypes) systemTypes, decltype(sshKey) sshKey, decltype(maxJobs) maxJobs, @@ -58,14 +58,18 @@ class Machine { decltype(mandatoryFeatures) mandatoryFeatures) : supportedFeatures(supportedFeatures), mandatoryFeatures(mandatoryFeatures), - hostName(hostName), + storeUri( + // Backwards compatibility: if the URI is a hostname, + // prepend ssh://. + storeUri.find("://") != std::string::npos || hasPrefix(storeUri, "local") || hasPrefix(storeUri, "remote") || hasPrefix(storeUri, "auto") + ? storeUri + : "ssh://" + storeUri), systemTypes(systemTypes), sshKey(sshKey), maxJobs(maxJobs), - speedFactor(std::max(1U, speedFactor)), - enabled(true) - {}; -};; + speedFactor(std::max(1U, speedFactor)) + {} +}; static std::vector readConf() { @@ -87,13 +91,13 @@ static std::vector readConf() } auto tokens = tokenizeString>(line); auto sz = tokens.size(); - if (sz < 4) + if (sz < 1) throw FormatError("bad machines.conf file ‘%1%’", conf); machines.emplace_back(tokens[0], - tokenizeString>(tokens[1], ","), - tokens[2], - stoull(tokens[3]), - sz >= 5 ? stoull(tokens[4]) : 1LL, + sz >= 2 ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, + sz >= 3 ? tokens[2] : "", + sz >= 4 ? std::stoull(tokens[3]) : 1LL, + sz >= 5 ? std::stoull(tokens[4]) : 1LL, sz >= 6 ? tokenizeString>(tokens[5], ",") : std::set{}, @@ -104,31 +108,27 @@ static std::vector readConf() return machines; } +std::string escapeUri(std::string uri) +{ + std::replace(uri.begin(), uri.end(), '/', '_'); + return uri; +} + static string currentLoad; static AutoCloseFD openSlotLock(const Machine & m, unsigned long long slot) { - std::ostringstream fn_stream(currentLoad, std::ios_base::ate | std::ios_base::out); - fn_stream << "/"; - for (auto t : m.systemTypes) { - fn_stream << t << "-"; - } - fn_stream << m.hostName << "-" << slot; - return openLockFile(fn_stream.str(), true); + return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true); } -static char display_env[] = "DISPLAY="; -static char ssh_env[] = "SSH_ASKPASS="; - int main (int argc, char * * argv) { return handleExceptions(argv[0], [&]() { initNix(); /* Ensure we don't get any SSH passphrase or host key popups. 
*/ - if (putenv(display_env) == -1 || - putenv(ssh_env) == -1) - throw SysError("setting SSH env vars"); + unsetenv("DISPLAY"); + unsetenv("SSH_ASKPASS"); if (argc != 5) throw UsageError("called without required arguments"); @@ -151,7 +151,7 @@ int main (int argc, char * * argv) debug("got %d remote builders", machines.size()); string drvPath; - string hostName; + string storeUri; for (string line; getline(cin, line);) { auto tokens = tokenizeString>(line); auto sz = tokens.size(); @@ -178,6 +178,8 @@ int main (int argc, char * * argv) Machine * bestMachine = nullptr; unsigned long long bestLoad = 0; for (auto & m : machines) { + debug("considering building on ‘%s’", m.storeUri); + if (m.enabled && std::find(m.systemTypes.begin(), m.systemTypes.end(), neededSystem) != m.systemTypes.end() && @@ -238,16 +240,21 @@ int main (int argc, char * * argv) lock = -1; try { - sshStore = openStore("ssh-ng://" + bestMachine->hostName, - { {"ssh-key", bestMachine->sshKey }, - {"max-connections", "1" } }); - hostName = bestMachine->hostName; + + Store::Params storeParams{{"max-connections", "1"}}; + if (bestMachine->sshKey != "") + storeParams["ssh-key"] = bestMachine->sshKey; + + sshStore = openStore(bestMachine->storeUri, storeParams); + storeUri = bestMachine->storeUri; + } catch (std::exception & e) { printError("unable to open SSH connection to ‘%s’: %s; trying other available machines...", - bestMachine->hostName, e.what()); + bestMachine->storeUri, e.what()); bestMachine->enabled = false; continue; } + goto connected; } } @@ -257,11 +264,15 @@ int main (int argc, char * * argv) string line; if (!getline(cin, line)) throw Error("hook caller didn't send inputs"); + auto inputs = tokenizeString(line); if (!getline(cin, line)) throw Error("hook caller didn't send outputs"); + auto outputs = tokenizeString(line); - AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + hostName + ".upload-lock", true); + + AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + escapeUri(storeUri) + ".upload-lock", true); + auto old = signal(SIGALRM, handleAlarm); alarm(15 * 60); if (!lockFile(uploadLock.get(), ltWrite, true)) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index ec368463252..a9649ea378c 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1862,6 +1862,7 @@ void DerivationGoal::startBuilder() dirsInChroot[i] = r; else { Path p = chrootRootDir + i; + debug("linking ‘%1%’ to ‘%2%’", p, r); if (link(r.c_str(), p.c_str()) == -1) { /* Hard-linking fails if we exceed the maximum link count on a file (e.g. 
32000 of ext3), diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 850ea211dd5..75de4c93323 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -709,10 +709,11 @@ namespace nix { RegisterStoreImplementation::Implementations * RegisterStoreImplementation::implementations = 0; -ref openStore(const std::string & uri_) +ref openStore(const std::string & uri_, + const Store::Params & extraParams) { auto uri(uri_); - Store::Params params; + Store::Params params(extraParams); auto q = uri.find('?'); if (q != std::string::npos) { for (auto s : tokenizeString(uri.substr(q + 1), "&")) { @@ -722,11 +723,7 @@ ref openStore(const std::string & uri_) } uri = uri_.substr(0, q); } - return openStore(uri, params); -} -ref openStore(const std::string & uri, const Store::Params & params) -{ for (auto fun : *RegisterStoreImplementation::implementations) { auto store = fun(uri, params); if (store) { @@ -735,7 +732,7 @@ ref openStore(const std::string & uri, const Store::Params & params) } } - throw Error(format("don't know how to open Nix store ‘%s’") % uri); + throw Error("don't know how to open Nix store ‘%s’", uri); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index b763849ade9..2388558b362 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -668,20 +668,31 @@ void removeTempRoots(); /* Return a Store object to access the Nix store denoted by ‘uri’ (slight misnomer...). Supported values are: - * ‘direct’: The Nix store in /nix/store and database in + * ‘local’: The Nix store in /nix/store and database in /nix/var/nix/db, accessed directly. * ‘daemon’: The Nix store accessed via a Unix domain socket connection to nix-daemon. + * ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on + whether the user has write access to the local Nix + store/database. + * ‘file://’: A binary cache stored in . - If ‘uri’ is empty, it defaults to ‘direct’ or ‘daemon’ depending on - whether the user has write access to the local Nix store/database. - set to true *unless* you're going to collect garbage. */ -ref openStore(const std::string & uri = getEnv("NIX_REMOTE")); + * ‘https://’: A binary cache accessed via HTTP. + + * ‘s3://’: A writable binary cache stored on Amazon's Simple + Storage Service. + + * ‘ssh://[user@]’: A remote Nix store accessed by running + ‘nix-store --serve’ via SSH. -ref openStore(const std::string & uri, const Store::Params & params); + You can pass parameters to the store implementation by appending + ‘?key=value&key=value&...’ to the URI. +*/ +ref openStore(const std::string & uri = getEnv("NIX_REMOTE"), + const Store::Params & extraParams = Store::Params()); void copyPaths(ref from, ref to, const PathSet & storePaths, bool substitute = false); From 3a5f04f48cc39eec5cc454e387aa290e08295aff Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 May 2017 20:03:25 +0200 Subject: [PATCH 0299/2196] build-remote: Don't require signatures This restores the old behaviour. 
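Concretely, the two build-remote call sites in the hunks below now pass the new trailing copyPaths arguments, substitute and dontCheckSigs, so unsigned paths are accepted in both directions:

    copyPaths(store, ref<Store>(sshStore), inputs, false, true);   // push inputs without signature checks
    copyPaths(ref<Store>(sshStore), store, missing, false, true);  // pull outputs back, likewise unsigned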
--- src/build-remote/build-remote.cc | 4 ++-- src/libstore/legacy-ssh-store.cc | 3 --- src/libstore/local-store.cc | 2 ++ src/libstore/store-api.cc | 18 +++++++++++++----- src/libstore/store-api.hh | 7 ++----- src/nix-copy-closure/nix-copy-closure.cc | 2 +- 6 files changed, 20 insertions(+), 16 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index a19dac241d0..ba909ec44d6 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -279,7 +279,7 @@ int main (int argc, char * * argv) printError("somebody is hogging the upload lock for ‘%s’, continuing..."); alarm(0); signal(SIGALRM, old); - copyPaths(store, ref(sshStore), inputs); + copyPaths(store, ref(sshStore), inputs, false, true); uploadLock = -1; BasicDerivation drv(readDerivation(drvPath)); @@ -294,7 +294,7 @@ int main (int argc, char * * argv) if (!missing.empty()) { setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */ - copyPaths(ref(sshStore), store, missing); + copyPaths(ref(sshStore), store, missing, false, true); } return; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index d6b70b99297..1f6ea4dc1cf 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -223,9 +223,6 @@ struct LegacySSHStore : public Store void addSignatures(const Path & storePath, const StringSet & sigs) override { unsupported(); } - bool isTrusted() override - { return true; } - void computeFSClosure(const PathSet & paths, PathSet & out, bool flipDirection = false, bool includeOutputs = false, bool includeDerivers = false) override diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 5a98454ab38..c8e61126c1b 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -915,6 +915,8 @@ void LocalStore::invalidatePath(State & state, const Path & path) void LocalStore::addToStore(const ValidPathInfo & info, const ref & nar, bool repair, bool dontCheckSigs, std::shared_ptr accessor) { + assert(info.narHash); + Hash h = hashString(htSHA256, *nar); if (h != info.narHash) throw Error(format("hash mismatch importing path ‘%s’; expected hash ‘%s’, got ‘%s’") % diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 75de4c93323..b5a91e53672 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -542,15 +542,22 @@ void copyStorePath(ref srcStore, ref dstStore, StringSink sink; srcStore->narFromPath({storePath}, sink); - if (srcStore->isTrusted()) - dontCheckSigs = true; - if (!info->narHash && dontCheckSigs) { auto info2 = make_ref(*info); info2->narHash = hashString(htSHA256, *sink.s); info = info2; } + assert(info->narHash); + + if (info->ultimate) { + auto info2 = make_ref(*info); + info2->ultimate = false; + info = info2; + } + + assert(info->narHash); + dstStore->addToStore(*info, sink.s, repair, dontCheckSigs); } @@ -802,7 +809,8 @@ std::list> getDefaultSubstituters() } -void copyPaths(ref from, ref to, const PathSet & storePaths, bool substitute) +void copyPaths(ref from, ref to, const PathSet & storePaths, + bool substitute, bool dontCheckSigs) { PathSet valid = to->queryValidPaths(storePaths, substitute); @@ -830,7 +838,7 @@ void copyPaths(ref from, ref to, const PathSet & storePaths, bool if (!to->isValidPath(storePath)) { Activity act(*logger, lvlInfo, format("copying ‘%s’...") % storePath); - copyStorePath(from, to, storePath); + copyStorePath(from, to, storePath, false, dontCheckSigs); 
logger->incProgress(copiedLabel); } else diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 2388558b362..8ca3f4b27d0 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -570,10 +570,6 @@ public: const Stats & getStats(); - /* Whether this store paths from this store can be imported even - if they lack a signature. */ - virtual bool isTrusted() { return false; } - /* Return the build log of the specified store path, if available, or null otherwise. */ virtual std::shared_ptr getBuildLog(const Path & path) @@ -695,7 +691,8 @@ ref openStore(const std::string & uri = getEnv("NIX_REMOTE"), const Store::Params & extraParams = Store::Params()); -void copyPaths(ref from, ref to, const PathSet & storePaths, bool substitute = false); +void copyPaths(ref from, ref to, const PathSet & storePaths, + bool substitute = false, bool dontCheckSigs = false); enum StoreType { tDaemon, diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index ed43bffbc8c..dc324abcb3b 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -58,6 +58,6 @@ int main(int argc, char ** argv) PathSet closure; from->computeFSClosure(storePaths2, closure, false, includeOutputs); - copyPaths(from, to, closure, useSubstitutes); + copyPaths(from, to, closure, useSubstitutes, true); }); } From feefcb3a982d3e3b8e89798d72d8afa996169569 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 12:01:46 +0200 Subject: [PATCH 0300/2196] build-remote: Ugly hackery to get build logs to work The build hook mechanism expects build log output to go to file descriptor 4, so do that. --- src/build-remote/build-remote.cc | 2 +- src/libstore/legacy-ssh-store.cc | 6 +++++- src/libstore/ssh.cc | 2 ++ src/libstore/ssh.hh | 4 +++- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index ba909ec44d6..acf571ff156 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -241,7 +241,7 @@ int main (int argc, char * * argv) try { - Store::Params storeParams{{"max-connections", "1"}}; + Store::Params storeParams{{"max-connections", "1"}, {"log-fd", "4"}}; if (bestMachine->sshKey != "") storeParams["ssh-key"] = bestMachine->sshKey; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 1f6ea4dc1cf..a6479a450e3 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -17,6 +17,9 @@ struct LegacySSHStore : public Store const Setting sshKey{this, "", "ssh-key", "path to an SSH private key"}; const Setting compress{this, false, "compress", "whether to compress the connection"}; + // Hack for getting remote build log output. + const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; + struct Connection { std::unique_ptr sshConn; @@ -44,7 +47,8 @@ struct LegacySSHStore : public Store sshKey, // Use SSH master only if using more than 1 connection. 
connections->capacity() > 1, - compress) + compress, + logFD) { } diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index e54f3f4ba28..6edabaa3a1d 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -31,6 +31,8 @@ std::unique_ptr SSHMaster::startCommand(const std::string throw SysError("duping over stdin"); if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("duping over stdout"); + if (logFD != -1 && dup2(logFD, STDERR_FILENO) == -1) + throw SysError("duping over stderr"); Strings args = { "ssh", host.c_str(), "-x", "-a" }; addCommonSSHOpts(args); diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index b4396467e54..18dea227ad1 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -13,6 +13,7 @@ private: const std::string keyFile; const bool useMaster; const bool compress; + const int logFD; struct State { @@ -27,11 +28,12 @@ private: public: - SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress) + SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress, int logFD = -1) : host(host) , keyFile(keyFile) , useMaster(useMaster) , compress(compress) + , logFD(logFD) { } From 70581b63633016329789872c73dc48b1d498c729 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 12:07:56 +0200 Subject: [PATCH 0301/2196] Fix build hook test --- tests/build-hook.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/build-hook.sh b/tests/build-hook.sh index ef77a3ae528..2005c7cebdc 100644 --- a/tests/build-hook.sh +++ b/tests/build-hook.sh @@ -1,8 +1,8 @@ source common.sh -export NIX_BUILD_HOOK="$(pwd)/build-hook.hook.sh" +clearStore -outPath=$(nix-build build-hook.nix --no-out-link) +outPath=$(nix-build build-hook.nix --no-out-link --option build-hook $(pwd)/build-hook.hook.sh) echo "output path is $outPath" From 174b68a2a2e9e58fa1a1a0036858a566c51684dc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 12:16:29 +0200 Subject: [PATCH 0302/2196] build-hook: If there are no machines defined, quit permanently --- src/build-remote/build-remote.cc | 5 +++++ src/libstore/build.cc | 11 +++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index acf571ff156..388f1e04686 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -150,6 +150,11 @@ int main (int argc, char * * argv) auto machines = readConf(); debug("got %d remote builders", machines.size()); + if (machines.empty()) { + std::cerr << "# decline-permanently\n"; + return; + } + string drvPath; string storeUri; for (string line; getline(cin, line);) { diff --git a/src/libstore/build.cc b/src/libstore/build.cc index a9649ea378c..e756d3377c3 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1598,8 +1598,15 @@ HookReply DerivationGoal::tryBuildHook() debug(format("hook reply is ‘%1%’") % reply); - if (reply == "decline" || reply == "postpone") - return reply == "decline" ? 
rpDecline : rpPostpone; + if (reply == "decline") + return rpDecline; + else if (reply == "decline-permanently") { + settings.useBuildHook = false; + worker.hook = 0; + return rpDecline; + } + else if (reply == "postpone") + return rpPostpone; else if (reply != "accept") throw Error(format("bad hook reply ‘%1%’") % reply); From ebc9f36a8111ddecc8e265e8a6a70048218f244d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 13:17:37 +0200 Subject: [PATCH 0303/2196] Factor out machines.conf parsing This allows hydra-queue-runner to use it. --- src/build-remote/build-remote.cc | 95 +++----------------------------- src/libstore/machines.cc | 65 ++++++++++++++++++++++ src/libstore/machines.hh | 35 ++++++++++++ 3 files changed, 108 insertions(+), 87 deletions(-) create mode 100644 src/libstore/machines.cc create mode 100644 src/libstore/machines.hh diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 388f1e04686..8a9d4571fef 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -9,6 +9,7 @@ #include #endif +#include "machines.hh" #include "shared.hh" #include "pathlocks.hh" #include "globals.hh" @@ -22,92 +23,6 @@ using std::cin; static void handleAlarm(int sig) { } -class Machine { - const std::set supportedFeatures; - const std::set mandatoryFeatures; - -public: - const string storeUri; - const std::vector systemTypes; - const string sshKey; - const unsigned int maxJobs; - const unsigned int speedFactor; - bool enabled = true; - - bool allSupported(const std::set & features) const { - return std::all_of(features.begin(), features.end(), - [&](const string & feature) { - return supportedFeatures.count(feature) || - mandatoryFeatures.count(feature); - }); - } - - bool mandatoryMet(const std::set & features) const { - return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(), - [&](const string & feature) { - return features.count(feature); - }); - } - - Machine(decltype(storeUri) storeUri, - decltype(systemTypes) systemTypes, - decltype(sshKey) sshKey, - decltype(maxJobs) maxJobs, - decltype(speedFactor) speedFactor, - decltype(supportedFeatures) supportedFeatures, - decltype(mandatoryFeatures) mandatoryFeatures) : - supportedFeatures(supportedFeatures), - mandatoryFeatures(mandatoryFeatures), - storeUri( - // Backwards compatibility: if the URI is a hostname, - // prepend ssh://. - storeUri.find("://") != std::string::npos || hasPrefix(storeUri, "local") || hasPrefix(storeUri, "remote") || hasPrefix(storeUri, "auto") - ? storeUri - : "ssh://" + storeUri), - systemTypes(systemTypes), - sshKey(sshKey), - maxJobs(maxJobs), - speedFactor(std::max(1U, speedFactor)) - {} -}; - -static std::vector readConf() -{ - auto conf = getEnv("NIX_REMOTE_SYSTEMS", SYSCONFDIR "/nix/machines"); - - auto machines = std::vector{}; - auto lines = std::vector{}; - try { - lines = tokenizeString>(readFile(conf), "\n"); - } catch (const SysError & e) { - if (e.errNo != ENOENT) - throw; - } - for (auto line : lines) { - chomp(line); - line.erase(std::find(line.begin(), line.end(), '#'), line.end()); - if (line.empty()) { - continue; - } - auto tokens = tokenizeString>(line); - auto sz = tokens.size(); - if (sz < 1) - throw FormatError("bad machines.conf file ‘%1%’", conf); - machines.emplace_back(tokens[0], - sz >= 2 ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, - sz >= 3 ? tokens[2] : "", - sz >= 4 ? std::stoull(tokens[3]) : 1LL, - sz >= 5 ? std::stoull(tokens[4]) : 1LL, - sz >= 6 ? 
- tokenizeString>(tokens[5], ",") : - std::set{}, - sz >= 7 ? - tokenizeString>(tokens[6], ",") : - std::set{}); - } - return machines; -} - std::string escapeUri(std::string uri) { std::replace(uri.begin(), uri.end(), '/', '_'); @@ -147,7 +62,13 @@ int main (int argc, char * * argv) std::shared_ptr sshStore; AutoCloseFD bestSlotLock; - auto machines = readConf(); + Machines machines; + try { + parseMachines(readFile(getEnv("NIX_REMOTE_SYSTEMS", SYSCONFDIR "/nix/machines")), machines); + } catch (const SysError & e) { + if (e.errNo != ENOENT) + throw; + } debug("got %d remote builders", machines.size()); if (machines.empty()) { diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc new file mode 100644 index 00000000000..471ce8efb9a --- /dev/null +++ b/src/libstore/machines.cc @@ -0,0 +1,65 @@ +#include "machines.hh" +#include "util.hh" +#include "globals.hh" + +#include + +namespace nix { + +Machine::Machine(decltype(storeUri) storeUri, + decltype(systemTypes) systemTypes, + decltype(sshKey) sshKey, + decltype(maxJobs) maxJobs, + decltype(speedFactor) speedFactor, + decltype(supportedFeatures) supportedFeatures, + decltype(mandatoryFeatures) mandatoryFeatures) : + storeUri( + // Backwards compatibility: if the URI is a hostname, + // prepend ssh://. + storeUri.find("://") != std::string::npos || hasPrefix(storeUri, "local") || hasPrefix(storeUri, "remote") || hasPrefix(storeUri, "auto") + ? storeUri + : "ssh://" + storeUri), + systemTypes(systemTypes), + sshKey(sshKey), + maxJobs(maxJobs), + speedFactor(std::max(1U, speedFactor)), + supportedFeatures(supportedFeatures), + mandatoryFeatures(mandatoryFeatures) +{} + +bool Machine::allSupported(const std::set & features) const { + return std::all_of(features.begin(), features.end(), + [&](const string & feature) { + return supportedFeatures.count(feature) || + mandatoryFeatures.count(feature); + }); +} + +bool Machine::mandatoryMet(const std::set & features) const { + return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(), + [&](const string & feature) { + return features.count(feature); + }); +} + +void parseMachines(const std::string & s, Machines & machines) +{ + for (auto line : tokenizeString>(s, "\n")) { + chomp(line); + line.erase(std::find(line.begin(), line.end(), '#'), line.end()); + if (line.empty()) continue; + auto tokens = tokenizeString>(line); + auto sz = tokens.size(); + if (sz < 1) + throw FormatError("bad machine specification ‘%s’", line); + machines.emplace_back(tokens[0], + sz >= 2 ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, + sz >= 3 ? tokens[2] : "", + sz >= 4 ? std::stoull(tokens[3]) : 1LL, + sz >= 5 ? std::stoull(tokens[4]) : 1LL, + sz >= 6 ? tokenizeString>(tokens[5], ",") : std::set{}, + sz >= 7 ? 
tokenizeString>(tokens[6], ",") : std::set{}); + } +} + +} diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh new file mode 100644 index 00000000000..96c4bd81a46 --- /dev/null +++ b/src/libstore/machines.hh @@ -0,0 +1,35 @@ +#pragma once + +#include "types.hh" + +namespace nix { + +struct Machine { + + const string storeUri; + const std::vector systemTypes; + const string sshKey; + const unsigned int maxJobs; + const unsigned int speedFactor; + const std::set supportedFeatures; + const std::set mandatoryFeatures; + bool enabled = true; + + bool allSupported(const std::set & features) const; + + bool mandatoryMet(const std::set & features) const; + + Machine(decltype(storeUri) storeUri, + decltype(systemTypes) systemTypes, + decltype(sshKey) sshKey, + decltype(maxJobs) maxJobs, + decltype(speedFactor) speedFactor, + decltype(supportedFeatures) supportedFeatures, + decltype(mandatoryFeatures) mandatoryFeatures); +}; + +typedef std::vector Machines; + +void parseMachines(const std::string & s, Machines & machines); + +} From 1a68710d4dff609bbaf61db3e17a2573f0aadf17 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 13:44:10 +0200 Subject: [PATCH 0304/2196] Add an option for specifying remote builders This is useful for one-off situations where you want to specify a builder on the command line instead of having to mess with nix.machines. E.g. $ nix-build -A hello --argstr system x86_64-darwin \ --option builders 'root@macstadium1 x86_64-darwin' will perform the specified build on "macstadium1". It also removes the need for a separate nix.machines file since you can specify builders in nix.conf directly. (In fact nix.machines is yet another hack that predates the general nix.conf configuration file, IIRC.) Note: this option is supported by the daemon for trusted users. The fact that this allows trusted users to specify paths to SSH keys to which they don't normally have access is maybe a bit too much trust... --- src/build-remote/build-remote.cc | 11 +++-------- src/build-remote/local.mk | 2 -- src/libstore/build.cc | 3 ++- src/libstore/globals.hh | 3 +++ src/libstore/machines.cc | 18 +++++++++++++++++- src/libstore/machines.hh | 2 ++ 6 files changed, 27 insertions(+), 12 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 8a9d4571fef..5cd3c518b78 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -45,7 +45,7 @@ int main (int argc, char * * argv) unsetenv("DISPLAY"); unsetenv("SSH_ASKPASS"); - if (argc != 5) + if (argc != 6) throw UsageError("called without required arguments"); auto store = openStore(); @@ -54,6 +54,7 @@ int main (int argc, char * * argv) settings.maxSilentTime = std::stoll(argv[2]); settings.buildTimeout = std::stoll(argv[3]); verbosity = (Verbosity) std::stoll(argv[4]); + settings.builders = argv[5]; /* It would be more appropriate to use $XDG_RUNTIME_DIR, since that gets cleared on reboot, but it wouldn't work on OS X. 
*/ @@ -62,13 +63,7 @@ int main (int argc, char * * argv) std::shared_ptr sshStore; AutoCloseFD bestSlotLock; - Machines machines; - try { - parseMachines(readFile(getEnv("NIX_REMOTE_SYSTEMS", SYSCONFDIR "/nix/machines")), machines); - } catch (const SysError & e) { - if (e.errNo != ENOENT) - throw; - } + auto machines = getMachines(); debug("got %d remote builders", machines.size()); if (machines.empty()) { diff --git a/src/build-remote/local.mk b/src/build-remote/local.mk index 62d5a010c24..64368a43ff7 100644 --- a/src/build-remote/local.mk +++ b/src/build-remote/local.mk @@ -7,5 +7,3 @@ build-remote_INSTALL_DIR := $(libexecdir)/nix build-remote_LIBS = libmain libutil libformat libstore build-remote_SOURCES := $(d)/build-remote.cc - -build-remote_CXXFLAGS = -DSYSCONFDIR="\"$(sysconfdir)\"" diff --git a/src/libstore/build.cc b/src/libstore/build.cc index e756d3377c3..8c2602a701b 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -614,7 +614,8 @@ HookInstance::HookInstance() settings.thisSystem, std::to_string(settings.maxSilentTime), std::to_string(settings.buildTimeout), - std::to_string(verbosity) + std::to_string(verbosity), + settings.builders }; execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data()); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 25cc3e068ee..d7a0b86a088 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -130,6 +130,9 @@ public: PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook", "The path of the helper program that executes builds to remote machines."}; + Setting builders{this, "", "builders", + "A semicolon-separated list of build machines, in the format of nix.machines."}; + Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", "Amount of reserved disk space for the garbage collector."}; diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 471ce8efb9a..479ed1432fb 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -44,7 +44,7 @@ bool Machine::mandatoryMet(const std::set & features) const { void parseMachines(const std::string & s, Machines & machines) { - for (auto line : tokenizeString>(s, "\n")) { + for (auto line : tokenizeString>(s, "\n;")) { chomp(line); line.erase(std::find(line.begin(), line.end(), '#'), line.end()); if (line.empty()) continue; @@ -62,4 +62,20 @@ void parseMachines(const std::string & s, Machines & machines) } } +Machines getMachines() +{ + Machines machines; + + try { + parseMachines(readFile(getEnv("NIX_REMOTE_SYSTEMS", settings.nixConfDir + "/machines")), machines); + } catch (const SysError & e) { + if (e.errNo != ENOENT) + throw; + } + + parseMachines(settings.builders, machines); + + return machines; +} + } diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 96c4bd81a46..e0455742844 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -32,4 +32,6 @@ typedef std::vector Machines; void parseMachines(const std::string & s, Machines & machines); +Machines getMachines(); + } From cd4d2705ec6e641ffa3b11dc1aabad22fc38251a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 14:18:46 +0200 Subject: [PATCH 0305/2196] build-remote: Fix fallback to other machines when connecting fails Opening an SSHStore or LegacySSHStore does not actually establish a connection, so the try/catch block here did nothing. Added a Store::connect() method to test whether a connection can be established. 
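Sketch of the resulting pattern in build-remote (it matches the hunk below): force the connection right after opening the store, so a failure can still be handled by falling back to another machine.

    try {
        sshStore = openStore(bestMachine->storeUri, storeParams);
        sshStore->connect();   // new: actually establishes the connection, throwing on failure
        storeUri = bestMachine->storeUri;
    } catch (std::exception & e) {
        printError("unable to open SSH connection to ‘%s’: %s; trying other available machines...",
            bestMachine->storeUri, e.what());
        bestMachine->enabled = false;
        continue;
    }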
--- src/build-remote/build-remote.cc | 1 + src/libstore/legacy-ssh-store.cc | 5 +++++ src/libstore/remote-store.cc | 8 +++++++- src/libstore/remote-store.hh | 2 ++ src/libstore/store-api.hh | 4 ++++ 5 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 5cd3c518b78..8876da6c063 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -167,6 +167,7 @@ int main (int argc, char * * argv) storeParams["ssh-key"] = bestMachine->sshKey; sshStore = openStore(bestMachine->storeUri, storeParams); + sshStore->connect(); storeUri = bestMachine->storeUri; } catch (std::exception & e) { diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index a6479a450e3..e09932e3d18 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -262,6 +262,11 @@ struct LegacySSHStore : public Store return readStorePaths(*this, conn->from); } + + void connect() override + { + auto conn(connections->get()); + } }; static RegisterStoreImplementation regStore([]( diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index af59d51106f..be8819bbc00 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -100,7 +100,7 @@ ref UDSRemoteStore::openConnection() throw Error(format("socket path ‘%1%’ is too long") % socketPath); strcpy(addr.sun_path, socketPath.c_str()); - if (connect(conn->fd.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (::connect(conn->fd.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1) throw SysError(format("cannot connect to daemon at ‘%1%’") % socketPath); conn->from.fd = conn->fd.get(); @@ -613,6 +613,12 @@ void RemoteStore::queryMissing(const PathSet & targets, } +void RemoteStore::connect() +{ + auto conn(connections->get()); +} + + RemoteStore::Connection::~Connection() { try { diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 479cf3a7909..ed430e4cabb 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -92,6 +92,8 @@ public: PathSet & willBuild, PathSet & willSubstitute, PathSet & unknown, unsigned long long & downloadSize, unsigned long long & narSize) override; + void connect() override; + protected: struct Connection diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 8ca3f4b27d0..b06f5d86a93 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -582,6 +582,10 @@ public: state.lock()->pathInfoCache.clear(); } + /* Establish a connection to the store, for store types that have + a notion of connection. Otherwise this is a no-op. */ + virtual void connect() { }; + protected: Stats stats; From 7f6837a0f6e7702a9e8c6da622873b955aa414cd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 14:36:59 +0200 Subject: [PATCH 0306/2196] Replace $NIX_REMOTE_SYSTEMS with an option "builder-files" Also, to unify with hydra-queue-runner, allow it to be a list of files. 
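In outline, the new lookup order is: read every file listed in builder-files, silently skipping files that do not exist, then append whatever the builders setting contains, so builders given on the command line always take effect. A rough sketch of that order, assuming a much-simplified machine format in which each non-comment line is one machine (the real parseMachines handles the full nix.machines syntax and ';'-separated inline entries):

    #include <fstream>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    using Machines = std::vector<std::string>;   // one entry per machine line

    static void parseMachines(const std::string & s, Machines & machines) {
        std::istringstream in(s);
        for (std::string line; std::getline(in, line); )
            if (!line.empty() && line[0] != '#') machines.push_back(line);
    }

    static Machines getMachines(const std::vector<std::string> & builderFiles,
                                const std::string & builders) {
        Machines machines;
        for (auto & file : builderFiles) {
            std::ifstream in(file);
            if (!in) continue;                    // like ENOENT: just skip
            std::stringstream buf;
            buf << in.rdbuf();
            parseMachines(buf.str(), machines);
        }
        parseMachines(builders, machines);        // inline builders come last
        return machines;
    }

    int main() {
        for (auto & m : getMachines({"/etc/nix/machines"},
                                    "ssh://mac x86_64-darwin"))
            std::cout << m << "\n";
    }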
--- src/libstore/globals.cc | 4 ++++ src/libstore/globals.hh | 4 ++++ src/libstore/machines.cc | 36 +++++++++++++++++++++++------------- src/libstore/machines.hh | 4 +++- 4 files changed, 34 insertions(+), 14 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 953bf6aaaa0..4bdbde989ab 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -43,6 +43,10 @@ Settings::Settings() lockCPU = getEnv("NIX_AFFINITY_HACK", "1") == "1"; caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")); + /* Backwards compatibility. */ + auto s = getEnv("NIX_REMOTE_SYSTEMS"); + if (s != "") builderFiles = tokenizeString(s, ":"); + #if __linux__ sandboxPaths = tokenizeString("/bin/sh=" BASH_PATH); #endif diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index d7a0b86a088..ac6f6a2cfa3 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -133,6 +133,10 @@ public: Setting builders{this, "", "builders", "A semicolon-separated list of build machines, in the format of nix.machines."}; + Setting builderFiles{this, + {nixConfDir + "/machines"}, "builder-files", + "A list of files specifying build machines."}; + Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", "Amount of reserved disk space for the garbage collector."}; diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 479ed1432fb..c1d9047537d 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -12,7 +12,8 @@ Machine::Machine(decltype(storeUri) storeUri, decltype(maxJobs) maxJobs, decltype(speedFactor) speedFactor, decltype(supportedFeatures) supportedFeatures, - decltype(mandatoryFeatures) mandatoryFeatures) : + decltype(mandatoryFeatures) mandatoryFeatures, + decltype(sshPublicHostKey) sshPublicHostKey) : storeUri( // Backwards compatibility: if the URI is a hostname, // prepend ssh://. @@ -24,7 +25,8 @@ Machine::Machine(decltype(storeUri) storeUri, maxJobs(maxJobs), speedFactor(std::max(1U, speedFactor)), supportedFeatures(supportedFeatures), - mandatoryFeatures(mandatoryFeatures) + mandatoryFeatures(mandatoryFeatures), + sshPublicHostKey(sshPublicHostKey) {} bool Machine::allSupported(const std::set & features) const { @@ -52,13 +54,19 @@ void parseMachines(const std::string & s, Machines & machines) auto sz = tokens.size(); if (sz < 1) throw FormatError("bad machine specification ‘%s’", line); + + auto isSet = [&](int n) { + return tokens.size() > n && tokens[n] != "" && tokens[n] != "-"; + }; + machines.emplace_back(tokens[0], - sz >= 2 ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, - sz >= 3 ? tokens[2] : "", - sz >= 4 ? std::stoull(tokens[3]) : 1LL, - sz >= 5 ? std::stoull(tokens[4]) : 1LL, - sz >= 6 ? tokenizeString>(tokens[5], ",") : std::set{}, - sz >= 7 ? tokenizeString>(tokens[6], ",") : std::set{}); + isSet(1) ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, + isSet(2) ? tokens[2] : "", + isSet(3) ? std::stoull(tokens[3]) : 1LL, + isSet(4) ? std::stoull(tokens[4]) : 1LL, + isSet(5) ? tokenizeString>(tokens[5], ",") : std::set{}, + isSet(6) ? tokenizeString>(tokens[6], ",") : std::set{}, + isSet(7) ? 
tokens[7] : ""); } } @@ -66,11 +74,13 @@ Machines getMachines() { Machines machines; - try { - parseMachines(readFile(getEnv("NIX_REMOTE_SYSTEMS", settings.nixConfDir + "/machines")), machines); - } catch (const SysError & e) { - if (e.errNo != ENOENT) - throw; + for (auto & file : settings.builderFiles.get()) { + try { + parseMachines(readFile(file), machines); + } catch (const SysError & e) { + if (e.errNo != ENOENT) + throw; + } } parseMachines(settings.builders, machines); diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index e0455742844..de92eb924e4 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -13,6 +13,7 @@ struct Machine { const unsigned int speedFactor; const std::set supportedFeatures; const std::set mandatoryFeatures; + const std::string sshPublicHostKey; bool enabled = true; bool allSupported(const std::set & features) const; @@ -25,7 +26,8 @@ struct Machine { decltype(maxJobs) maxJobs, decltype(speedFactor) speedFactor, decltype(supportedFeatures) supportedFeatures, - decltype(mandatoryFeatures) mandatoryFeatures); + decltype(mandatoryFeatures) mandatoryFeatures, + decltype(sshPublicHostKey) sshPublicHostKey); }; typedef std::vector Machines; From c5bea16611ae1aa99680bbd5d94bc69811869885 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 15:07:11 +0200 Subject: [PATCH 0307/2196] LocalStoreAccessor: Fix handling of diverted stores --- src/libstore/local-fs-store.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index bf247903c9d..bf28a1c70c6 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -31,7 +31,7 @@ struct LocalStoreAccessor : public FSAccessor auto realPath = toRealPath(path); struct stat st; - if (lstat(path.c_str(), &st)) { + if (lstat(realPath.c_str(), &st)) { if (errno == ENOENT || errno == ENOTDIR) return {Type::tMissing, 0, false}; throw SysError(format("getting status of ‘%1%’") % path); } @@ -51,7 +51,7 @@ struct LocalStoreAccessor : public FSAccessor { auto realPath = toRealPath(path); - auto entries = nix::readDirectory(path); + auto entries = nix::readDirectory(realPath); StringSet res; for (auto & entry : entries) @@ -73,7 +73,8 @@ struct LocalStoreAccessor : public FSAccessor ref LocalFSStore::getFSAccessor() { - return make_ref(ref(std::dynamic_pointer_cast(shared_from_this()))); + return make_ref(ref( + std::dynamic_pointer_cast(shared_from_this()))); } void LocalFSStore::narFromPath(const Path & path, Sink & sink) From 7dedd3fa2455f1e219bc671d04d1dd1eaec54dfa Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 15:09:35 +0200 Subject: [PATCH 0308/2196] Add a test for diverted stores --- tests/linux-sandbox.sh | 27 +++++++++++++++++++++++++++ tests/local.mk | 3 ++- 2 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 tests/linux-sandbox.sh diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh new file mode 100644 index 00000000000..740b2c35709 --- /dev/null +++ b/tests/linux-sandbox.sh @@ -0,0 +1,27 @@ +source common.sh + +clearStore + +if [[ $(uname) != Linux ]]; then exit; fi + +# Note: we need to bind-mount $SHELL into the chroot. Currently we +# only support the case where $SHELL is in the Nix store, because +# otherwise things get complicated (e.g. if it's in /bin, do we need +# /lib as well?). +if [[ ! 
$SHELL =~ /nix/store ]]; then exit; fi + +chmod -R u+w $TEST_ROOT/store0 || true +rm -rf $TEST_ROOT/store0 + +export NIX_STORE_DIR=/my/store +export NIX_REMOTE="local?root=$TEST_ROOT/store0" + +outPath=$( nix-build dependencies.nix --no-out-link --option build-sandbox-paths /nix/store) + +[[ $outPath =~ /my/store/.*-dependencies ]] + +nix path-info -r $outPath | grep input-2 + +nix ls-store -R -l $outPath | grep foobar + +nix cat-store $outPath/foobar | grep FOOBAR diff --git a/tests/local.mk b/tests/local.mk index b3ce39cda80..06532dc58d2 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -11,7 +11,8 @@ nix_tests = \ multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \ binary-cache.sh nix-profile.sh repair.sh dump-db.sh case-hack.sh \ check-reqs.sh pass-as-file.sh tarball.sh restricted.sh \ - placeholders.sh nix-shell.sh + placeholders.sh nix-shell.sh \ + linux-sandbox.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) From cef8c169b1d100685d7c7b7bfb921eaa43b5521b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 15:28:35 +0200 Subject: [PATCH 0309/2196] Fix "nix ... --all" When "--all" is used, we should not fill in a default installable. --- src/nix/command.hh | 6 +++++- src/nix/installables.cc | 22 +++++++++++----------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/src/nix/command.hh b/src/nix/command.hh index dc7b2637d66..cf0097d7892 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -78,7 +78,7 @@ struct InstallablesCommand : virtual Args, StoreCommand = import ...; bla = import ...; }’. */ Value * getSourceExpr(EvalState & state); - std::vector> parseInstallables(ref store, Strings installables); + std::vector> parseInstallables(ref store, Strings ss); PathSet buildInstallables(ref store, bool dryRun); @@ -86,6 +86,8 @@ struct InstallablesCommand : virtual Args, StoreCommand void prepare() override; + virtual bool useDefaultInstallables() { return true; } + private: Strings _installables; @@ -112,6 +114,8 @@ public: virtual void run(ref store, Paths storePaths) = 0; void run(ref store) override; + + bool useDefaultInstallables() override { return !all; } }; typedef std::map> Commands; diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 57580049f25..4756fc44bba 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -177,21 +177,21 @@ struct InstallableAttrPath : Installable std::string attrRegex = R"([A-Za-z_][A-Za-z0-9-_+]*)"; static std::regex attrPathRegex(fmt(R"(%1%(\.%1%)*)", attrRegex)); -std::vector> InstallablesCommand::parseInstallables(ref store, Strings installables) +std::vector> InstallablesCommand::parseInstallables(ref store, Strings ss) { std::vector> result; - if (installables.empty()) { + if (ss.empty() && useDefaultInstallables()) { if (file == "") file = "."; - installables = Strings{""}; + ss = Strings{""}; } - for (auto & installable : installables) { + for (auto & s : ss) { - if (installable.find("/") != std::string::npos) { + if (s.find("/") != std::string::npos) { - auto path = store->toStorePath(store->followLinksToStore(installable)); + auto path = store->toStorePath(store->followLinksToStore(s)); if (store->isStorePath(path)) { if (isDerivation(path)) @@ -201,14 +201,14 @@ std::vector> InstallablesCommand::parseInstallables } } - else if (installable.compare(0, 1, "(") == 0) - result.push_back(std::make_shared(*this, installable)); + else if (s.compare(0, 1, "(") == 0) + result.push_back(std::make_shared(*this, s)); - else if (installable 
== "" || std::regex_match(installable, attrPathRegex)) - result.push_back(std::make_shared(*this, installable)); + else if (s == "" || std::regex_match(s, attrPathRegex)) + result.push_back(std::make_shared(*this, s)); else - throw UsageError("don't know what to do with argument ‘%s’", installable); + throw UsageError("don't know what to do with argument ‘%s’", s); } return result; From 16535552ad69ca5397967b84fd70cc0d27a38ac1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 May 2017 15:35:35 +0200 Subject: [PATCH 0310/2196] build-remote: Add a basic test This only runs on Linux because it requires a diverted store (which uses mount/user namespaces). --- tests/build-hook.nix | 1 + tests/build-remote.sh | 24 ++++++++++++++++++++++++ tests/local.mk | 3 ++- 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 tests/build-remote.sh diff --git a/tests/build-hook.nix b/tests/build-hook.nix index 666cc6ef804..8bff0fe7903 100644 --- a/tests/build-hook.nix +++ b/tests/build-hook.nix @@ -5,6 +5,7 @@ let input1 = mkDerivation { name = "build-hook-input-1"; builder = ./dependencies.builder1.sh; + requiredSystemFeatures = ["foo"]; }; input2 = mkDerivation { diff --git a/tests/build-remote.sh b/tests/build-remote.sh new file mode 100644 index 00000000000..071011dcb71 --- /dev/null +++ b/tests/build-remote.sh @@ -0,0 +1,24 @@ +source common.sh + +clearStore + +if [[ $(uname) != Linux ]]; then exit; fi +if [[ ! $SHELL =~ /nix/store ]]; then exit; fi + +chmod -R u+w $TEST_ROOT/store0 || true +chmod -R u+w $TEST_ROOT/store1 || true +rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1 + +# FIXME: --option is not passed to build-remote, so have to create a config file. +export NIX_CONF_DIR=$TEST_ROOT/etc2 +mkdir -p $NIX_CONF_DIR +echo "build-sandbox-paths = /nix/store" > $NIX_CONF_DIR/nix.conf + +outPath=$(nix-build build-hook.nix --no-out-link -j0 --option builders "local?root=$TEST_ROOT/store0; local?root=$TEST_ROOT/store1 - - 1 1 foo" --option build-sandbox-paths /nix/store) + +cat $outPath/foobar | grep FOOBAR + +# Ensure that input1 was built on store1 due to the required feature. +p=$(readlink -f $outPath/input-2) +(! nix path-info --store local?root=$TEST_ROOT/store0 --all | grep dependencies.builder1.sh) +nix path-info --store local?root=$TEST_ROOT/store1 --all | grep dependencies.builder1.sh diff --git a/tests/local.mk b/tests/local.mk index 06532dc58d2..108e3febdb0 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -12,7 +12,8 @@ nix_tests = \ binary-cache.sh nix-profile.sh repair.sh dump-db.sh case-hack.sh \ check-reqs.sh pass-as-file.sh tarball.sh restricted.sh \ placeholders.sh nix-shell.sh \ - linux-sandbox.sh + linux-sandbox.sh \ + build-remote.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) From a786d26dc27798a0d00b5433442c69815c8ba763 Mon Sep 17 00:00:00 2001 From: regnat Date: Wed, 3 May 2017 10:49:34 +0200 Subject: [PATCH 0311/2196] doc: fix the description of --xml and --json Those options seem to only apply with --eval and not with --parse. --- doc/manual/command-ref/nix-instantiate.xml | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/doc/manual/command-ref/nix-instantiate.xml b/doc/manual/command-ref/nix-instantiate.xml index 1e556c7ed7c..3d03358bea3 100644 --- a/doc/manual/command-ref/nix-instantiate.xml +++ b/doc/manual/command-ref/nix-instantiate.xml @@ -117,21 +117,19 @@ input. 
- When used with and - , print the resulting expression as an - XML representation of the abstract syntax tree rather than as an - ATerm. The schema is the same as that used by the toXML - built-in. + When used with , print the resulting + value as an XML representation of the abstract syntax tree rather than as + an ATerm. The schema is the same as that used by the toXML built-in. + - When used with and - , print the resulting expression as an - JSON representation of the abstract syntax tree rather than as an - ATerm. + When used with , print the resulting + value as an JSON representation of the abstract syntax tree rather + than as an ATerm. From d3dcdfa00691cfe6f6a939fde218f1980d3cf73c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 May 2017 11:30:02 +0200 Subject: [PATCH 0312/2196] Fix perlBindings.x86_64-darwin http://hydra.nixos.org/build/52401151 --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 7adc87386f9..f21f5867cc8 100644 --- a/release.nix +++ b/release.nix @@ -112,7 +112,7 @@ let buildInputs = [ (builtins.getAttr system jobs.build) curl bzip2 xz pkgconfig pkgs.perl ] - ++ lib.optional stdenv.isLinux libsodium; + ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium; configureFlags = '' --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix} From 782c0bff45593e7116d9b17b7de71b7ee636a807 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 May 2017 14:08:18 +0200 Subject: [PATCH 0313/2196] nix eval: Add a --raw flag Similar to "jq -r", this prints the evaluation result (which must be a string value) unquoted. --- src/nix/eval.cc | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/nix/eval.cc b/src/nix/eval.cc index eb2b13a2dcd..981cefa800c 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -10,6 +10,13 @@ using namespace nix; struct CmdEval : MixJSON, InstallablesCommand { + bool raw = false; + + CmdEval() + { + mkFlag(0, "raw", "print strings unquoted", &raw); + } + std::string name() override { return "eval"; @@ -22,13 +29,18 @@ struct CmdEval : MixJSON, InstallablesCommand void run(ref store) override { + if (raw && json) + throw UsageError("--raw and --json are mutually exclusive"); + auto state = getEvalState(); auto jsonOut = json ? 
std::make_unique(std::cout) : nullptr; for (auto & i : installables) { auto v = i->toValue(*state); - if (json) { + if (raw) { + std::cout << state->forceString(*v); + } else if (json) { PathSet context; auto jsonElem = jsonOut->placeholder(); printValueAsJSON(*state, true, *v, jsonElem, context); From 08355643ab2811256b8d78265757d9aab216b38e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 May 2017 15:01:15 +0200 Subject: [PATCH 0314/2196] nix-shell: Implement passAsFile --- src/nix-build/nix-build.cc | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index bb031d51513..cd4dee32674 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -15,6 +15,7 @@ #include "shared.hh" using namespace nix; +using namespace std::string_literals; extern char * * environ; @@ -407,8 +408,20 @@ int main(int argc, char ** argv) env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; env["NIX_STORE"] = store->storeDir; + auto passAsFile = tokenizeString(get(drv.env, "passAsFile", "")); + + bool keepTmp = false; + int fileNr = 0; + for (auto & var : drv.env) - env[var.first] = var.second; + if (passAsFile.count(var.first)) { + keepTmp = true; + string fn = ".attr-" + std::to_string(fileNr++); + Path p = (Path) tmpDir + "/" + fn; + writeFile(p, var.second); + env[var.first + "Path"] = p; + } else + env[var.first] = var.second; restoreAffinity(); @@ -418,7 +431,7 @@ int main(int argc, char ** argv) // the current $PATH directories. auto rcfile = (Path) tmpDir + "/rc"; writeFile(rcfile, fmt( - "rm -rf '%1%'; " + (keepTmp ? "" : "rm -rf '%1%'; "s) + "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc; " "%2%" "dontAddDisableDepTrack=1; " From 72fb2a7edc169fa480f3a3d8da5a9fd263868491 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 May 2017 16:08:48 +0200 Subject: [PATCH 0315/2196] Fix build on gcc 4.9 http://hydra.nixos.org/build/52408843 --- src/libutil/config.cc | 4 ++-- src/libutil/config.hh | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 497afaa1fed..f7a46bfee63 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -15,9 +15,9 @@ void Config::set(const std::string & name, const std::string & value) void Config::addSetting(AbstractSetting * setting) { - _settings.emplace(setting->name, Config::SettingData{false, setting}); + _settings.emplace(setting->name, Config::SettingData(false, setting)); for (auto & alias : setting->aliases) - _settings.emplace(alias, Config::SettingData{true, setting}); + _settings.emplace(alias, Config::SettingData(true, setting)); bool set = false; diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 91962109100..77620d47d37 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -33,8 +33,11 @@ class Config struct SettingData { - bool isAlias = false; + bool isAlias; AbstractSetting * setting; + SettingData(bool isAlias, AbstractSetting * setting) + : isAlias(isAlias), setting(setting) + { } }; std::map _settings; From 493d4bd9490bd4a4b4cd5ad9dd62e9a01f94e84c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 May 2017 17:43:52 +0200 Subject: [PATCH 0316/2196] perl-bindings: Remove unused --with-store-dir flag --- perl/configure.ac | 5 ----- 1 file changed, 5 deletions(-) diff --git a/perl/configure.ac b/perl/configure.ac index 80bcdb8ff5b..9f49db4d281 100644 --- a/perl/configure.ac +++ b/perl/configure.ac 
@@ -40,11 +40,6 @@ perlarchname=$($perl -e 'use Config; print $Config{archname};') AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname]) AC_MSG_RESULT($perllibdir) -AC_ARG_WITH(store-dir, AC_HELP_STRING([--with-store-dir=PATH], - [path of the Nix store (defaults to /nix/store)]), - storedir=$withval, storedir='/nix/store') -AC_SUBST(storedir) - # Look for libsodium, an optional dependency. PKG_CHECK_MODULES([SODIUM], [libsodium], [AC_DEFINE([HAVE_SODIUM], [1], [Whether to use libsodium for cryptography.]) From 44309c506767fcfb8aae15761b329a87b0dd4b8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 May 2017 18:30:47 +0200 Subject: [PATCH 0317/2196] Fix Ubuntu 16.10 build http://hydra.nixos.org/build/52420073 --- release.nix | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/release.nix b/release.nix index f21f5867cc8..fa2fde4f609 100644 --- a/release.nix +++ b/release.nix @@ -198,15 +198,15 @@ let rpm_fedora25x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora25x86_64) [ "libsodium-devel" ]; - deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" ]; - deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; + deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" "libreadline6" ]; + deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" "libreadline6" ]; - deb_ubuntu1410i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1410i386) [] []; - deb_ubuntu1410x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1410x86_64) [] []; - deb_ubuntu1604i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1604i386) [ "libsodium-dev" ] [ "libsodium18" ]; - deb_ubuntu1604x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1604x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; - deb_ubuntu1610i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1610i386) [ "libsodium-dev" ] [ "libsodium18" ]; - deb_ubuntu1610x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1610x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; + deb_ubuntu1410i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1410i386) [] [ "libreadline6" ]; + deb_ubuntu1410x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1410x86_64) [] [ "libreadline6" ]; + deb_ubuntu1604i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1604i386) [ "libsodium-dev" ] [ "libsodium18" "libreadline6" ]; + deb_ubuntu1604x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1604x86_64) [ "libsodium-dev" ] [ "libsodium18" "libreadline6" ]; + deb_ubuntu1610i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1610i386) [ "libsodium-dev" ] [ "libsodium18" "libreadline7" ]; + deb_ubuntu1610x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1610x86_64) [ "libsodium-dev" ] [ "libsodium18" "libreadline7" ]; # System tests. 
@@ -328,7 +328,7 @@ let postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; debRequires = - [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" "libreadline6" ] + [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" ] ++ extraDebPackages; debMaintainer = "Eelco Dolstra "; doInstallCheck = true; From 2da6a424486e16b4b30e448a15a9b4a608df602d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 4 May 2017 14:16:26 +0200 Subject: [PATCH 0318/2196] nix dump-path: Add This is primarily useful for extracting NARs from other stores (like binary caches), which "nix-store --dump" cannot do. --- src/nix/command.cc | 10 ++++++++++ src/nix/command.hh | 10 ++++++++++ src/nix/dump-path.cc | 35 +++++++++++++++++++++++++++++++++++ 3 files changed, 55 insertions(+) create mode 100644 src/nix/dump-path.cc diff --git a/src/nix/command.cc b/src/nix/command.cc index a45f2888bfb..3c82e0df57f 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -129,4 +129,14 @@ void StorePathsCommand::run(ref store) run(store, storePaths); } +void StorePathCommand::run(ref store) +{ + auto storePaths = buildInstallables(store, false); + + if (storePaths.size() != 1) + throw UsageError("this command requires exactly one store path"); + + run(store, *storePaths.begin()); +} + } diff --git a/src/nix/command.hh b/src/nix/command.hh index cf0097d7892..4800b5c912e 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -118,6 +118,16 @@ public: bool useDefaultInstallables() override { return !all; } }; +/* A command that operates on exactly one store path. */ +struct StorePathCommand : public InstallablesCommand +{ + using StoreCommand::run; + + virtual void run(ref store, const Path & storePath) = 0; + + void run(ref store) override; +}; + typedef std::map> Commands; /* An argument parser that supports multiple subcommands, diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc new file mode 100644 index 00000000000..1a1866437b0 --- /dev/null +++ b/src/nix/dump-path.cc @@ -0,0 +1,35 @@ +#include "command.hh" +#include "store-api.hh" + +using namespace nix; + +struct CmdDumpPath : StorePathCommand +{ + std::string name() override + { + return "dump-path"; + } + + std::string description() override + { + return "dump a store path to stdout (in NAR format)"; + } + + Examples examples() override + { + return { + Example{ + "To get a NAR from the binary cache https://cache.nixos.org/:", + "nix dump-path --store https://cache.nixos.org/ /nix/store/7crrmih8c52r8fbnqb933dxrsp44md93-glibc-2.25" + }, + }; + } + + void run(ref store, const Path & storePath) override + { + FdSink sink(STDOUT_FILENO); + store->narFromPath(storePath, sink); + } +}; + +static RegisterCommand r1(make_ref()); From eba840c8a13b465ace90172ff76a0db2899ab11b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 4 May 2017 16:57:03 +0200 Subject: [PATCH 0319/2196] Linux sandbox: Use /build instead of /tmp as $TMPDIR There is a security issue when a build accidentally stores its $TMPDIR in some critical place, such as an RPATH. If TMPDIR=/tmp/nix-build-..., then any user on the system can recreate that directory and inject libraries into the RPATH of programs executed by other users. Since /build probably doesn't exist (or isn't world-writable), this mitigates the issue. 
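The resulting per-platform choice of the in-sandbox build directory can be summarised roughly as follows; this is a simplified paraphrase of the tmpDirInSandbox logic in the diff below, with the surrounding DerivationGoal state turned into plain parameters:

    #include <iostream>
    #include <string>

    std::string tmpDirInSandbox(bool useChroot,
                                const std::string & tmpDir,
                                const std::string & drvName)
    {
    #if __linux__
        // A fixed path inside the sandbox's private mount namespace: no other
        // user can pre-create /build, so a $TMPDIR leaked into an RPATH is
        // harmless outside the sandbox.
        return useChroot ? std::string("/build") : tmpDir;
    #elif __APPLE__
        // Darwin has no mount namespaces; keep the old scheme under /tmp
        // (canonicalised because /tmp is usually a symlink to /private/tmp).
        return useChroot ? "/tmp/nix-build-" + drvName + "-0" : tmpDir;
    #else
        return tmpDir;
    #endif
    }

    int main() {
        std::cout << tmpDirInSandbox(true, "/tmp/nix-build-hello-0", "hello") << "\n";
    }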
--- src/libstore/build.cc | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 8c2602a701b..6d20512e1f8 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1661,6 +1661,9 @@ int childEntry(void * arg) } +const std::string buildDir = "/build"; + + void DerivationGoal::startBuilder() { auto f = format( @@ -1721,7 +1724,14 @@ void DerivationGoal::startBuilder() /* In a sandbox, for determinism, always use the same temporary directory. */ +#if __linux__ + tmpDirInSandbox = useChroot ? buildDir : tmpDir; +#elif __APPLE__ + // On Darwin, we canonize /tmp because its probably a symlink to /private/tmp. tmpDirInSandbox = useChroot ? canonPath("/tmp", true) + "/nix-build-" + drvName + "-0" : tmpDir; +#else + tmpDirInSandbox = tmpDir; +#endif chownToBuilder(tmpDir); /* Substitute output placeholders with the actual output paths. */ @@ -1829,11 +1839,11 @@ void DerivationGoal::startBuilder() Samba-in-QEMU. */ createDirs(chrootRootDir + "/etc"); - writeFile(chrootRootDir + "/etc/passwd", - (format( - "root:x:0:0:Nix build user:/:/noshell\n" - "nixbld:x:%1%:%2%:Nix build user:/:/noshell\n" - "nobody:x:65534:65534:Nobody:/:/noshell\n") % sandboxUid % sandboxGid).str()); + writeFile(chrootRootDir + "/etc/passwd", fmt( + "root:x:0:0:Nix build user:%3%:/noshell\n" + "nixbld:x:%1%:%2%:Nix build user:%3%:/noshell\n" + "nobody:x:65534:65534:Nobody:/:/noshell\n", + sandboxUid, sandboxGid, buildDir)); /* Declare the build user's group so that programs get a consistent view of the system (e.g., "id -gn"). */ From 465cb6824401541d82489e11b5223dbfd50bb132 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 5 May 2017 16:40:12 +0200 Subject: [PATCH 0320/2196] Figure out the user's home directory if $HOME is not set --- misc/launchd/org.nixos.nix-daemon.plist.in | 2 - misc/systemd/nix-daemon.service.in | 2 - src/libexpr/parser.y | 2 +- src/libutil/lazy.hh | 48 ++++++++++++++++++++++ src/libutil/util.cc | 45 ++++++++++++-------- src/libutil/util.hh | 3 ++ src/nix-channel/nix-channel.cc | 4 +- src/nix-env/nix-env.cc | 14 ++----- 8 files changed, 84 insertions(+), 36 deletions(-) create mode 100644 src/libutil/lazy.hh diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in index 5d57a5ec8ff..c5ef97ee9a3 100644 --- a/misc/launchd/org.nixos.nix-daemon.plist.in +++ b/misc/launchd/org.nixos.nix-daemon.plist.in @@ -16,8 +16,6 @@ NIX_SSL_CERT_FILE /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt - XDG_CACHE_HOME - /root/.cache diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in index 9bfb00e306b..5fc04a3f571 100644 --- a/misc/systemd/nix-daemon.service.in +++ b/misc/systemd/nix-daemon.service.in @@ -7,5 +7,3 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket [Service] ExecStart=@@bindir@/nix-daemon nix-daemon --daemon KillMode=process -Environment=XDG_CACHE_HOME=/root/.cache -Environment=XDG_CONFIG_HOME=/root/.config diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index d07eedddaf6..62982650a22 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -376,7 +376,7 @@ expr_simple $$ = stripIndentation(CUR_POS, data->symbols, *$2); } | PATH { $$ = new ExprPath(absPath($1, data->basePath)); } - | HPATH { $$ = new ExprPath(getEnv("HOME", "") + string{$1 + 1}); } + | HPATH { $$ = new ExprPath(getHome() + string{$1 + 1}); } | SPATH { string path($1 + 1, strlen($1) - 2); $$ = new ExprApp(CUR_POS, diff --git 
a/src/libutil/lazy.hh b/src/libutil/lazy.hh new file mode 100644 index 00000000000..d073e486c2e --- /dev/null +++ b/src/libutil/lazy.hh @@ -0,0 +1,48 @@ +#include +#include +#include + +namespace nix { + +/* A helper class for lazily-initialized variables. + + Lazy var([]() { return value; }); + + declares a variable of type T that is initialized to 'value' (in a + thread-safe way) on first use, that is, when var() is first + called. If the initialiser code throws an exception, then all + subsequent calls to var() will rethrow that exception. */ +template +class Lazy +{ + + typedef std::function Init; + + Init init; + + std::once_flag done; + + T value; + + std::exception_ptr ex; + +public: + + Lazy(Init init) : init(init) + { } + + const T & operator () () + { + std::call_once(done, [&]() { + try { + value = init(); + } catch (...) { + ex = std::current_exception(); + } + }); + if (ex) std::rethrow_exception(ex); + return value; + } +}; + +} diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 98c0aff1e72..1d1f68fc845 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1,3 +1,4 @@ +#include "lazy.hh" #include "util.hh" #include "affinity.hh" #include "sync.hh" @@ -13,10 +14,12 @@ #include #include -#include -#include #include #include +#include +#include +#include +#include #ifdef __APPLE__ #include @@ -417,14 +420,28 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix, } +static Lazy getHome2([]() { + Path homeDir = getEnv("HOME"); + if (homeDir.empty()) { + char buf[16384]; + struct passwd pwbuf; + struct passwd * pw; + if (getpwuid_r(getuid(), &pwbuf, buf, sizeof(buf), &pw) != 0 + || !pw || !pw->pw_dir || !pw->pw_dir[0]) + throw Error("cannot determine user's home directory"); + homeDir = pw->pw_dir; + } + return homeDir; +}); + +Path getHome() { return getHome2(); } + + Path getCacheDir() { Path cacheDir = getEnv("XDG_CACHE_HOME"); - if (cacheDir.empty()) { - Path homeDir = getEnv("HOME"); - if (homeDir.empty()) throw Error("$XDG_CACHE_HOME and $HOME are not set"); - cacheDir = homeDir + "/.cache"; - } + if (cacheDir.empty()) + cacheDir = getHome() + "/.cache"; return cacheDir; } @@ -432,11 +449,8 @@ Path getCacheDir() Path getConfigDir() { Path configDir = getEnv("XDG_CONFIG_HOME"); - if (configDir.empty()) { - Path homeDir = getEnv("HOME"); - if (homeDir.empty()) throw Error("$XDG_CONFIG_HOME and $HOME are not set"); - configDir = homeDir + "/.config"; - } + if (configDir.empty()) + configDir = getHome() + "/.config"; return configDir; } @@ -444,11 +458,8 @@ Path getConfigDir() Path getDataDir() { Path dataDir = getEnv("XDG_DATA_HOME"); - if (dataDir.empty()) { - Path homeDir = getEnv("HOME"); - if (homeDir.empty()) throw Error("$XDG_DATA_HOME and $HOME are not set"); - dataDir = homeDir + "/.local/share"; - } + if (dataDir.empty()) + dataDir = getHome() + "/.local/share"; return dataDir; } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index a9950f830c5..5a9c9513fd5 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -110,6 +110,9 @@ void deletePath(const Path & path, unsigned long long & bytesFreed); Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix", bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755); +/* Return $HOME or the user's home directory from /etc/passwd. */ +Path getHome(); + /* Return $XDG_CACHE_HOME or $HOME/.cache. 
*/ Path getCacheDir(); diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 0f50f6242c4..2aaae2f471b 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -169,9 +169,7 @@ int main(int argc, char ** argv) setenv("NIX_DOWNLOAD_CACHE", channelCache.c_str(), 1); // Figure out the name of the `.nix-channels' file to use - auto home = getEnv("HOME"); - if (home.empty()) - throw Error("$HOME not set"); + auto home = getHome(); channelsList = home + "/.nix-channels"; nixDefExpr = home + "/.nix-defexpr"; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 908c09bc8c8..da39bf36ab6 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -192,17 +192,9 @@ static void loadDerivations(EvalState & state, Path nixExprPath, } -static Path getHomeDir() -{ - Path homeDir(getEnv("HOME", "")); - if (homeDir == "") throw Error("HOME environment variable not set"); - return homeDir; -} - - static Path getDefNixExprPath() { - return getHomeDir() + "/.nix-defexpr"; + return getHome() + "/.nix-defexpr"; } @@ -1188,7 +1180,7 @@ static void opSwitchProfile(Globals & globals, Strings opFlags, Strings opArgs) throw UsageError(format("exactly one argument expected")); Path profile = absPath(opArgs.front()); - Path profileLink = getHomeDir() + "/.nix-profile"; + Path profileLink = getHome() + "/.nix-profile"; switchLink(profileLink, profile); } @@ -1413,7 +1405,7 @@ int main(int argc, char * * argv) globals.profile = getEnv("NIX_PROFILE", ""); if (globals.profile == "") { - Path profileLink = getHomeDir() + "/.nix-profile"; + Path profileLink = getHome() + "/.nix-profile"; globals.profile = pathExists(profileLink) ? absPath(readLink(profileLink), dirOf(profileLink)) : canonPath(settings.nixStateDir + "/profiles/default"); From bb50c8931934d04dbf61bd245b4583f8c1ac4fd9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 5 May 2017 17:45:22 +0200 Subject: [PATCH 0321/2196] Make the location of the build directory in the sandbox configurable This is mostly for use in the sandbox tests, since if the Nix store is under /build, then we can't use /build as the build directory. --- src/libstore/build.cc | 7 ++----- src/libstore/globals.hh | 3 +++ src/libstore/machines.cc | 2 +- tests/build-remote.sh | 8 ++++++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 6d20512e1f8..70ecf4bad99 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1661,9 +1661,6 @@ int childEntry(void * arg) } -const std::string buildDir = "/build"; - - void DerivationGoal::startBuilder() { auto f = format( @@ -1725,7 +1722,7 @@ void DerivationGoal::startBuilder() /* In a sandbox, for determinism, always use the same temporary directory. */ #if __linux__ - tmpDirInSandbox = useChroot ? buildDir : tmpDir; + tmpDirInSandbox = useChroot ? settings.sandboxBuildDir : tmpDir; #elif __APPLE__ // On Darwin, we canonize /tmp because its probably a symlink to /private/tmp. tmpDirInSandbox = useChroot ? canonPath("/tmp", true) + "/nix-build-" + drvName + "-0" : tmpDir; @@ -1843,7 +1840,7 @@ void DerivationGoal::startBuilder() "root:x:0:0:Nix build user:%3%:/noshell\n" "nixbld:x:%1%:%2%:Nix build user:%3%:/noshell\n" "nobody:x:65534:65534:Nobody:/:/noshell\n", - sandboxUid, sandboxGid, buildDir)); + sandboxUid, sandboxGid, settings.sandboxBuildDir)); /* Declare the build user's group so that programs get a consistent view of the system (e.g., "id -gn"). 
*/ diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index ac6f6a2cfa3..7295b0d30af 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -224,6 +224,9 @@ public: #if __linux__ Setting sandboxShmSize{this, "50%", "sandbox-dev-shm-size", "The size of /dev/shm in the build sandbox."}; + + Setting sandboxBuildDir{this, "/build", "sandbox-build-dir", + "The build directory inside the sandbox."}; #endif Setting allowedImpureHostPrefixes{this, {}, "allowed-impure-host-deps", diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index c1d9047537d..7491037b2d7 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -55,7 +55,7 @@ void parseMachines(const std::string & s, Machines & machines) if (sz < 1) throw FormatError("bad machine specification ‘%s’", line); - auto isSet = [&](int n) { + auto isSet = [&](size_t n) { return tokens.size() > n && tokens[n] != "" && tokens[n] != "-"; }; diff --git a/tests/build-remote.sh b/tests/build-remote.sh index 071011dcb71..927a217f376 100644 --- a/tests/build-remote.sh +++ b/tests/build-remote.sh @@ -12,9 +12,13 @@ rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1 # FIXME: --option is not passed to build-remote, so have to create a config file. export NIX_CONF_DIR=$TEST_ROOT/etc2 mkdir -p $NIX_CONF_DIR -echo "build-sandbox-paths = /nix/store" > $NIX_CONF_DIR/nix.conf +echo " +build-sandbox-paths = /nix/store +sandbox-build-dir = /build-tmp +" > $NIX_CONF_DIR/nix.conf -outPath=$(nix-build build-hook.nix --no-out-link -j0 --option builders "local?root=$TEST_ROOT/store0; local?root=$TEST_ROOT/store1 - - 1 1 foo" --option build-sandbox-paths /nix/store) +outPath=$(nix-build build-hook.nix --no-out-link -j0 \ + --option builders "local?root=$TEST_ROOT/store0; local?root=$TEST_ROOT/store1 - - 1 1 foo") cat $outPath/foobar | grep FOOBAR From 542fe0d8f3f25025a2642583d8593580a5d10e3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sun, 7 May 2017 07:41:19 +0100 Subject: [PATCH 0322/2196] nix-profile.sh: remove sbin from PATH sbin is a symlink to bin. profiles only contains packages, which have this symlink. It is a subset of bin. 
related to https://github.com/NixOS/nixpkgs/pull/25550 --- scripts/nix-profile.sh.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index 3cdf431041c..ab95c09c830 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -85,6 +85,6 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then export MANPATH="$NIX_LINK/share/man:$MANPATH" fi - export PATH="$NIX_LINK/bin:$NIX_LINK/sbin:$__savedpath" + export PATH="$NIX_LINK/bin:$__savedpath" unset __savedpath NIX_LINK NIX_USER_PROFILE_DIR NIX_PROFILES fi From 0a97eb6bd7d459de80432a5dbf39daf16647bb8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 May 2017 11:25:23 +0200 Subject: [PATCH 0323/2196] Remove superfluous #ifdef --- src/libstore/build.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 70ecf4bad99..dc9b9e023cb 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2360,10 +2360,8 @@ void DerivationGoal::runChild() createDirs(chrootRootDir + "/dev/shm"); createDirs(chrootRootDir + "/dev/pts"); ss.push_back("/dev/full"); -#ifdef __linux__ if (pathExists("/dev/kvm")) ss.push_back("/dev/kvm"); -#endif ss.push_back("/dev/null"); ss.push_back("/dev/random"); ss.push_back("/dev/tty"); From ebfceeb333120411af46d0af4805f3e7d557159f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 May 2017 14:27:12 +0200 Subject: [PATCH 0324/2196] build-remote: Check remote build status --- src/build-remote/build-remote.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 8876da6c063..7ffbdca7c0f 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -208,7 +208,10 @@ int main (int argc, char * * argv) drv.inputSrcs = inputs; printError("building ‘%s’ on ‘%s’", drvPath, storeUri); - sshStore->buildDerivation(drvPath, drv); + auto result = sshStore->buildDerivation(drvPath, drv); + + if (!result.success()) + throw Error("build of ‘%s’ on ‘%s’ failed: %s", drvPath, storeUri, result.errorMsg); PathSet missing; for (auto & path : outputs) From 00b286275c1a77458e45bd73528c9ca729cca7f6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 May 2017 15:42:59 +0200 Subject: [PATCH 0325/2196] Linux sandbox: Fix compatibility with older kernels --- src/libstore/build.cc | 38 +++++++++++++++++++++++--------------- tests/linux-sandbox.sh | 2 +- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index dc9b9e023cb..270500d81c9 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2390,17 +2390,14 @@ void DerivationGoal::runChild() /* Bind-mount all the directories from the "host" filesystem that we want in the chroot environment. 
*/ - for (auto & i : dirsInChroot) { - struct stat st; - Path source = i.second.source; - Path target = chrootRootDir + i.first; - if (source == "/proc") continue; // backwards compatibility + auto doBind = [&](const Path & source, const Path & target, bool optional = false) { debug(format("bind mounting ‘%1%’ to ‘%2%’") % source % target); + struct stat st; if (stat(source.c_str(), &st) == -1) { - if (i.second.optional && errno == ENOENT) - continue; + if (optional && errno == ENOENT) + return; else - throw SysError(format("getting attributes of path ‘%1%’") % source); + throw SysError("getting attributes of path ‘%1%’", source); } if (S_ISDIR(st.st_mode)) createDirs(target); @@ -2409,7 +2406,12 @@ void DerivationGoal::runChild() writeFile(target, ""); } if (mount(source.c_str(), target.c_str(), "", MS_BIND | MS_REC, 0) == -1) - throw SysError(format("bind mount from ‘%1%’ to ‘%2%’ failed") % source % target); + throw SysError("bind mount from ‘%1%’ to ‘%2%’ failed", source, target); + }; + + for (auto & i : dirsInChroot) { + if (i.second.source == "/proc") continue; // backwards compatibility + doBind(i.second.source, chrootRootDir + i.first, i.second.optional); } /* Bind a new instance of procfs on /proc. */ @@ -2431,13 +2433,19 @@ void DerivationGoal::runChild() !pathExists(chrootRootDir + "/dev/ptmx") && !dirsInChroot.count("/dev/pts")) { - if (mount("none", (chrootRootDir + "/dev/pts").c_str(), "devpts", 0, "newinstance,mode=0620") == -1) - throw SysError("mounting /dev/pts"); - createSymlink("/dev/pts/ptmx", chrootRootDir + "/dev/ptmx"); + if (mount("none", (chrootRootDir + "/dev/pts").c_str(), "devpts", 0, "newinstance,mode=0620") == 0) + { + createSymlink("/dev/pts/ptmx", chrootRootDir + "/dev/ptmx"); - /* Make sure /dev/pts/ptmx is world-writable. With some - Linux versions, it is created with permissions 0. */ - chmod_(chrootRootDir + "/dev/pts/ptmx", 0666); + /* Make sure /dev/pts/ptmx is world-writable. With some + Linux versions, it is created with permissions 0. */ + chmod_(chrootRootDir + "/dev/pts/ptmx", 0666); + } else { + if (errno != EINVAL) + throw SysError("mounting /dev/pts"); + doBind("/dev/pts", "/dev/pts"); + doBind("/dev/ptmx", "/dev/ptmx"); + } } /* Do the chroot(). 
*/ diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh index 740b2c35709..54cdef8178e 100644 --- a/tests/linux-sandbox.sh +++ b/tests/linux-sandbox.sh @@ -16,7 +16,7 @@ rm -rf $TEST_ROOT/store0 export NIX_STORE_DIR=/my/store export NIX_REMOTE="local?root=$TEST_ROOT/store0" -outPath=$( nix-build dependencies.nix --no-out-link --option build-sandbox-paths /nix/store) +outPath=$(nix-build dependencies.nix --no-out-link --option build-sandbox-paths /nix/store) [[ $outPath =~ /my/store/.*-dependencies ]] From 7689181e4f5921d3356736996079ec0310e834c6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 May 2017 13:36:23 +0200 Subject: [PATCH 0326/2196] Minor cleanup --- src/libstore/store-api.cc | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b5a91e53672..8405c66dd47 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -482,21 +482,23 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const PathSet & storePaths if (showClosureSize) jsonPath.attr("closureSize", getClosureSize(storePath)); - if (!includeImpureInfo) continue; + if (includeImpureInfo) { - if (info->deriver != "") - jsonPath.attr("deriver", info->deriver); + if (info->deriver != "") + jsonPath.attr("deriver", info->deriver); - if (info->registrationTime) - jsonPath.attr("registrationTime", info->registrationTime); + if (info->registrationTime) + jsonPath.attr("registrationTime", info->registrationTime); - if (info->ultimate) - jsonPath.attr("ultimate", info->ultimate); + if (info->ultimate) + jsonPath.attr("ultimate", info->ultimate); + + if (!info->sigs.empty()) { + auto jsonSigs = jsonPath.list("signatures"); + for (auto & sig : info->sigs) + jsonSigs.elem(sig); + } - if (!info->sigs.empty()) { - auto jsonSigs = jsonPath.list("signatures"); - for (auto & sig : info->sigs) - jsonSigs.elem(sig); } } } From 03ae5e64592d6d62f30158ccfa804f1b4135a596 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 May 2017 18:39:33 +0200 Subject: [PATCH 0327/2196] Add "nix edit" command This is a little convenience command that opens the Nix expression of the specified package. For example, nix edit nixpkgs.perlPackages.Moose opens in $EDITOR (at the right line number for some editors). This requires the package to have a meta.position attribute. 
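Since meta.position has the form "<file>:<line>", the command only needs to split on the last colon and, for editors known to understand it, pass a "+<line>" argument before the file name. A simplified sketch of that parsing (the editorArgs helper and the sample path are illustrative; the real logic is in src/nix/edit.cc below):

    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <vector>

    std::vector<std::string> editorArgs(const std::string & editor,
                                        const std::string & pos)
    {
        auto colon = pos.rfind(':');
        if (colon == std::string::npos)
            throw std::runtime_error("cannot parse position '" + pos + "'");
        std::string filename = pos.substr(0, colon);
        int lineno = std::stoi(pos.substr(colon + 1));

        std::vector<std::string> args{editor};
        // Only some editors accept a "+<line>" argument.
        if (editor.find("emacs") != std::string::npos ||
            editor.find("nano")  != std::string::npos ||
            editor.find("vim")   != std::string::npos)
            args.push_back("+" + std::to_string(lineno));
        args.push_back(filename);
        return args;
    }

    int main() {
        for (auto & a : editorArgs("vim", "/path/to/nixpkgs/pkgs/hello/default.nix:17"))
            std::cout << a << ' ';
        std::cout << "\n";
    }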
--- src/nix/edit.cc | 75 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 src/nix/edit.cc diff --git a/src/nix/edit.cc b/src/nix/edit.cc new file mode 100644 index 00000000000..632b555771c --- /dev/null +++ b/src/nix/edit.cc @@ -0,0 +1,75 @@ +#include "command.hh" +#include "shared.hh" +#include "eval.hh" +#include "attr-path.hh" + +#include + +using namespace nix; + +struct CmdEdit : InstallablesCommand +{ + std::string name() override + { + return "edit"; + } + + std::string description() override + { + return "open the Nix expression of a Nix package in $EDITOR"; + } + + Examples examples() override + { + return { + Example{ + "To open the Nix expression of the GNU Hello package:", + "nix edit nixpkgs.hello" + }, + }; + } + + void run(ref store) override + { + auto state = getEvalState(); + + for (auto & i : installables) { + auto v = i->toValue(*state); + + Value * v2; + try { + auto dummyArgs = state->allocBindings(0); + v2 = findAlongAttrPath(*state, "meta.position", *dummyArgs, *v); + } catch (Error &) { + throw Error("package ‘%s’ has no source location information", i->what()); + } + + auto pos = state->forceString(*v2); + debug("position is %s", pos); + + auto colon = pos.rfind(':'); + if (colon == std::string::npos) + throw Error("cannot parse meta.position attribute ‘%s’", pos); + + std::string filename(pos, 0, colon); + int lineno = std::stoi(std::string(pos, colon + 1)); + + auto editor = getEnv("EDITOR", "cat"); + + Strings args{editor}; + + if (editor.find("emacs") != std::string::npos || + editor.find("nano") != std::string::npos || + editor.find("vim") != std::string::npos) + args.push_back(fmt("+%d", lineno)); + + args.push_back(filename); + + execvp(editor.c_str(), stringsToCharPtrs(args).data()); + + throw SysError("cannot run editor ‘%s’", editor); + } + } +}; + +static RegisterCommand r1(make_ref()); From 82a9c93c7f090d5a4eebe84894669aa13d31ed61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Wed, 10 May 2017 11:23:04 +0200 Subject: [PATCH 0328/2196] doc: builtins.attrNames returns alphabetically sorted list --- doc/manual/expressions/builtins.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 8a18b71008c..163153380ce 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -65,7 +65,7 @@ available as builtins.derivation. set Return the names of the attributes in the set - set in a sorted list. For instance, + set in alphabetically sorted list. For instance, builtins.attrNames { y = 1; x = "foo"; } evaluates to [ "x" "y" ]. From d48edcc3a5f7b20e116b3a1148af565d7f27f3cd Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Sat, 22 Apr 2017 09:17:08 +0100 Subject: [PATCH 0329/2196] nix-shell: use appropriate prompt terminator If running nix-shell as root, the terminator should be # and not $. 
--- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index cd4dee32674..b7cf36d1607 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -438,7 +438,7 @@ int main(int argc, char ** argv) "[ -e $stdenv/setup ] && source $stdenv/setup; " "%3%" "set +e; " - "[ -n \"$PS1\" ] && PS1=\"\\n\\[\\033[1;32m\\][nix-shell:\\w]$\\[\\033[0m\\] \"; " + "[ -n \"$PS1\" ] && PS1=\"\\n\\[\\033[1;32m\\][nix-shell:\\w]\\$\\[\\033[0m\\] \"; " "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; " "unset NIX_ENFORCE_PURITY; " "unset NIX_INDENT_MAKE; " From c5f23f10a84f568874321c04984b1a14d2dce978 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 May 2017 18:34:18 +0200 Subject: [PATCH 0330/2196] Replace readline by linenoise Using linenoise avoids a license compatibility issue (#1356), is a lot smaller and doesn't pull in ncurses. --- configure.ac | 8 - doc/manual/introduction/about-nix.xml | 6 + nix.spec.in | 2 - release.nix | 22 +- shell.nix | 1 - src/linenoise/LICENSE | 25 + src/linenoise/linenoise.c | 1199 +++++++++++++++++++++++++ src/linenoise/linenoise.h | 73 ++ src/nix/local.mk | 6 +- src/nix/repl.cc | 167 ++-- 10 files changed, 1377 insertions(+), 132 deletions(-) create mode 100644 src/linenoise/LICENSE create mode 100644 src/linenoise/linenoise.c create mode 100644 src/linenoise/linenoise.h diff --git a/configure.ac b/configure.ac index ac37456ae5f..c7026cf954d 100644 --- a/configure.ac +++ b/configure.ac @@ -196,14 +196,6 @@ if test "$gc" = yes; then fi -# Check for readline, needed by "nix repl". -AX_LIB_READLINE -if test "$ax_cv_lib_readline" != "no"; then - have_readline=1 -fi -AC_SUBST(HAVE_READLINE, [$have_readline]) - - AC_ARG_ENABLE(init-state, AC_HELP_STRING([--disable-init-state], [do not initialise DB etc. in `make install']), init_state=$enableval, init_state=yes) diff --git a/doc/manual/introduction/about-nix.xml b/doc/manual/introduction/about-nix.xml index e6dfb7a5a56..be065da3eb2 100644 --- a/doc/manual/introduction/about-nix.xml +++ b/doc/manual/introduction/about-nix.xml @@ -261,6 +261,12 @@ xlink:href="http://nixos.org/">NixOS homepage. xlink:href="http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html">GNU LGPLv2.1 or (at your option) any later version. +Nix uses the linenoise +library, which has the following license: + + + diff --git a/nix.spec.in b/nix.spec.in index 3ba2dfc94b4..390893d64dc 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -20,11 +20,9 @@ Requires: curl Requires: bzip2 Requires: gzip Requires: xz -Requires: readline BuildRequires: bzip2-devel BuildRequires: sqlite-devel BuildRequires: libcurl-devel -BuildRequires: readline-devel # Hack to make that shitty RPM scanning hack shut up. 
Provides: perl(Nix::SSH) diff --git a/release.nix b/release.nix index fa2fde4f609..54d20c868da 100644 --- a/release.nix +++ b/release.nix @@ -73,7 +73,7 @@ let buildInputs = [ curl bzip2 xz brotli - openssl pkgconfig sqlite boehmgc readline + openssl pkgconfig sqlite boehmgc ] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium @@ -198,15 +198,15 @@ let rpm_fedora25x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora25x86_64) [ "libsodium-devel" ]; - deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" "libreadline6" ]; - deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" "libreadline6" ]; + deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" ]; + deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; - deb_ubuntu1410i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1410i386) [] [ "libreadline6" ]; - deb_ubuntu1410x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1410x86_64) [] [ "libreadline6" ]; - deb_ubuntu1604i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1604i386) [ "libsodium-dev" ] [ "libsodium18" "libreadline6" ]; - deb_ubuntu1604x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1604x86_64) [ "libsodium-dev" ] [ "libsodium18" "libreadline6" ]; - deb_ubuntu1610i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1610i386) [ "libsodium-dev" ] [ "libsodium18" "libreadline7" ]; - deb_ubuntu1610x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1610x86_64) [ "libsodium-dev" ] [ "libsodium18" "libreadline7" ]; + deb_ubuntu1410i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1410i386) [] []; + deb_ubuntu1410x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1410x86_64) [] []; + deb_ubuntu1604i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1604i386) [ "libsodium-dev" ] [ "libsodium18" ]; + deb_ubuntu1604x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1604x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; + deb_ubuntu1610i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1610i386) [ "libsodium-dev" ] [ "libsodium18" ]; + deb_ubuntu1610x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1610x86_64) [ "libsodium-dev" ] [ "libsodium18" ]; # System tests. 
@@ -299,7 +299,7 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" "readline-devel" ] + [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; @@ -321,7 +321,7 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" "libreadline-dev" ] + [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; diff --git a/shell.nix b/shell.nix index bbce68564b9..8645d36020e 100644 --- a/shell.nix +++ b/shell.nix @@ -16,7 +16,6 @@ with import {}; customMemoryManagement = false; }) autoreconfHook - readline # For nix-perl perl diff --git a/src/linenoise/LICENSE b/src/linenoise/LICENSE new file mode 100644 index 00000000000..18e814865a5 --- /dev/null +++ b/src/linenoise/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2010-2014, Salvatore Sanfilippo +Copyright (c) 2010-2013, Pieter Noordhuis + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/src/linenoise/linenoise.c b/src/linenoise/linenoise.c new file mode 100644 index 00000000000..fce14a7c53a --- /dev/null +++ b/src/linenoise/linenoise.c @@ -0,0 +1,1199 @@ +/* linenoise.c -- guerrilla line editing library against the idea that a + * line editing lib needs to be 20,000 lines of C code. + * + * You can find the latest source code at: + * + * http://github.com/antirez/linenoise + * + * Does a number of crazy assumptions that happen to be true in 99.9999% of + * the 2010 UNIX computers around. + * + * ------------------------------------------------------------------------ + * + * Copyright (c) 2010-2016, Salvatore Sanfilippo + * Copyright (c) 2010-2013, Pieter Noordhuis + * + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * ------------------------------------------------------------------------ + * + * References: + * - http://invisible-island.net/xterm/ctlseqs/ctlseqs.html + * - http://www.3waylabs.com/nw/WWW/products/wizcon/vt220.html + * + * Todo list: + * - Filter bogus Ctrl+ combinations. + * - Win32 support + * + * Bloat: + * - History search like Ctrl+r in readline? + * + * List of escape sequences used by this program, we do everything just + * with three sequences. In order to be so cheap we may have some + * flickering effect with some slow terminal, but the lesser sequences + * the more compatible. + * + * EL (Erase Line) + * Sequence: ESC [ n K + * Effect: if n is 0 or missing, clear from cursor to end of line + * Effect: if n is 1, clear from beginning of line to cursor + * Effect: if n is 2, clear entire line + * + * CUF (CUrsor Forward) + * Sequence: ESC [ n C + * Effect: moves cursor forward n chars + * + * CUB (CUrsor Backward) + * Sequence: ESC [ n D + * Effect: moves cursor backward n chars + * + * The following is used to get the terminal width if getting + * the width with the TIOCGWINSZ ioctl fails + * + * DSR (Device Status Report) + * Sequence: ESC [ 6 n + * Effect: reports the current cusor position as ESC [ n ; m R + * where n is the row and m is the column + * + * When multi line mode is enabled, we also use an additional escape + * sequence. However multi line editing is disabled by default. + * + * CUU (Cursor Up) + * Sequence: ESC [ n A + * Effect: moves cursor up of n chars. + * + * CUD (Cursor Down) + * Sequence: ESC [ n B + * Effect: moves cursor down of n chars. + * + * When linenoiseClearScreen() is called, two additional escape sequences + * are used in order to clear the screen and position the cursor at home + * position. 
+ * + * CUP (Cursor position) + * Sequence: ESC [ H + * Effect: moves the cursor to upper left corner + * + * ED (Erase display) + * Sequence: ESC [ 2 J + * Effect: clear the whole screen + * + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "linenoise.h" + +#define LINENOISE_DEFAULT_HISTORY_MAX_LEN 100 +#define LINENOISE_MAX_LINE 4096 +static char *unsupported_term[] = {"dumb","cons25","emacs",NULL}; +static linenoiseCompletionCallback *completionCallback = NULL; +static linenoiseHintsCallback *hintsCallback = NULL; +static linenoiseFreeHintsCallback *freeHintsCallback = NULL; + +static struct termios orig_termios; /* In order to restore at exit.*/ +static int rawmode = 0; /* For atexit() function to check if restore is needed*/ +static int mlmode = 0; /* Multi line mode. Default is single line. */ +static int atexit_registered = 0; /* Register atexit just 1 time. */ +static int history_max_len = LINENOISE_DEFAULT_HISTORY_MAX_LEN; +static int history_len = 0; +static char **history = NULL; + +/* The linenoiseState structure represents the state during line editing. + * We pass this state to functions implementing specific editing + * functionalities. */ +struct linenoiseState { + int ifd; /* Terminal stdin file descriptor. */ + int ofd; /* Terminal stdout file descriptor. */ + char *buf; /* Edited line buffer. */ + size_t buflen; /* Edited line buffer size. */ + const char *prompt; /* Prompt to display. */ + size_t plen; /* Prompt length. */ + size_t pos; /* Current cursor position. */ + size_t oldpos; /* Previous refresh cursor position. */ + size_t len; /* Current edited line length. */ + size_t cols; /* Number of columns in terminal. */ + size_t maxrows; /* Maximum num of rows used so far (multiline mode) */ + int history_index; /* The history index we are currently editing. */ +}; + +enum KEY_ACTION{ + KEY_NULL = 0, /* NULL */ + CTRL_A = 1, /* Ctrl+a */ + CTRL_B = 2, /* Ctrl-b */ + CTRL_C = 3, /* Ctrl-c */ + CTRL_D = 4, /* Ctrl-d */ + CTRL_E = 5, /* Ctrl-e */ + CTRL_F = 6, /* Ctrl-f */ + CTRL_H = 8, /* Ctrl-h */ + TAB = 9, /* Tab */ + CTRL_K = 11, /* Ctrl+k */ + CTRL_L = 12, /* Ctrl+l */ + ENTER = 13, /* Enter */ + CTRL_N = 14, /* Ctrl-n */ + CTRL_P = 16, /* Ctrl-p */ + CTRL_T = 20, /* Ctrl-t */ + CTRL_U = 21, /* Ctrl+u */ + CTRL_W = 23, /* Ctrl+w */ + ESC = 27, /* Escape */ + BACKSPACE = 127 /* Backspace */ +}; + +static void linenoiseAtExit(void); +int linenoiseHistoryAdd(const char *line); +static void refreshLine(struct linenoiseState *l); + +/* Debugging macro. */ +#if 0 +FILE *lndebug_fp = NULL; +#define lndebug(...) \ + do { \ + if (lndebug_fp == NULL) { \ + lndebug_fp = fopen("/tmp/lndebug.txt","a"); \ + fprintf(lndebug_fp, \ + "[%d %d %d] p: %d, rows: %d, rpos: %d, max: %d, oldmax: %d\n", \ + (int)l->len,(int)l->pos,(int)l->oldpos,plen,rows,rpos, \ + (int)l->maxrows,old_rows); \ + } \ + fprintf(lndebug_fp, ", " __VA_ARGS__); \ + fflush(lndebug_fp); \ + } while (0) +#else +#define lndebug(fmt, ...) +#endif + +/* ======================= Low level terminal handling ====================== */ + +/* Set if to use or not the multi line mode. */ +void linenoiseSetMultiLine(int ml) { + mlmode = ml; +} + +/* Return true if the terminal name is in the list of terminals we know are + * not able to understand basic escape sequences. 
*/ +static int isUnsupportedTerm(void) { + char *term = getenv("TERM"); + int j; + + if (term == NULL) return 0; + for (j = 0; unsupported_term[j]; j++) + if (!strcasecmp(term,unsupported_term[j])) return 1; + return 0; +} + +/* Raw mode: 1960 magic shit. */ +static int enableRawMode(int fd) { + struct termios raw; + + if (!isatty(STDIN_FILENO)) goto fatal; + if (!atexit_registered) { + atexit(linenoiseAtExit); + atexit_registered = 1; + } + if (tcgetattr(fd,&orig_termios) == -1) goto fatal; + + raw = orig_termios; /* modify the original mode */ + /* input modes: no break, no CR to NL, no parity check, no strip char, + * no start/stop output control. */ + raw.c_iflag &= ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON); + /* output modes - disable post processing */ + raw.c_oflag &= ~(OPOST); + /* control modes - set 8 bit chars */ + raw.c_cflag |= (CS8); + /* local modes - choing off, canonical off, no extended functions, + * no signal chars (^Z,^C) */ + raw.c_lflag &= ~(ECHO | ICANON | IEXTEN | ISIG); + /* control chars - set return condition: min number of bytes and timer. + * We want read to return every single byte, without timeout. */ + raw.c_cc[VMIN] = 1; raw.c_cc[VTIME] = 0; /* 1 byte, no timer */ + + /* put terminal in raw mode after flushing */ + if (tcsetattr(fd,TCSAFLUSH,&raw) < 0) goto fatal; + rawmode = 1; + return 0; + +fatal: + errno = ENOTTY; + return -1; +} + +static void disableRawMode(int fd) { + /* Don't even check the return value as it's too late. */ + if (rawmode && tcsetattr(fd,TCSAFLUSH,&orig_termios) != -1) + rawmode = 0; +} + +/* Use the ESC [6n escape sequence to query the horizontal cursor position + * and return it. On error -1 is returned, on success the position of the + * cursor. */ +static int getCursorPosition(int ifd, int ofd) { + char buf[32]; + int cols, rows; + unsigned int i = 0; + + /* Report cursor location */ + if (write(ofd, "\x1b[6n", 4) != 4) return -1; + + /* Read the response: ESC [ rows ; cols R */ + while (i < sizeof(buf)-1) { + if (read(ifd,buf+i,1) != 1) break; + if (buf[i] == 'R') break; + i++; + } + buf[i] = '\0'; + + /* Parse it. */ + if (buf[0] != ESC || buf[1] != '[') return -1; + if (sscanf(buf+2,"%d;%d",&rows,&cols) != 2) return -1; + return cols; +} + +/* Try to get the number of columns in the current terminal, or assume 80 + * if it fails. */ +static int getColumns(int ifd, int ofd) { + struct winsize ws; + + if (ioctl(1, TIOCGWINSZ, &ws) == -1 || ws.ws_col == 0) { + /* ioctl() failed. Try to query the terminal itself. */ + int start, cols; + + /* Get the initial position so we can restore it later. */ + start = getCursorPosition(ifd,ofd); + if (start == -1) goto failed; + + /* Go to right margin and get position. */ + if (write(ofd,"\x1b[999C",6) != 6) goto failed; + cols = getCursorPosition(ifd,ofd); + if (cols == -1) goto failed; + + /* Restore position. */ + if (cols > start) { + char seq[32]; + snprintf(seq,32,"\x1b[%dD",cols-start); + if (write(ofd,seq,strlen(seq)) == -1) { + /* Can't recover... */ + } + } + return cols; + } else { + return ws.ws_col; + } + +failed: + return 80; +} + +/* Clear the screen. Used to handle ctrl+l */ +void linenoiseClearScreen(void) { + if (write(STDOUT_FILENO,"\x1b[H\x1b[2J",7) <= 0) { + /* nothing to do, just to avoid warning. */ + } +} + +/* Beep, used for completion when there is nothing to complete or when all + * the choices were already shown. 
*/ +static void linenoiseBeep(void) { + fprintf(stderr, "\x7"); + fflush(stderr); +} + +/* ============================== Completion ================================ */ + +/* Free a list of completion option populated by linenoiseAddCompletion(). */ +static void freeCompletions(linenoiseCompletions *lc) { + size_t i; + for (i = 0; i < lc->len; i++) + free(lc->cvec[i]); + if (lc->cvec != NULL) + free(lc->cvec); +} + +/* This is an helper function for linenoiseEdit() and is called when the + * user types the key in order to complete the string currently in the + * input. + * + * The state of the editing is encapsulated into the pointed linenoiseState + * structure as described in the structure definition. */ +static int completeLine(struct linenoiseState *ls) { + linenoiseCompletions lc = { 0, NULL }; + int nread, nwritten; + char c = 0; + + completionCallback(ls->buf,&lc); + if (lc.len == 0) { + linenoiseBeep(); + } else { + size_t stop = 0, i = 0; + + while(!stop) { + /* Show completion or original buffer */ + if (i < lc.len) { + struct linenoiseState saved = *ls; + + ls->len = ls->pos = strlen(lc.cvec[i]); + ls->buf = lc.cvec[i]; + refreshLine(ls); + ls->len = saved.len; + ls->pos = saved.pos; + ls->buf = saved.buf; + } else { + refreshLine(ls); + } + + nread = read(ls->ifd,&c,1); + if (nread <= 0) { + freeCompletions(&lc); + return -1; + } + + switch(c) { + case 9: /* tab */ + i = (i+1) % (lc.len+1); + if (i == lc.len) linenoiseBeep(); + break; + case 27: /* escape */ + /* Re-show original buffer */ + if (i < lc.len) refreshLine(ls); + stop = 1; + break; + default: + /* Update buffer and return */ + if (i < lc.len) { + nwritten = snprintf(ls->buf,ls->buflen,"%s",lc.cvec[i]); + ls->len = ls->pos = nwritten; + } + stop = 1; + break; + } + } + } + + freeCompletions(&lc); + return c; /* Return last read character */ +} + +/* Register a callback function to be called for tab-completion. */ +void linenoiseSetCompletionCallback(linenoiseCompletionCallback *fn) { + completionCallback = fn; +} + +/* Register a hits function to be called to show hits to the user at the + * right of the prompt. */ +void linenoiseSetHintsCallback(linenoiseHintsCallback *fn) { + hintsCallback = fn; +} + +/* Register a function to free the hints returned by the hints callback + * registered with linenoiseSetHintsCallback(). */ +void linenoiseSetFreeHintsCallback(linenoiseFreeHintsCallback *fn) { + freeHintsCallback = fn; +} + +/* This function is used by the callback function registered by the user + * in order to add completion options given the input string when the + * user typed . See the example.c source code for a very easy to + * understand example. */ +void linenoiseAddCompletion(linenoiseCompletions *lc, const char *str) { + size_t len = strlen(str); + char *copy, **cvec; + + copy = malloc(len+1); + if (copy == NULL) return; + memcpy(copy,str,len+1); + cvec = realloc(lc->cvec,sizeof(char*)*(lc->len+1)); + if (cvec == NULL) { + free(copy); + return; + } + lc->cvec = cvec; + lc->cvec[lc->len++] = copy; +} + +/* =========================== Line editing ================================= */ + +/* We define a very simple "append buffer" structure, that is an heap + * allocated string where we can append to. This is useful in order to + * write all the escape sequences in a buffer and flush them to the standard + * output in a single call, to avoid flickering effects. 
*/ +struct abuf { + char *b; + int len; +}; + +static void abInit(struct abuf *ab) { + ab->b = NULL; + ab->len = 0; +} + +static void abAppend(struct abuf *ab, const char *s, int len) { + char *new = realloc(ab->b,ab->len+len); + + if (new == NULL) return; + memcpy(new+ab->len,s,len); + ab->b = new; + ab->len += len; +} + +static void abFree(struct abuf *ab) { + free(ab->b); +} + +/* Helper of refreshSingleLine() and refreshMultiLine() to show hints + * to the right of the prompt. */ +void refreshShowHints(struct abuf *ab, struct linenoiseState *l, int plen) { + char seq[64]; + if (hintsCallback && plen+l->len < l->cols) { + int color = -1, bold = 0; + char *hint = hintsCallback(l->buf,&color,&bold); + if (hint) { + int hintlen = strlen(hint); + int hintmaxlen = l->cols-(plen+l->len); + if (hintlen > hintmaxlen) hintlen = hintmaxlen; + if (bold == 1 && color == -1) color = 37; + if (color != -1 || bold != 0) + snprintf(seq,64,"\033[%d;%d;49m",bold,color); + abAppend(ab,seq,strlen(seq)); + abAppend(ab,hint,hintlen); + if (color != -1 || bold != 0) + abAppend(ab,"\033[0m",4); + /* Call the function to free the hint returned. */ + if (freeHintsCallback) freeHintsCallback(hint); + } + } +} + +/* Single line low level line refresh. + * + * Rewrite the currently edited line accordingly to the buffer content, + * cursor position, and number of columns of the terminal. */ +static void refreshSingleLine(struct linenoiseState *l) { + char seq[64]; + size_t plen = strlen(l->prompt); + int fd = l->ofd; + char *buf = l->buf; + size_t len = l->len; + size_t pos = l->pos; + struct abuf ab; + + while((plen+pos) >= l->cols) { + buf++; + len--; + pos--; + } + while (plen+len > l->cols) { + len--; + } + + abInit(&ab); + /* Cursor to left edge */ + snprintf(seq,64,"\r"); + abAppend(&ab,seq,strlen(seq)); + /* Write the prompt and the current buffer content */ + abAppend(&ab,l->prompt,strlen(l->prompt)); + abAppend(&ab,buf,len); + /* Show hits if any. */ + refreshShowHints(&ab,l,plen); + /* Erase to right */ + snprintf(seq,64,"\x1b[0K"); + abAppend(&ab,seq,strlen(seq)); + /* Move cursor to original position. */ + snprintf(seq,64,"\r\x1b[%dC", (int)(pos+plen)); + abAppend(&ab,seq,strlen(seq)); + if (write(fd,ab.b,ab.len) == -1) {} /* Can't recover from write error. */ + abFree(&ab); +} + +/* Multi line low level line refresh. + * + * Rewrite the currently edited line accordingly to the buffer content, + * cursor position, and number of columns of the terminal. */ +static void refreshMultiLine(struct linenoiseState *l) { + char seq[64]; + int plen = strlen(l->prompt); + int rows = (plen+l->len+l->cols-1)/l->cols; /* rows used by current buf. */ + int rpos = (plen+l->oldpos+l->cols)/l->cols; /* cursor relative row. */ + int rpos2; /* rpos after refresh. */ + int col; /* colum position, zero-based. */ + int old_rows = l->maxrows; + int fd = l->ofd, j; + struct abuf ab; + + /* Update maxrows if needed. */ + if (rows > (int)l->maxrows) l->maxrows = rows; + + /* First step: clear all the lines used before. To do so start by + * going to the last row. */ + abInit(&ab); + if (old_rows-rpos > 0) { + lndebug("go down %d", old_rows-rpos); + snprintf(seq,64,"\x1b[%dB", old_rows-rpos); + abAppend(&ab,seq,strlen(seq)); + } + + /* Now for every row clear it, go up. */ + for (j = 0; j < old_rows-1; j++) { + lndebug("clear+up"); + snprintf(seq,64,"\r\x1b[0K\x1b[1A"); + abAppend(&ab,seq,strlen(seq)); + } + + /* Clean the top line. 
*/ + lndebug("clear"); + snprintf(seq,64,"\r\x1b[0K"); + abAppend(&ab,seq,strlen(seq)); + + /* Write the prompt and the current buffer content */ + abAppend(&ab,l->prompt,strlen(l->prompt)); + abAppend(&ab,l->buf,l->len); + + /* Show hits if any. */ + refreshShowHints(&ab,l,plen); + + /* If we are at the very end of the screen with our prompt, we need to + * emit a newline and move the prompt to the first column. */ + if (l->pos && + l->pos == l->len && + (l->pos+plen) % l->cols == 0) + { + lndebug(""); + abAppend(&ab,"\n",1); + snprintf(seq,64,"\r"); + abAppend(&ab,seq,strlen(seq)); + rows++; + if (rows > (int)l->maxrows) l->maxrows = rows; + } + + /* Move cursor to right position. */ + rpos2 = (plen+l->pos+l->cols)/l->cols; /* current cursor relative row. */ + lndebug("rpos2 %d", rpos2); + + /* Go up till we reach the expected positon. */ + if (rows-rpos2 > 0) { + lndebug("go-up %d", rows-rpos2); + snprintf(seq,64,"\x1b[%dA", rows-rpos2); + abAppend(&ab,seq,strlen(seq)); + } + + /* Set column. */ + col = (plen+(int)l->pos) % (int)l->cols; + lndebug("set col %d", 1+col); + if (col) + snprintf(seq,64,"\r\x1b[%dC", col); + else + snprintf(seq,64,"\r"); + abAppend(&ab,seq,strlen(seq)); + + lndebug("\n"); + l->oldpos = l->pos; + + if (write(fd,ab.b,ab.len) == -1) {} /* Can't recover from write error. */ + abFree(&ab); +} + +/* Calls the two low level functions refreshSingleLine() or + * refreshMultiLine() according to the selected mode. */ +static void refreshLine(struct linenoiseState *l) { + if (mlmode) + refreshMultiLine(l); + else + refreshSingleLine(l); +} + +/* Insert the character 'c' at cursor current position. + * + * On error writing to the terminal -1 is returned, otherwise 0. */ +int linenoiseEditInsert(struct linenoiseState *l, char c) { + if (l->len < l->buflen) { + if (l->len == l->pos) { + l->buf[l->pos] = c; + l->pos++; + l->len++; + l->buf[l->len] = '\0'; + if ((!mlmode && l->plen+l->len < l->cols && !hintsCallback)) { + /* Avoid a full update of the line in the + * trivial case. */ + if (write(l->ofd,&c,1) == -1) return -1; + } else { + refreshLine(l); + } + } else { + memmove(l->buf+l->pos+1,l->buf+l->pos,l->len-l->pos); + l->buf[l->pos] = c; + l->len++; + l->pos++; + l->buf[l->len] = '\0'; + refreshLine(l); + } + } + return 0; +} + +/* Move cursor on the left. */ +void linenoiseEditMoveLeft(struct linenoiseState *l) { + if (l->pos > 0) { + l->pos--; + refreshLine(l); + } +} + +/* Move cursor on the right. */ +void linenoiseEditMoveRight(struct linenoiseState *l) { + if (l->pos != l->len) { + l->pos++; + refreshLine(l); + } +} + +/* Move cursor to the start of the line. */ +void linenoiseEditMoveHome(struct linenoiseState *l) { + if (l->pos != 0) { + l->pos = 0; + refreshLine(l); + } +} + +/* Move cursor to the end of the line. */ +void linenoiseEditMoveEnd(struct linenoiseState *l) { + if (l->pos != l->len) { + l->pos = l->len; + refreshLine(l); + } +} + +/* Substitute the currently edited line with the next or previous history + * entry as specified by 'dir'. */ +#define LINENOISE_HISTORY_NEXT 0 +#define LINENOISE_HISTORY_PREV 1 +void linenoiseEditHistoryNext(struct linenoiseState *l, int dir) { + if (history_len > 1) { + /* Update the current history entry before to + * overwrite it with the next one. */ + free(history[history_len - 1 - l->history_index]); + history[history_len - 1 - l->history_index] = strdup(l->buf); + /* Show the new entry */ + l->history_index += (dir == LINENOISE_HISTORY_PREV) ? 
1 : -1; + if (l->history_index < 0) { + l->history_index = 0; + return; + } else if (l->history_index >= history_len) { + l->history_index = history_len-1; + return; + } + strncpy(l->buf,history[history_len - 1 - l->history_index],l->buflen); + l->buf[l->buflen-1] = '\0'; + l->len = l->pos = strlen(l->buf); + refreshLine(l); + } +} + +/* Delete the character at the right of the cursor without altering the cursor + * position. Basically this is what happens with the "Delete" keyboard key. */ +void linenoiseEditDelete(struct linenoiseState *l) { + if (l->len > 0 && l->pos < l->len) { + memmove(l->buf+l->pos,l->buf+l->pos+1,l->len-l->pos-1); + l->len--; + l->buf[l->len] = '\0'; + refreshLine(l); + } +} + +/* Backspace implementation. */ +void linenoiseEditBackspace(struct linenoiseState *l) { + if (l->pos > 0 && l->len > 0) { + memmove(l->buf+l->pos-1,l->buf+l->pos,l->len-l->pos); + l->pos--; + l->len--; + l->buf[l->len] = '\0'; + refreshLine(l); + } +} + +/* Delete the previosu word, maintaining the cursor at the start of the + * current word. */ +void linenoiseEditDeletePrevWord(struct linenoiseState *l) { + size_t old_pos = l->pos; + size_t diff; + + while (l->pos > 0 && l->buf[l->pos-1] == ' ') + l->pos--; + while (l->pos > 0 && l->buf[l->pos-1] != ' ') + l->pos--; + diff = old_pos - l->pos; + memmove(l->buf+l->pos,l->buf+old_pos,l->len-old_pos+1); + l->len -= diff; + refreshLine(l); +} + +/* This function is the core of the line editing capability of linenoise. + * It expects 'fd' to be already in "raw mode" so that every key pressed + * will be returned ASAP to read(). + * + * The resulting string is put into 'buf' when the user type enter, or + * when ctrl+d is typed. + * + * The function returns the length of the current buffer. */ +static int linenoiseEdit(int stdin_fd, int stdout_fd, char *buf, size_t buflen, const char *prompt) +{ + struct linenoiseState l; + + /* Populate the linenoise state that we pass to functions implementing + * specific editing functionalities. */ + l.ifd = stdin_fd; + l.ofd = stdout_fd; + l.buf = buf; + l.buflen = buflen; + l.prompt = prompt; + l.plen = strlen(prompt); + l.oldpos = l.pos = 0; + l.len = 0; + l.cols = getColumns(stdin_fd, stdout_fd); + l.maxrows = 0; + l.history_index = 0; + + /* Buffer starts empty. */ + l.buf[0] = '\0'; + l.buflen--; /* Make sure there is always space for the nulterm */ + + /* The latest history entry is always our current buffer, that + * initially is just an empty string. */ + linenoiseHistoryAdd(""); + + if (write(l.ofd,prompt,l.plen) == -1) return -1; + while(1) { + char c; + int nread; + char seq[3]; + + nread = read(l.ifd,&c,1); + if (nread <= 0) return l.len; + + /* Only autocomplete when the callback is set. It returns < 0 when + * there was an error reading from fd. Otherwise it will return the + * character that should be handled next. */ + if (c == 9 && completionCallback != NULL) { + c = completeLine(&l); + /* Return on errors */ + if (c < 0) return l.len; + /* Read next character when 0 */ + if (c == 0) continue; + } + + switch(c) { + case ENTER: /* enter */ + history_len--; + free(history[history_len]); + if (mlmode) linenoiseEditMoveEnd(&l); + if (hintsCallback) { + /* Force a refresh without hints to leave the previous + * line as the user typed it after a newline. 
*/ + linenoiseHintsCallback *hc = hintsCallback; + hintsCallback = NULL; + refreshLine(&l); + hintsCallback = hc; + } + return (int)l.len; + case CTRL_C: /* ctrl-c */ + errno = EAGAIN; + return -1; + case BACKSPACE: /* backspace */ + case 8: /* ctrl-h */ + linenoiseEditBackspace(&l); + break; + case CTRL_D: /* ctrl-d, remove char at right of cursor, or if the + line is empty, act as end-of-file. */ + if (l.len > 0) { + linenoiseEditDelete(&l); + } else { + history_len--; + free(history[history_len]); + return -1; + } + break; + case CTRL_T: /* ctrl-t, swaps current character with previous. */ + if (l.pos > 0 && l.pos < l.len) { + int aux = buf[l.pos-1]; + buf[l.pos-1] = buf[l.pos]; + buf[l.pos] = aux; + if (l.pos != l.len-1) l.pos++; + refreshLine(&l); + } + break; + case CTRL_B: /* ctrl-b */ + linenoiseEditMoveLeft(&l); + break; + case CTRL_F: /* ctrl-f */ + linenoiseEditMoveRight(&l); + break; + case CTRL_P: /* ctrl-p */ + linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_PREV); + break; + case CTRL_N: /* ctrl-n */ + linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_NEXT); + break; + case ESC: /* escape sequence */ + /* Read the next two bytes representing the escape sequence. + * Use two calls to handle slow terminals returning the two + * chars at different times. */ + if (read(l.ifd,seq,1) == -1) break; + if (read(l.ifd,seq+1,1) == -1) break; + + /* ESC [ sequences. */ + if (seq[0] == '[') { + if (seq[1] >= '0' && seq[1] <= '9') { + /* Extended escape, read additional byte. */ + if (read(l.ifd,seq+2,1) == -1) break; + if (seq[2] == '~') { + switch(seq[1]) { + case '3': /* Delete key. */ + linenoiseEditDelete(&l); + break; + } + } + } else { + switch(seq[1]) { + case 'A': /* Up */ + linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_PREV); + break; + case 'B': /* Down */ + linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_NEXT); + break; + case 'C': /* Right */ + linenoiseEditMoveRight(&l); + break; + case 'D': /* Left */ + linenoiseEditMoveLeft(&l); + break; + case 'H': /* Home */ + linenoiseEditMoveHome(&l); + break; + case 'F': /* End*/ + linenoiseEditMoveEnd(&l); + break; + } + } + } + + /* ESC O sequences. */ + else if (seq[0] == 'O') { + switch(seq[1]) { + case 'H': /* Home */ + linenoiseEditMoveHome(&l); + break; + case 'F': /* End*/ + linenoiseEditMoveEnd(&l); + break; + } + } + break; + default: + if (linenoiseEditInsert(&l,c)) return -1; + break; + case CTRL_U: /* Ctrl+u, delete the whole line. */ + buf[0] = '\0'; + l.pos = l.len = 0; + refreshLine(&l); + break; + case CTRL_K: /* Ctrl+k, delete from current to end of line. */ + buf[l.pos] = '\0'; + l.len = l.pos; + refreshLine(&l); + break; + case CTRL_A: /* Ctrl+a, go to the start of the line */ + linenoiseEditMoveHome(&l); + break; + case CTRL_E: /* ctrl+e, go to the end of the line */ + linenoiseEditMoveEnd(&l); + break; + case CTRL_L: /* ctrl+l, clear screen */ + linenoiseClearScreen(); + refreshLine(&l); + break; + case CTRL_W: /* ctrl+w, delete previous word */ + linenoiseEditDeletePrevWord(&l); + break; + } + } + return l.len; +} + +/* This special mode is used by linenoise in order to print scan codes + * on screen for debugging / development purposes. It is implemented + * by the linenoise_example program using the --keycodes option. */ +void linenoisePrintKeyCodes(void) { + char quit[4]; + + printf("Linenoise key codes debugging mode.\n" + "Press keys to see scan codes. 
Type 'quit' at any time to exit.\n"); + if (enableRawMode(STDIN_FILENO) == -1) return; + memset(quit,' ',4); + while(1) { + char c; + int nread; + + nread = read(STDIN_FILENO,&c,1); + if (nread <= 0) continue; + memmove(quit,quit+1,sizeof(quit)-1); /* shift string to left. */ + quit[sizeof(quit)-1] = c; /* Insert current char on the right. */ + if (memcmp(quit,"quit",sizeof(quit)) == 0) break; + + printf("'%c' %02x (%d) (type quit to exit)\n", + isprint(c) ? c : '?', (int)c, (int)c); + printf("\r"); /* Go left edge manually, we are in raw mode. */ + fflush(stdout); + } + disableRawMode(STDIN_FILENO); +} + +/* This function calls the line editing function linenoiseEdit() using + * the STDIN file descriptor set in raw mode. */ +static int linenoiseRaw(char *buf, size_t buflen, const char *prompt) { + int count; + + if (buflen == 0) { + errno = EINVAL; + return -1; + } + + if (enableRawMode(STDIN_FILENO) == -1) return -1; + count = linenoiseEdit(STDIN_FILENO, STDOUT_FILENO, buf, buflen, prompt); + disableRawMode(STDIN_FILENO); + printf("\n"); + return count; +} + +/* This function is called when linenoise() is called with the standard + * input file descriptor not attached to a TTY. So for example when the + * program using linenoise is called in pipe or with a file redirected + * to its standard input. In this case, we want to be able to return the + * line regardless of its length (by default we are limited to 4k). */ +static char *linenoiseNoTTY(void) { + char *line = NULL; + size_t len = 0, maxlen = 0; + + while(1) { + if (len == maxlen) { + if (maxlen == 0) maxlen = 16; + maxlen *= 2; + char *oldval = line; + line = realloc(line,maxlen); + if (line == NULL) { + if (oldval) free(oldval); + return NULL; + } + } + int c = fgetc(stdin); + if (c == EOF || c == '\n') { + if (c == EOF && len == 0) { + free(line); + return NULL; + } else { + line[len] = '\0'; + return line; + } + } else { + line[len] = c; + len++; + } + } +} + +/* The high level function that is the main API of the linenoise library. + * This function checks if the terminal has basic capabilities, just checking + * for a blacklist of stupid terminals, and later either calls the line + * editing function or uses dummy fgets() so that you will be able to type + * something even in the most desperate of the conditions. */ +char *linenoise(const char *prompt) { + char buf[LINENOISE_MAX_LINE]; + int count; + + if (!isatty(STDIN_FILENO)) { + /* Not a tty: read from file / pipe. In this mode we don't want any + * limit to the line size, so we call a function to handle that. */ + return linenoiseNoTTY(); + } else if (isUnsupportedTerm()) { + size_t len; + + printf("%s",prompt); + fflush(stdout); + if (fgets(buf,LINENOISE_MAX_LINE,stdin) == NULL) return NULL; + len = strlen(buf); + while(len && (buf[len-1] == '\n' || buf[len-1] == '\r')) { + len--; + buf[len] = '\0'; + } + return strdup(buf); + } else { + count = linenoiseRaw(buf,LINENOISE_MAX_LINE,prompt); + if (count == -1) return NULL; + return strdup(buf); + } +} + +/* This is just a wrapper the user may want to call in order to make sure + * the linenoise returned buffer is freed with the same allocator it was + * created with. Useful when the main program is using an alternative + * allocator. */ +void linenoiseFree(void *ptr) { + free(ptr); +} + +/* ================================ History ================================= */ + +/* Free the history, but does not reset it. Only used when we have to + * exit() to avoid memory leaks are reported by valgrind & co. 
*/ +static void freeHistory(void) { + if (history) { + int j; + + for (j = 0; j < history_len; j++) + free(history[j]); + free(history); + } +} + +/* At exit we'll try to fix the terminal to the initial conditions. */ +static void linenoiseAtExit(void) { + disableRawMode(STDIN_FILENO); + freeHistory(); +} + +/* This is the API call to add a new entry in the linenoise history. + * It uses a fixed array of char pointers that are shifted (memmoved) + * when the history max length is reached in order to remove the older + * entry and make room for the new one, so it is not exactly suitable for huge + * histories, but will work well for a few hundred of entries. + * + * Using a circular buffer is smarter, but a bit more complex to handle. */ +int linenoiseHistoryAdd(const char *line) { + char *linecopy; + + if (history_max_len == 0) return 0; + + /* Initialization on first call. */ + if (history == NULL) { + history = malloc(sizeof(char*)*history_max_len); + if (history == NULL) return 0; + memset(history,0,(sizeof(char*)*history_max_len)); + } + + /* Don't add duplicated lines. */ + if (history_len && !strcmp(history[history_len-1], line)) return 0; + + /* Add an heap allocated copy of the line in the history. + * If we reached the max length, remove the older line. */ + linecopy = strdup(line); + if (!linecopy) return 0; + if (history_len == history_max_len) { + free(history[0]); + memmove(history,history+1,sizeof(char*)*(history_max_len-1)); + history_len--; + } + history[history_len] = linecopy; + history_len++; + return 1; +} + +/* Set the maximum length for the history. This function can be called even + * if there is already some history, the function will make sure to retain + * just the latest 'len' elements if the new history length value is smaller + * than the amount of items already inside the history. */ +int linenoiseHistorySetMaxLen(int len) { + char **new; + + if (len < 1) return 0; + if (history) { + int tocopy = history_len; + + new = malloc(sizeof(char*)*len); + if (new == NULL) return 0; + + /* If we can't copy everything, free the elements we'll not use. */ + if (len < tocopy) { + int j; + + for (j = 0; j < tocopy-len; j++) free(history[j]); + tocopy = len; + } + memset(new,0,sizeof(char*)*len); + memcpy(new,history+(history_len-tocopy), sizeof(char*)*tocopy); + free(history); + history = new; + } + history_max_len = len; + if (history_len > history_max_len) + history_len = history_max_len; + return 1; +} + +/* Save the history in the specified file. On success 0 is returned + * otherwise -1 is returned. */ +int linenoiseHistorySave(const char *filename) { + mode_t old_umask = umask(S_IXUSR|S_IRWXG|S_IRWXO); + FILE *fp; + int j; + + fp = fopen(filename,"w"); + umask(old_umask); + if (fp == NULL) return -1; + chmod(filename,S_IRUSR|S_IWUSR); + for (j = 0; j < history_len; j++) + fprintf(fp,"%s\n",history[j]); + fclose(fp); + return 0; +} + +/* Load the history from the specified file. If the file does not exist + * zero is returned and no operation is performed. + * + * If the file exists and the operation succeeded 0 is returned, otherwise + * on error -1 is returned. 
*/ +int linenoiseHistoryLoad(const char *filename) { + FILE *fp = fopen(filename,"r"); + char buf[LINENOISE_MAX_LINE]; + + if (fp == NULL) return -1; + + while (fgets(buf,LINENOISE_MAX_LINE,fp) != NULL) { + char *p; + + p = strchr(buf,'\r'); + if (!p) p = strchr(buf,'\n'); + if (p) *p = '\0'; + linenoiseHistoryAdd(buf); + } + fclose(fp); + return 0; +} diff --git a/src/linenoise/linenoise.h b/src/linenoise/linenoise.h new file mode 100644 index 00000000000..ed20232c576 --- /dev/null +++ b/src/linenoise/linenoise.h @@ -0,0 +1,73 @@ +/* linenoise.h -- VERSION 1.0 + * + * Guerrilla line editing library against the idea that a line editing lib + * needs to be 20,000 lines of C code. + * + * See linenoise.c for more information. + * + * ------------------------------------------------------------------------ + * + * Copyright (c) 2010-2014, Salvatore Sanfilippo + * Copyright (c) 2010-2013, Pieter Noordhuis + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#ifndef __LINENOISE_H +#define __LINENOISE_H + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct linenoiseCompletions { + size_t len; + char **cvec; +} linenoiseCompletions; + +typedef void(linenoiseCompletionCallback)(const char *, linenoiseCompletions *); +typedef char*(linenoiseHintsCallback)(const char *, int *color, int *bold); +typedef void(linenoiseFreeHintsCallback)(void *); +void linenoiseSetCompletionCallback(linenoiseCompletionCallback *); +void linenoiseSetHintsCallback(linenoiseHintsCallback *); +void linenoiseSetFreeHintsCallback(linenoiseFreeHintsCallback *); +void linenoiseAddCompletion(linenoiseCompletions *, const char *); + +char *linenoise(const char *prompt); +void linenoiseFree(void *ptr); +int linenoiseHistoryAdd(const char *line); +int linenoiseHistorySetMaxLen(int len); +int linenoiseHistorySave(const char *filename); +int linenoiseHistoryLoad(const char *filename); +void linenoiseClearScreen(void); +void linenoiseSetMultiLine(int ml); +void linenoisePrintKeyCodes(void); + +#ifdef __cplusplus +} +#endif + +#endif /* __LINENOISE_H */ diff --git a/src/nix/local.mk b/src/nix/local.mk index e71cf16fabf..c7d2d328aab 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -2,12 +2,8 @@ programs += nix nix_DIR := $(d) -nix_SOURCES := $(wildcard $(d)/*.cc) +nix_SOURCES := $(wildcard $(d)/*.cc) src/linenoise/linenoise.c nix_LIBS = libexpr libmain libstore libutil libformat -ifeq ($(HAVE_READLINE), 1) - nix_LDFLAGS += -lreadline -endif - $(eval $(call install-symlink, nix, $(bindir)/nix-hash)) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 13488bf1dbd..437c7903ed4 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -1,13 +1,8 @@ -#if HAVE_LIBREADLINE - #include #include #include -#include -#include - #include "shared.hh" #include "eval.hh" #include "eval-inline.hh" @@ -18,6 +13,9 @@ #include "affinity.hh" #include "globals.hh" #include "command.hh" +#include "finally.hh" + +#include "src/linenoise/linenoise.h" namespace nix { @@ -44,14 +42,11 @@ struct NixRepl const Path historyFile; - StringSet completions; - StringSet::iterator curCompletion; - NixRepl(const Strings & searchPath, nix::ref store); ~NixRepl(); void mainLoop(const Strings & files); - void completePrefix(string prefix); - bool getLine(string & input, const char * prompt); + StringSet completePrefix(string prefix); + bool getLine(string & input, const std::string &prompt); Path getDerivationPath(Value & v); bool processLine(string line); void loadFile(const Path & path); @@ -122,7 +117,17 @@ NixRepl::NixRepl(const Strings & searchPath, nix::ref store) NixRepl::~NixRepl() { - write_history(historyFile.c_str()); + linenoiseHistorySave(historyFile.c_str()); +} + + +static NixRepl * curRepl; // ugly + +static void completionCallback(const char * s, linenoiseCompletions *lc) +{ + /* Otherwise, return all symbols that start with the prefix. 
*/ + for (auto & c : curRepl->completePrefix(s)) + linenoiseAddCompletion(lc, c.c_str()); } @@ -137,22 +142,20 @@ void NixRepl::mainLoop(const Strings & files) reloadFiles(); if (!loadedFiles.empty()) std::cout << std::endl; - // Allow nix-repl specific settings in .inputrc - rl_readline_name = "nix-repl"; - using_history(); createDirs(dirOf(historyFile)); - read_history(historyFile.c_str()); + linenoiseHistorySetMaxLen(1000); + linenoiseHistoryLoad(historyFile.c_str()); - string input; + curRepl = this; + linenoiseSetCompletionCallback(completionCallback); + + std::string input; while (true) { // When continuing input from previous lines, don't print a prompt, just align to the same // number of chars as the prompt. - const char * prompt = input.empty() ? "nix-repl> " : " "; - if (!getLine(input, prompt)) { - std::cout << std::endl; + if (!getLine(input, input.empty() ? "nix-repl> " : " ")) break; - } try { if (!removeWhitespace(input).empty() && !processLine(input)) return; @@ -170,103 +173,57 @@ void NixRepl::mainLoop(const Strings & files) printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } - // We handled the current input fully, so we should clear it and read brand new input. + // We handled the current input fully, so we should clear it + // and read brand new input. + linenoiseHistoryAdd(input.c_str()); input.clear(); std::cout << std::endl; } } -/* Apparently, the only way to get readline() to return on Ctrl-C - (SIGINT) is to use siglongjmp(). That's fucked up... */ -static sigjmp_buf sigintJmpBuf; - - -static void sigintHandler(int signo) +bool NixRepl::getLine(string & input, const std::string &prompt) { - siglongjmp(sigintJmpBuf, 1); -} - - -/* Oh, if only g++ had nested functions... */ -NixRepl * curRepl; - -char * completerThunk(const char * s, int state) -{ - string prefix(s); - - /* If the prefix has a slash in it, use readline's builtin filename - completer. */ - if (prefix.find('/') != string::npos) - return rl_filename_completion_function(s, state); - - /* Otherwise, return all symbols that start with the prefix. 
*/ - if (state == 0) { - curRepl->completePrefix(s); - curRepl->curCompletion = curRepl->completions.begin(); - } - if (curRepl->curCompletion == curRepl->completions.end()) return 0; - return strdup((curRepl->curCompletion++)->c_str()); + char * s = linenoise(prompt.c_str()); + Finally doFree([&]() { linenoiseFree(s); }); + if (!s) return false; + input += s; + return true; } -bool NixRepl::getLine(string & input, const char * prompt) +StringSet NixRepl::completePrefix(string prefix) { - struct sigaction act, old; - act.sa_handler = sigintHandler; - sigfillset(&act.sa_mask); - act.sa_flags = 0; - if (sigaction(SIGINT, &act, &old)) - throw SysError("installing handler for SIGINT"); - - static sigset_t savedSignalMask, set; - sigemptyset(&set); - sigaddset(&set, SIGINT); - - if (sigprocmask(SIG_UNBLOCK, &set, &savedSignalMask)) - throw SysError("unblocking SIGINT"); + StringSet completions; - if (sigsetjmp(sigintJmpBuf, 1)) { - input.clear(); + size_t start = prefix.find_last_of(" \n\r\t(){}[]"); + std::string prev, cur; + if (start == std::string::npos) { + prev = ""; + cur = prefix; } else { - curRepl = this; - rl_completion_entry_function = completerThunk; - - char * s = readline(prompt); - if (!s) return false; - input.append(s); - input.push_back('\n'); - if (!removeWhitespace(s).empty()) { - add_history(s); - append_history(1, 0); - } - free(s); + prev = std::string(prefix, 0, start + 1); + cur = std::string(prefix, start + 1); } - _isInterrupted = 0; - - if (sigprocmask(SIG_SETMASK, &savedSignalMask, nullptr)) - throw SysError("restoring signals"); - - if (sigaction(SIGINT, &old, 0)) - throw SysError("restoring handler for SIGINT"); - - return true; -} - - -void NixRepl::completePrefix(string prefix) -{ - completions.clear(); - - size_t dot = prefix.rfind('.'); + size_t slash, dot; - if (dot == string::npos) { + if ((slash = cur.rfind('/')) != string::npos) { + try { + auto dir = std::string(cur, 0, slash); + auto prefix2 = std::string(cur, slash + 1); + for (auto & entry : readDirectory(dir == "" ? "/" : dir)) { + if (entry.name[0] != '.' && hasPrefix(entry.name, prefix2)) + completions.insert(prev + dir + "/" + entry.name); + } + } catch (Error &) { + } + } else if ((dot = cur.rfind('.')) == string::npos) { /* This is a variable name; look it up in the current scope. */ - StringSet::iterator i = varNames.lower_bound(prefix); + StringSet::iterator i = varNames.lower_bound(cur); while (i != varNames.end()) { - if (string(*i, 0, prefix.size()) != prefix) break; - completions.insert(*i); + if (string(*i, 0, cur.size()) != cur) break; + completions.insert(prev + *i); i++; } } else { @@ -274,8 +231,8 @@ void NixRepl::completePrefix(string prefix) /* This is an expression that should evaluate to an attribute set. Evaluate it to get the names of the attributes. */ - string expr(prefix, 0, dot); - string prefix2 = string(prefix, dot + 1); + string expr(cur, 0, dot); + string cur2 = string(cur, dot + 1); Expr * e = parseString(expr); Value v; @@ -284,8 +241,8 @@ void NixRepl::completePrefix(string prefix) for (auto & i : *v.attrs) { string name = i.name; - if (string(name, 0, prefix2.size()) != prefix2) continue; - completions.insert(expr + "." + name); + if (string(name, 0, cur2.size()) != cur2) continue; + completions.insert(prev + expr + "." + name); } } catch (ParseError & e) { @@ -296,6 +253,8 @@ void NixRepl::completePrefix(string prefix) // Quietly ignore undefined variable errors. 
} } + + return completions; } @@ -728,5 +687,3 @@ struct CmdRepl : StoreCommand static RegisterCommand r1(make_ref()); } - -#endif From 1fd59447d56a88add8874f9a8b0885a1acd13606 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 10 May 2017 18:38:17 +0200 Subject: [PATCH 0331/2196] Typo --- doc/manual/expressions/builtins.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 163153380ce..63d13e184a1 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -65,7 +65,7 @@ available as builtins.derivation. set Return the names of the attributes in the set - set in alphabetically sorted list. For instance, + set in an alphabetically sorted list. For instance, builtins.attrNames { y = 1; x = "foo"; } evaluates to [ "x" "y" ]. From 45d7b1a9e9018a2be5add1dc6d983e6b7b339c61 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 May 2017 13:26:03 +0200 Subject: [PATCH 0332/2196] LocalStore::addToStore(): Check info.narSize It allowed the client to specify bogus narSize values. In particular, Downloader::downloadCached wasn't setting narSize at all. --- src/libstore/download.cc | 1 + src/libstore/local-store.cc | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 4d502219ed8..93519ec663a 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -652,6 +652,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data); info.path = store->makeFixedOutputPath(false, hash, name); info.narHash = hashString(htSHA256, *sink.s); + info.narSize = sink.s->size(); info.ca = makeFixedOutputCA(false, hash); store->addToStore(info, sink.s, false, true); storePath = info.path; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c8e61126c1b..ed03c3f425b 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -919,8 +919,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & Hash h = hashString(htSHA256, *nar); if (h != info.narHash) - throw Error(format("hash mismatch importing path ‘%s’; expected hash ‘%s’, got ‘%s’") % - info.path % info.narHash.to_string() % h.to_string()); + throw Error("hash mismatch importing path ‘%s’; expected hash ‘%s’, got ‘%s’", + info.path, info.narHash.to_string(), h.to_string()); + + if (nar->size() != info.narSize) + throw Error("szie mismatch importing path ‘%s’; expected %s, got %s", + info.path, info.narSize, nar->size()); if (requireSigs && !dontCheckSigs && !info.checkSignatures(*this, publicKeys)) throw Error("cannot add path ‘%s’ because it lacks a valid signature", info.path); From 6f245bf24a1154142acf2b8dfa620b891f461d55 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 May 2017 13:31:23 +0200 Subject: [PATCH 0333/2196] Change the meaning of info.ultimate It now means "paths that were built locally". It no longer includes paths that were added locally. For those we don't need info.ultimate, since we have the content-addressability assertion (info.ca). 
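As a minimal sketch of the trust distinction described above (illustrative stand-ins only, not the actual Nix types; the real fields appear in the diff below), a locally added path can be re-verified from its content address, so only locally built derivation outputs still need the "ultimate" flag:

    #include <string>

    // Reduced stand-in for the two ValidPathInfo fields this patch
    // re-interprets; the real struct lives in store-api.hh.
    struct PathTrustInfo {
        bool ultimate = false;   // after this change: "built locally"
        std::string ca;          // content-address assertion, e.g. "fixed:sha256:..."
    };

    // Hypothetical helper: a content-addressed path can be checked against
    // its 'ca' field, so it does not need 'ultimate' to be trusted locally.
    bool trustedWithoutSignature(const PathTrustInfo & info)
    {
        return info.ultimate || !info.ca.empty();
    }
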
--- src/libstore/local-store.cc | 2 -- src/libstore/store-api.hh | 5 ++--- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index ed03c3f425b..207e8a40b6d 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1010,7 +1010,6 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name, info.path = dstPath; info.narHash = hash.first; info.narSize = hash.second; - info.ultimate = true; info.ca = makeFixedOutputCA(recursive, h); registerValidPath(info); } @@ -1073,7 +1072,6 @@ Path LocalStore::addTextToStore(const string & name, const string & s, info.narHash = narHash; info.narSize = sink.s->size(); info.references = references; - info.ultimate = true; info.ca = "text:" + hash.to_string(); registerValidPath(info); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index b06f5d86a93..929c95a0f2f 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -113,9 +113,8 @@ struct ValidPathInfo uint64_t narSize = 0; // 0 = unknown uint64_t id; // internal use only - /* Whether the path is ultimately trusted, that is, it was built - locally or is content-addressable (e.g. added via addToStore() - or the result of a fixed-output derivation). */ + /* Whether the path is ultimately trusted, that is, it's a + derivation output that was built locally. */ bool ultimate = false; StringSet sigs; // note: not necessarily verified From 2b2de5ef6aeb023f22e551e74e454acc73106cc8 Mon Sep 17 00:00:00 2001 From: Frederik Rietdijk Date: Thu, 11 May 2017 13:38:13 +0200 Subject: [PATCH 0334/2196] Document fetchTarball can take a sha256 Note that I refer to `nix-prefetch-url`. --- doc/manual/expressions/builtins.xml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 63d13e184a1..e9c641ab305 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -292,6 +292,24 @@ with import (fetchTarball https://github.com/NixOS/nixpkgs-channels/archive/nixo stdenv.mkDerivation { … } + Note that when obtaining the hash with nix-prefetch-url + the option --unpack is required. + + + This function can also verify the contents against a hash. + In that case, the function takes a set instead of a URL. The set + requires the attribute url and the attribute + sha256, e.g. + + +with import (fetchTarball { + url = https://github.com/NixOS/nixpkgs-channels/archive/nixos-14.12.tar.gz; + sha256 = "1jppksrfvbk5ypiqdz4cddxdl8z6zyzdb2srq8fcffr327ld5jj2"; +}) {}; + +stdenv.mkDerivation { … } + + This function is not available if Date: Thu, 11 May 2017 13:58:09 +0200 Subject: [PATCH 0335/2196] Don't allow untrusted users to set info.ultimate Note that a trusted signature was still required in this case so it was not a huge deal. 
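The hunk that follows adds this check to nix-daemon.cc. As a rough sketch of the pattern (the helper below is hypothetical and the struct is reduced to the one field that matters here), metadata supplied by an unprivileged client is stripped of anything only the daemon itself may assert before the path is registered:

    // Reduced stand-in; the real ValidPathInfo is defined in store-api.hh.
    struct ValidPathInfo { bool ultimate = false; /* other fields elided */ };

    // Hypothetical daemon-side sanitization of client-supplied path metadata.
    void sanitizeClientInfo(ValidPathInfo & info, bool trusted, bool & dontCheckSigs)
    {
        // An untrusted client may not disable signature checking...
        if (!trusted && dontCheckSigs) dontCheckSigs = false;
        // ...and may not claim the path was built locally ("ultimate"),
        // which would otherwise let it bypass the signature policy for
        // paths it merely uploaded.
        if (!trusted) info.ultimate = false;
    }
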
--- src/nix-daemon/nix-daemon.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 1b90fad165a..d2bb7b8c88b 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -621,6 +621,8 @@ static void performOp(ref store, bool trusted, unsigned int clientVe from >> info.ca >> repair >> dontCheckSigs; if (!trusted && dontCheckSigs) dontCheckSigs = false; + if (!trusted) + info.ultimate = false; TeeSink tee(from); parseDump(tee, tee.source); From ea65ae0f9c3461f9b098c8757e01f5c3add8b40c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 May 2017 13:59:47 +0200 Subject: [PATCH 0336/2196] Tweak error message --- src/libstore/download.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 93519ec663a..5390bdbf5ed 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -690,7 +690,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa } if (expectedStorePath != "" && storePath != expectedStorePath) - throw nix::Error(format("hash mismatch in file downloaded from ‘%s’") % url); + throw nix::Error("store path mismatch in file downloaded from ‘%s’", url); return storePath; } From 62d476c7ee5dbb79fb435895e0cda3fac8f53ba3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 May 2017 14:02:03 +0200 Subject: [PATCH 0337/2196] Fix typo --- src/libstore/store-api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 8405c66dd47..96799adb1e6 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -167,7 +167,7 @@ void checkStoreName(const string & name) collisions (for security). For instance, it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source. The former would have a starting with - "output:out:", while the latter would have a <2> starting with + "output:out:", while the latter would have a starting with "source:". */ From 510bc1735b3507b0f434303fdec5e824c879c838 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 May 2017 15:09:09 +0200 Subject: [PATCH 0338/2196] Add an option for extending the user agent header This is useful e.g. for distinguishing traffic to a binary cache (e.g. certain machines can use a different tag in the user agent). --- src/libstore/download.cc | 4 +++- src/libstore/globals.hh | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 5390bdbf5ed..dc1a3d94f74 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -220,7 +220,9 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str()); curl_easy_setopt(req, CURLOPT_FOLLOWLOCATION, 1L); curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1); - curl_easy_setopt(req, CURLOPT_USERAGENT, ("curl/" LIBCURL_VERSION " Nix/" + nixVersion).c_str()); + curl_easy_setopt(req, CURLOPT_USERAGENT, + ("curl/" LIBCURL_VERSION " Nix/" + nixVersion + + (settings.userAgentSuffix != "" ? 
" " + settings.userAgentSuffix.get() : "")).c_str()); #if LIBCURL_VERSION_NUM >= 0x072b00 curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1); #endif diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 7295b0d30af..af37ec61d7a 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -318,6 +318,9 @@ public: Setting connectTimeout{this, 0, "connect-timeout", "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."}; + + Setting userAgentSuffix{this, "", "user-agent-suffix", + "String appended to the user agent in HTTP requests."}; }; From 75a1d9849d7355c227ce76be17809a71852956b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benno=20F=C3=BCnfst=C3=BCck?= Date: Mon, 15 May 2017 10:17:53 +0200 Subject: [PATCH 0339/2196] nar-accessor: use tree, fixes readDirectory missing children Previously, if a directory `foo` existed and a file `foo-` (where `-` is any character that is sorted before `/`), then `readDirectory` would return an empty list. To fix this, we now use a tree where we can just access the children of the node, and do not need to rely on sorting behavior to list the contents of a directory. --- src/libstore/nar-accessor.cc | 109 ++++++++++++++++++++++++----------- 1 file changed, 76 insertions(+), 33 deletions(-) diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 4cb5de7449e..ee1cf385c08 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -2,6 +2,8 @@ #include "archive.hh" #include +#include +#include namespace nix { @@ -16,16 +18,36 @@ struct NarMember size_t start, size; std::string target; + + /* If this is a directory, all the children of the directory. */ + std::map children; + + NarMember* find(const Path & path) + { + if(path == "") return this; + + if(type != FSAccessor::Type::tDirectory) { + return nullptr; + } + + auto split = std::find(path.begin() + 1, path.end(), '/'); + std::string child_name(path.begin() + 1, split); + std::string remaining(split, path.end()); + + auto child = children.find(child_name); + if(child == children.end()) return nullptr; + + return child->second.find(remaining); + } + }; struct NarIndexer : ParseSink, StringSource { - // FIXME: should store this as a tree. Now we're vulnerable to - // O(nm) memory consumption (e.g. for x_0/.../x_n/{y_0..y_m}). 
- typedef std::map Members; - Members members; + NarMember root; + std::stack parents; - Path currentPath; + std::string currentName; std::string currentStart; bool isExec = false; @@ -33,28 +55,45 @@ struct NarIndexer : ParseSink, StringSource { } + void createMember(const Path & path, NarMember member) { + size_t level = std::count(path.begin(), path.end(), '/'); + while(parents.size() > level) { + parents.pop(); + } + + if(parents.empty()) { + root = std::move(member); + parents.push(&root); + } else { + if(parents.top()->type != FSAccessor::Type::tDirectory) { + throw Error(format("NAR file missing parent directory of path ‘%1%’") % path); + } + auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member)); + parents.push(&result.first->second); + } + } + void createDirectory(const Path & path) override { - members.emplace(path, - NarMember{FSAccessor::Type::tDirectory, false, 0, 0}); + createMember(path, {FSAccessor::Type::tDirectory, false, 0, 0 }); } void createRegularFile(const Path & path) override { - currentPath = path; + createMember(path, {FSAccessor::Type::tRegular, false, 0, 0 }); } void isExecutable() override { - isExec = true; + parents.top()->isExecutable = true; } void preallocateContents(unsigned long long size) override { currentStart = string(s, pos, 16); assert(size <= std::numeric_limits::max()); - members.emplace(currentPath, - NarMember{FSAccessor::Type::tRegular, isExec, pos, (size_t) size}); + parents.top()->size = (size_t)size; + parents.top()->start = pos; } void receiveContents(unsigned char * data, unsigned int len) override @@ -68,16 +107,23 @@ struct NarIndexer : ParseSink, StringSource void createSymlink(const Path & path, const string & target) override { - members.emplace(path, + createMember(path, NarMember{FSAccessor::Type::tSymlink, false, 0, 0, target}); } - Members::iterator find(const Path & path) + NarMember* find(const Path & path) { - auto i = members.find(path); - if (i == members.end()) + Path canon = path == "" ? "" : canonPath(path); + NarMember* result = root.find(canon); + return result; + } + + NarMember& at(const Path & path) { + auto result = find(path); + if(result == nullptr) { throw Error(format("NAR file does not contain path ‘%1%’") % path); - return i; + } + return *result; } }; @@ -93,44 +139,41 @@ struct NarAccessor : public FSAccessor Stat stat(const Path & path) override { - auto i = indexer.members.find(path); - if (i == indexer.members.end()) + auto i = indexer.find(path); + if (i == nullptr) return {FSAccessor::Type::tMissing, 0, false}; - return {i->second.type, i->second.size, i->second.isExecutable}; + return {i->type, i->size, i->isExecutable}; } StringSet readDirectory(const Path & path) override { - auto i = indexer.find(path); + auto i = indexer.at(path); - if (i->second.type != FSAccessor::Type::tDirectory) + if (i.type != FSAccessor::Type::tDirectory) throw Error(format("path ‘%1%’ inside NAR file is not a directory") % path); - ++i; StringSet res; - while (i != indexer.members.end() && isInDir(i->first, path)) { - // FIXME: really bad performance. 
- if (i->first.find('/', path.size() + 1) == std::string::npos) - res.insert(std::string(i->first, path.size() + 1)); - ++i; + for(auto&& child : i.children) { + res.insert(child.first); + } return res; } std::string readFile(const Path & path) override { - auto i = indexer.find(path); - if (i->second.type != FSAccessor::Type::tRegular) + auto i = indexer.at(path); + if (i.type != FSAccessor::Type::tRegular) throw Error(format("path ‘%1%’ inside NAR file is not a regular file") % path); - return std::string(*nar, i->second.start, i->second.size); + return std::string(*nar, i.start, i.size); } std::string readLink(const Path & path) override { - auto i = indexer.find(path); - if (i->second.type != FSAccessor::Type::tSymlink) + auto i = indexer.at(path); + if (i.type != FSAccessor::Type::tSymlink) throw Error(format("path ‘%1%’ inside NAR file is not a symlink") % path); - return i->second.target; + return i.target; } }; From 06880d7ed8b8ef8658eb965e6614136c67988970 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benno=20F=C3=BCnfst=C3=BCck?= Date: Mon, 15 May 2017 10:18:27 +0200 Subject: [PATCH 0340/2196] nix ls: support '/' for the root directory --- src/nix/ls.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 3476dfb0528..417b7b421b1 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -61,6 +61,10 @@ struct MixLs : virtual Args showFile(curPath, relPath); }; + if (path == "/") { + path = ""; + } + auto st = accessor->stat(path); if (st.type == FSAccessor::Type::tMissing) throw Error(format("path ‘%1%’ does not exist") % path); From 4412f7c08367b17b3be723ee42df159100d93922 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benno=20F=C3=BCnfst=C3=BCck?= Date: Mon, 15 May 2017 12:23:21 +0200 Subject: [PATCH 0341/2196] nar-archive.cc: add tests for the nar index --- tests/local.mk | 3 ++- tests/nar-index.nix | 23 +++++++++++++++++++++++ tests/nar-index.sh | 23 +++++++++++++++++++++++ 3 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 tests/nar-index.nix create mode 100644 tests/nar-index.sh diff --git a/tests/local.mk b/tests/local.mk index 108e3febdb0..7d99a0fc767 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -13,7 +13,8 @@ nix_tests = \ check-reqs.sh pass-as-file.sh tarball.sh restricted.sh \ placeholders.sh nix-shell.sh \ linux-sandbox.sh \ - build-remote.sh + build-remote.sh \ + nar-index.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) diff --git a/tests/nar-index.nix b/tests/nar-index.nix new file mode 100644 index 00000000000..0e2a7f72113 --- /dev/null +++ b/tests/nar-index.nix @@ -0,0 +1,23 @@ +with import ./config.nix; + +rec { + a = mkDerivation { + name = "nar-index-a"; + builder = builtins.toFile "builder.sh" + '' + mkdir $out + mkdir $out/foo + touch $out/foo-x + touch $out/foo/bar + touch $out/foo/baz + touch $out/qux + mkdir $out/zyx + + cat >$out/foo/data < $narFile + +echo "check that find and ls-nar match" +( cd $storePath; find . 
| sort ) > files.find +nix ls-nar -R -d $narFile "" | sort > files.ls-nar +diff -u files.find files.ls-nar + +echo "check that file contents of data match" +nix cat-nar $narFile /foo/data > data.cat-nar +diff -u data.cat-nar $storePath/foo/data + +echo "check that file contents of baz match" +nix cat-nar $narFile /foo/baz > baz.cat-nar +diff -u baz.cat-nar $storePath/foo/baz \ No newline at end of file From 2b761d5f50b7dc17dc55c31980c2253c37b3c920 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 11 May 2017 17:06:07 +0200 Subject: [PATCH 0342/2196] Make fmt() non-recursive --- src/libutil/logging.hh | 2 +- src/libutil/types.hh | 17 ++++++----------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 81aebccdca4..a8c69dbd956 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -88,7 +88,7 @@ template inline void warn(const std::string & fs, Args... args) { boost::format f(fs); - formatHelper(f, args...); + nop{boost::io::detail::feed(f, args)...}; logger->warn(f.str()); } diff --git a/src/libutil/types.hh b/src/libutil/types.hh index 1429c238513..9f32d31addb 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -32,6 +32,11 @@ using std::vector; using boost::format; +/* A variadic template that does nothing. Useful to call a function + for all variadic arguments but ignoring the result. */ +struct nop { template nop(T...) {} }; + + struct FormatOrString { string s; @@ -46,16 +51,6 @@ struct FormatOrString ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion takes place). */ -inline void formatHelper(boost::format & f) -{ -} - -template -inline void formatHelper(boost::format & f, T x, Args... args) -{ - formatHelper(f % x, args...); -} - inline std::string fmt(const std::string & s) { return s; @@ -75,7 +70,7 @@ template inline std::string fmt(const std::string & fs, Args... args) { boost::format f(fs); - formatHelper(f, args...); + nop{boost::io::detail::feed(f, args)...}; return f.str(); } From b30f5784d0184688de964f6239e373b62101ebc4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 15 May 2017 17:26:20 +0200 Subject: [PATCH 0343/2196] Linux sandbox: Don't barf on invalid paths This is useful when we're using a diverted store (e.g. "--store local?root=/tmp/nix") in conjunction with a statically-linked sh from the host store (e.g. "sandbox-paths =/bin/sh=/nix/store/.../bin/busybox"). --- src/libstore/build.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 270500d81c9..5ec4cbf6654 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1774,6 +1774,7 @@ void DerivationGoal::startBuilder() try { if (worker.store.isInStore(i.second.source)) worker.store.computeFSClosure(worker.store.toStorePath(i.second.source), closure); + } catch (InvalidPath & e) { } catch (Error & e) { throw Error(format("while processing ‘build-sandbox-paths’: %s") % e.what()); } From a2d92bb20e82a0957067ede60e91fab256948b41 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 15 May 2017 17:30:33 +0200 Subject: [PATCH 0344/2196] Add --with-sandbox-shell configure flag And add a 116 KiB ash shell from busybox to the release build. This helps to make sandbox builds work out of the box on non-NixOS systems and with diverted stores. 
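For example (illustrative invocation; the busybox path is a placeholder):

    ./configure --with-sandbox-shell=/path/to/busybox-static/bin/busybox

On Linux the baked-in path becomes the default for the sandbox-paths option,
roughly equivalent to putting

    sandbox-paths = /bin/sh=/path/to/busybox-static/bin/busybox

in the Nix configuration, except that no per-user configuration is needed.
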
--- Makefile.config.in | 1 + configure.ac | 6 ++++++ release-common.nix | 21 +++++++++++++++++++++ release.nix | 9 ++++----- shell.nix | 7 +++---- src/libstore/globals.cc | 4 ++-- src/libstore/local.mk | 2 +- 7 files changed, 38 insertions(+), 12 deletions(-) create mode 100644 release-common.nix diff --git a/Makefile.config.in b/Makefile.config.in index 3cae30d487d..45a70cd6dd1 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -28,6 +28,7 @@ localstatedir = @localstatedir@ mandir = @mandir@ pkglibdir = $(libdir)/$(PACKAGE_NAME) prefix = @prefix@ +sandbox_shell = @sandbox_shell@ storedir = @storedir@ sysconfdir = @sysconfdir@ doc_generate = @doc_generate@ diff --git a/configure.ac b/configure.ac index c7026cf954d..24a95ce56f3 100644 --- a/configure.ac +++ b/configure.ac @@ -240,6 +240,12 @@ fi AC_SUBST(tarFlags) +AC_ARG_WITH(sandbox-shell, AC_HELP_STRING([--with-sandbox-shell=PATH], + [path of a statically-linked shell to use as /bin/sh in sandboxes]), + sandbox_shell=$withval) +AC_SUBST(sandbox_shell) + + # Expand all variables in config.status. test "$prefix" = NONE && prefix=$ac_default_prefix test "$exec_prefix" = NONE && exec_prefix='${prefix}' diff --git a/release-common.nix b/release-common.nix new file mode 100644 index 00000000000..8047c75bdb7 --- /dev/null +++ b/release-common.nix @@ -0,0 +1,21 @@ +{ pkgs }: + +rec { + sh = pkgs.busybox.override { + useMusl = true; + enableStatic = true; + enableMinimal = true; + extraConfig = '' + CONFIG_ASH y + CONFIG_ASH_BUILTIN_ECHO y + CONFIG_ASH_BUILTIN_TEST y + CONFIG_ASH_OPTIMIZE_FOR_SIZE y + ''; + }; + + configureFlags = + [ "--disable-init-state" + "--enable-gc" + "--with-sandbox-shell=${sh}/bin/busybox" + ]; +} diff --git a/release.nix b/release.nix index 54d20c868da..f1a553d01cc 100644 --- a/release.nix +++ b/release.nix @@ -66,6 +66,8 @@ let with import { inherit system; }; + with import ./release-common.nix { inherit pkgs; }; + releaseTools.nixBuild { name = "nix"; src = tarball; @@ -83,11 +85,8 @@ let customMemoryManagement = false; }); - configureFlags = '' - --disable-init-state - --enable-gc - --sysconfdir=/etc - ''; + configureFlags = configureFlags ++ + [ "--sysconfdir=/etc" ]; enableParallelBuilding = true; diff --git a/shell.nix b/shell.nix index 8645d36020e..c4e2a20f8fa 100644 --- a/shell.nix +++ b/shell.nix @@ -2,6 +2,8 @@ with import {}; +with import ./release-common.nix { inherit pkgs; }; + (if useClang then clangStdenv else stdenv).mkDerivation { name = "nix"; @@ -22,10 +24,7 @@ with import {}; perlPackages.DBDSQLite ]; - configureFlags = - [ "--disable-init-state" - "--enable-gc" - ]; + inherit configureFlags; enableParallelBuilding = true; diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 4bdbde989ab..3dd2508a26d 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -47,8 +47,8 @@ Settings::Settings() auto s = getEnv("NIX_REMOTE_SYSTEMS"); if (s != "") builderFiles = tokenizeString(s, ":"); -#if __linux__ - sandboxPaths = tokenizeString("/bin/sh=" BASH_PATH); +#if defined(__linux__) && defined(SANDBOX_SHELL) + sandboxPaths = tokenizeString("/bin/sh=" SANDBOX_SHELL); #endif allowedImpureHostPrefixes = tokenizeString(DEFAULT_ALLOWED_IMPURE_PREFIXES); diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 4da20330cf3..e06002587f9 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -27,7 +27,7 @@ libstore_CXXFLAGS = \ -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ -DNIX_BIN_DIR=\"$(bindir)\" \ - -DBASH_PATH="\"$(bash)\"" \ + 
-DSANDBOX_SHELL="\"$(sandbox_shell)\"" \ -DLSOF=\"$(lsof)\" $(d)/local-store.cc: $(d)/schema.sql.hh From c05d9ae7a5f72a6575130cf92fb54e3f90d1927d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 15 May 2017 18:44:58 +0200 Subject: [PATCH 0345/2196] Disallow outputHash being null or an empty string Fixes #1384. --- src/libexpr/primops.cc | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 615cc813843..cc23827a17a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -534,7 +534,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * PathSet context; - string outputHash, outputHashAlgo; + std::experimental::optional outputHash; + std::string outputHashAlgo; bool outputHashRecursive = false; StringSet outputs; @@ -703,7 +704,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * throw EvalError(format("derivation names are not allowed to end in ‘%1%’, at %2%") % drvExtension % posDrvName); - if (outputHash != "") { + if (outputHash) { /* Handle fixed-output derivations. */ if (outputs.size() != 1 || *(outputs.begin()) != "out") throw Error(format("multiple outputs are not supported in fixed-output derivations, at %1%") % posDrvName); @@ -711,13 +712,13 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * HashType ht = parseHashType(outputHashAlgo); if (ht == htUnknown) throw EvalError(format("unknown hash algorithm ‘%1%’, at %2%") % outputHashAlgo % posDrvName); - Hash h = parseHash16or32(ht, outputHash); + Hash h = parseHash16or32(ht, *outputHash); outputHash = printHash(h); if (outputHashRecursive) outputHashAlgo = "r:" + outputHashAlgo; Path outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName); drv.env["out"] = outPath; - drv.outputs["out"] = DerivationOutput(outPath, outputHashAlgo, outputHash); + drv.outputs["out"] = DerivationOutput(outPath, outputHashAlgo, *outputHash); } else { From e80257f12209c8fbb709b901039ef5199111276e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 15 May 2017 18:50:54 +0200 Subject: [PATCH 0346/2196] Simplify fixed-output check --- src/libstore/build.cc | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 5ec4cbf6654..91156d313e2 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1066,12 +1066,8 @@ void DerivationGoal::haveDerivation() /* Reject doing a hash build of anything other than a fixed-output derivation. */ - if (buildMode == bmHash) { - if (drv->outputs.size() != 1 || - drv->outputs.find("out") == drv->outputs.end() || - drv->outputs["out"].hashAlgo == "") - throw Error(format("cannot do a hash build of non-fixed-output derivation ‘%1%’") % drvPath); - } + if (buildMode == bmHash && !drv->isFixedOutput()) + throw Error("cannot do a hash build of non-fixed-output derivation ‘%1%’", drvPath); /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build From 5ee06e612a93a30bfa3b2129a3951e0c36f95602 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benno=20F=C3=BCnfst=C3=BCck?= Date: Mon, 15 May 2017 19:32:51 +0200 Subject: [PATCH 0347/2196] nar-accessor: non-recursive NarMember::find This avoids a possible stack overflow if directories are very deeply nested. 
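Illustrative trace of the new iterative walk, using the /foo/bar layout from the
nar-index test (a sketch of the loop added below, not extra code in this patch):

    // find("/foo/bar"):
    //   current = &root
    //   skip '/', look up "foo" in current->children   -> current = the "foo" member
    //   skip '/', look up "bar" in current->children   -> current = the "bar" member
    //   nothing left to consume                        -> return current
    //
    // Stack usage stays constant no matter how deeply the NAR nests directories.
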
--- src/libstore/nar-accessor.cc | 42 ++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index ee1cf385c08..c84bb1dea32 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -21,25 +21,6 @@ struct NarMember /* If this is a directory, all the children of the directory. */ std::map children; - - NarMember* find(const Path & path) - { - if(path == "") return this; - - if(type != FSAccessor::Type::tDirectory) { - return nullptr; - } - - auto split = std::find(path.begin() + 1, path.end(), '/'); - std::string child_name(path.begin() + 1, split); - std::string remaining(split, path.end()); - - auto child = children.find(child_name); - if(child == children.end()) return nullptr; - - return child->second.find(remaining); - } - }; struct NarIndexer : ParseSink, StringSource @@ -114,8 +95,27 @@ struct NarIndexer : ParseSink, StringSource NarMember* find(const Path & path) { Path canon = path == "" ? "" : canonPath(path); - NarMember* result = root.find(canon); - return result; + NarMember* current = &root; + auto end = path.end(); + for(auto it = path.begin(); it != end; ) { + // because it != end, the remaining component is non-empty so we need + // a directory + if(current->type != FSAccessor::Type::tDirectory) return nullptr; + + // skip slash (canonPath above ensures that this is always a slash) + assert(*it == '/'); + it += 1; + + // lookup current component + auto next = std::find(it, end, '/'); + auto child = current->children.find(std::string(it, next)); + if(child == current->children.end()) return nullptr; + current = &child->second; + + it = next; + } + + return current; } NarMember& at(const Path & path) { From a1f428b13bd003caaf3a1d1da6e934d52b6ea6dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benno=20F=C3=BCnfst=C3=BCck?= Date: Mon, 15 May 2017 19:35:36 +0200 Subject: [PATCH 0348/2196] nar-accessor.cc: remove unused member NarIndexer::currentName --- src/libstore/nar-accessor.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index c84bb1dea32..82595e76a9b 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -28,7 +28,6 @@ struct NarIndexer : ParseSink, StringSource NarMember root; std::stack parents; - std::string currentName; std::string currentStart; bool isExec = false; @@ -56,7 +55,7 @@ struct NarIndexer : ParseSink, StringSource void createDirectory(const Path & path) override { - createMember(path, {FSAccessor::Type::tDirectory, false, 0, 0 }); + createMember(path, {FSAccessor::Type::tDirectory, false, 0, 0 }); } void createRegularFile(const Path & path) override From b01d62285cdcd376a8db1863049c68d8c7238837 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 May 2017 16:09:57 +0200 Subject: [PATCH 0349/2196] Improve progress indicator --- src/libexpr/eval.cc | 2 +- src/libexpr/get-drvs.cc | 3 +- src/libexpr/primops.cc | 8 +- src/libexpr/primops/fetchgit.cc | 2 +- src/libstore/build.cc | 31 ++- src/libstore/builtins.cc | 3 - src/libstore/download.cc | 38 +--- src/libstore/download.hh | 1 - src/libstore/export-import.cc | 8 +- src/libstore/gc.cc | 2 +- src/libstore/http-binary-cache-store.cc | 2 - src/libstore/optimise-store.cc | 2 +- src/libstore/store-api.cc | 9 +- src/libutil/logging.cc | 13 +- src/libutil/logging.hh | 91 ++++++--- src/libutil/util.cc | 2 +- src/libutil/util.hh | 2 + src/nix-channel/nix-channel.cc | 1 - 
src/nix-daemon/nix-daemon.cc | 7 +- src/nix-env/nix-env.cc | 2 +- src/nix-instantiate/nix-instantiate.cc | 2 +- src/nix/installables.cc | 6 +- src/nix/main.cc | 1 + src/nix/progress-bar.cc | 251 +++++++++++++++++++----- src/nix/sigs.cc | 6 +- src/nix/verify.cc | 12 +- 26 files changed, 339 insertions(+), 168 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 5e1ae63c482..0cdce602d7b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -642,7 +642,7 @@ void EvalState::evalFile(const Path & path, Value & v) return; } - Activity act(*logger, lvlTalkative, format("evaluating file ‘%1%’") % path2); + printTalkative("evaluating file ‘%1%’", path2); Expr * e = parseExprFromFile(checkSourcePath(path2)); try { eval(e, v); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index ae9fb0e5ec3..4200e8fd675 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -289,7 +289,7 @@ static void getDerivations(EvalState & state, Value & vIn, bound to the attribute with the "lower" name should take precedence). */ for (auto & i : v.attrs->lexicographicOrder()) { - Activity act(*logger, lvlDebug, format("evaluating attribute ‘%1%’") % i->name); + debug("evaluating attribute ‘%1%’", i->name); if (!std::regex_match(std::string(i->name), attrRegex)) continue; string pathPrefix2 = addToPath(pathPrefix, i->name); @@ -310,7 +310,6 @@ static void getDerivations(EvalState & state, Value & vIn, else if (v.isList()) { for (unsigned int n = 0; n < v.listSize(); ++n) { - Activity act(*logger, lvlDebug, "evaluating list element"); string pathPrefix2 = addToPath(pathPrefix, (format("%1%") % n).str()); if (getDerivation(state, *v.listElems()[n], pathPrefix2, drvs, done, ignoreAssertionFailures)) getDerivations(state, *v.listElems()[n], pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index cc23827a17a..93b66269dd0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -127,7 +127,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args env->values[displ++] = attr.value; } - Activity act(*logger, lvlTalkative, format("evaluating file ‘%1%’") % path); + printTalkative("evaluating file ‘%1%’", path); Expr * e = state.parseExprFromFile(resolveExprPath(path), staticEnv); e->eval(state, *env, v); @@ -326,8 +326,6 @@ typedef list ValueList; static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * args, Value & v) { - Activity act(*logger, lvlDebug, "finding dependencies"); - state.forceAttrs(*args[0], pos); /* Get the start set. */ @@ -499,8 +497,6 @@ void prim_valueSize(EvalState & state, const Pos & pos, Value * * args, Value & derivation. */ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * args, Value & v) { - Activity act(*logger, lvlVomit, "evaluating derivation"); - state.forceAttrs(*args[0], pos); /* Figure out the name first (for stack backtraces). 
*/ @@ -544,7 +540,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * for (auto & i : args[0]->attrs->lexicographicOrder()) { if (i->name == state.sIgnoreNulls) continue; string key = i->name; - Activity act(*logger, lvlVomit, format("processing attribute ‘%1%’") % key); + vomit("processing attribute ‘%1%’", key); auto handleHashMode = [&](const std::string & s) { if (s == "recursive") outputHashRecursive = true; diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index 09e2c077bab..3e4ece2cffd 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -17,7 +17,7 @@ Path exportGit(ref store, const std::string & uri, const std::string & re runProgram("git", true, { "init", "--bare", cacheDir }); } - Activity act(*logger, lvlInfo, format("fetching Git repository ‘%s’") % uri); + //Activity act(*logger, lvlInfo, format("fetching Git repository ‘%s’") % uri); std::string localRef = "pid-" + std::to_string(getpid()); Path localRefFile = cacheDir + "/refs/heads/" + localRef; diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 91156d313e2..44cae3431fc 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -120,6 +120,8 @@ class Goal : public std::enable_shared_from_this /* Whether the goal is finished. */ ExitCode exitCode; + Activity act; + Goal(Worker & worker) : worker(worker) { nrFailed = nrNoSubstituters = nrIncompleteClosure = 0; @@ -168,7 +170,8 @@ class Goal : public std::enable_shared_from_this virtual string key() = 0; protected: - void amDone(ExitCode result); + + virtual void amDone(ExitCode result); }; @@ -902,6 +905,12 @@ class DerivationGoal : public Goal void repairClosure(); + void amDone(ExitCode result) + { + logger->event(evBuildFinished, act, result == ecSuccess); + Goal::amDone(result); + } + void done(BuildResult::Status status, const string & msg = ""); }; @@ -920,6 +929,8 @@ DerivationGoal::DerivationGoal(const Path & drvPath, const StringSet & wantedOut state = &DerivationGoal::getDerivation; name = (format("building of ‘%1%’") % drvPath).str(); trace("created"); + + logger->event(evBuildCreated, act, drvPath); } @@ -935,6 +946,8 @@ DerivationGoal::DerivationGoal(const Path & drvPath, const BasicDerivation & drv name = (format("building of %1%") % showPaths(drv.outputPaths())).str(); trace("created"); + logger->event(evBuildCreated, act, drvPath); + /* Prevent the .chroot directory from being garbage-collected. (See isActiveTempFile() in gc.cc.) */ worker.store.addTempRoot(drvPath); @@ -2112,6 +2125,8 @@ void DerivationGoal::startBuilder() } debug(msg); } + + logger->event(evBuildStarted, act); } @@ -2857,7 +2872,7 @@ void DerivationGoal::registerOutputs() contained in it. Compute the SHA-256 NAR hash at the same time. The hash is stored in the database so that we can verify later on whether nobody has messed with the store. 
*/ - Activity act(*logger, lvlTalkative, format("scanning for references inside ‘%1%’") % path); + debug("scanning for references inside ‘%1%’", path); HashResult hash; PathSet references = scanForReferences(actualPath, allPaths, hash); @@ -3130,6 +3145,7 @@ void DerivationGoal::flushLine() logTail.push_back(currentLogLine); if (logTail.size() > settings.logLines) logTail.pop_front(); } + logger->event(evBuildOutput, act, currentLogLine); currentLogLine = ""; currentLogLinePos = 0; } @@ -3244,6 +3260,12 @@ class SubstitutionGoal : public Goal void handleEOF(int fd); Path getStorePath() { return storePath; } + + void amDone(ExitCode result) + { + logger->event(evSubstitutionFinished, act, result == ecSuccess); + Goal::amDone(result); + } }; @@ -3256,6 +3278,7 @@ SubstitutionGoal::SubstitutionGoal(const Path & storePath, Worker & worker, bool state = &SubstitutionGoal::init; name = (format("substitution of ‘%1%’") % storePath).str(); trace("created"); + logger->event(evSubstitutionCreated, act, storePath); } @@ -3391,6 +3414,8 @@ void SubstitutionGoal::tryToRun() printInfo(format("fetching path ‘%1%’...") % storePath); + logger->event(evSubstitutionStarted, act); + outPipe.create(); promise = std::promise(); @@ -3637,7 +3662,7 @@ void Worker::run(const Goals & _topGoals) { for (auto & i : _topGoals) topGoals.insert(i); - Activity act(*logger, lvlDebug, "entered goal loop"); + debug("entered goal loop"); while (1) { diff --git a/src/libstore/builtins.cc b/src/libstore/builtins.cc index c5dbd57f8bc..8a5cf3327d4 100644 --- a/src/libstore/builtins.cc +++ b/src/libstore/builtins.cc @@ -28,9 +28,6 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) DownloadRequest request(url); request.verifyTLS = false; - /* Show a progress indicator, even though stderr is not a tty. */ - request.showProgress = DownloadRequest::yes; - /* Note: have to use a fresh downloader here because we're in a forked process. */ auto data = makeDownloader()->download(request); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index dc1a3d94f74..63e498f0603 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -63,6 +63,7 @@ struct CurlDownloader : public Downloader CurlDownloader & downloader; DownloadRequest request; DownloadResult result; + Activity act; bool done = false; // whether either the success or failure function has been called std::function success; std::function failure; @@ -70,10 +71,6 @@ struct CurlDownloader : public Downloader bool active = false; // whether the handle has been added to the multi object std::string status; - bool showProgress = false; - double prevProgressTime{0}, startTime{0}; - unsigned int moveBack{1}; - unsigned int attempt = 0; /* Don't start this download until the specified time point @@ -87,12 +84,10 @@ struct CurlDownloader : public Downloader DownloadItem(CurlDownloader & downloader, const DownloadRequest & request) : downloader(downloader), request(request) { - showProgress = - request.showProgress == DownloadRequest::yes || - (request.showProgress == DownloadRequest::automatic && isatty(STDERR_FILENO)); - if (!request.expectedETag.empty()) requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); + + logger->event(evDownloadCreated, act, request.uri); } ~DownloadItem() @@ -109,6 +104,7 @@ struct CurlDownloader : public Downloader } catch (...) 
{ ignoreException(); } + logger->event(evDownloadDestroyed, act); } template @@ -171,19 +167,7 @@ struct CurlDownloader : public Downloader int progressCallback(double dltotal, double dlnow) { - if (showProgress) { - double now = getTime(); - if (prevProgressTime <= now - 1) { - string s = (format(" [%1$.0f/%2$.0f KiB, %3$.1f KiB/s]") - % (dlnow / 1024.0) - % (dltotal / 1024.0) - % (now == startTime ? 0 : dlnow / 1024.0 / (now - startTime))).str(); - std::cerr << "\e[" << moveBack << "D" << s; - moveBack = s.size(); - std::cerr.flush(); - prevProgressTime = now; - } - } + logger->event(evDownloadProgress, act, dltotal, dlnow); return _isInterrupted; } @@ -201,13 +185,6 @@ struct CurlDownloader : public Downloader void init() { - // FIXME: handle parallel downloads. - if (showProgress) { - std::cerr << (format("downloading ‘%1%’... ") % request.uri); - std::cerr.flush(); - startTime = getTime(); - } - if (!req) req = curl_easy_init(); curl_easy_reset(req); @@ -263,10 +240,6 @@ struct CurlDownloader : public Downloader void finish(CURLcode code) { - if (showProgress) - //std::cerr << "\e[" << moveBack << "D\e[K\n"; - std::cerr << "\n"; - long httpStatus = 0; curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus); @@ -292,6 +265,7 @@ struct CurlDownloader : public Downloader try { result.data = decodeContent(encoding, ref(result.data)); callSuccess(success, failure, const_cast(result)); + logger->event(evDownloadSucceeded, act, result.data->size()); } catch (...) { done = true; callFailure(failure, std::current_exception()); diff --git a/src/libstore/download.hh b/src/libstore/download.hh index 62f3860b9da..7d8982d64c4 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -13,7 +13,6 @@ struct DownloadRequest std::string uri; std::string expectedETag; bool verifyTLS = true; - enum { yes, no, automatic } showProgress = yes; bool head = false; size_t tries = 5; unsigned int baseRetryTimeMs = 250; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 2b8ab063e18..6e8bc692cdf 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -30,13 +30,13 @@ void Store::exportPaths(const Paths & paths, Sink & sink) std::reverse(sorted.begin(), sorted.end()); std::string doneLabel("paths exported"); - logger->incExpected(doneLabel, sorted.size()); + //logger->incExpected(doneLabel, sorted.size()); for (auto & path : sorted) { - Activity act(*logger, lvlInfo, format("exporting path ‘%s’") % path); + //Activity act(*logger, lvlInfo, format("exporting path ‘%s’") % path); sink << 1; exportPath(path, sink); - logger->incProgress(doneLabel); + //logger->incProgress(doneLabel); } sink << 0; @@ -81,7 +81,7 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, info.path = readStorePath(*this, source); - Activity act(*logger, lvlInfo, format("importing path ‘%s’") % info.path); + //Activity act(*logger, lvlInfo, format("importing path ‘%s’") % info.path); info.references = readStorePaths(*this, source); diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 3e7e42cbc96..3cdbb114a79 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -615,7 +615,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path) auto realPath = realStoreDir + "/" + baseNameOf(path); if (realPath == linksDir || realPath == trashDir) return; - Activity act(*logger, lvlDebug, format("considering whether to delete ‘%1%’") % path); + //Activity act(*logger, lvlDebug, format("considering whether to delete ‘%1%’") % path); if 
(!isStorePath(path) || !isValidPath(path)) { /* A lock file belonging to a path that we're building right diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 37a7d6ace14..cead81514ab 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -50,7 +50,6 @@ class HttpBinaryCacheStore : public BinaryCacheStore { try { DownloadRequest request(cacheUri + "/" + path); - request.showProgress = DownloadRequest::no; request.head = true; request.tries = 5; getDownloader()->download(request); @@ -76,7 +75,6 @@ class HttpBinaryCacheStore : public BinaryCacheStore std::function failure) override { DownloadRequest request(cacheUri + "/" + path); - request.showProgress = DownloadRequest::no; request.tries = 8; getDownloader()->enqueueDownload(request, diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index d354812e3da..56167c4dfae 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -240,7 +240,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats) for (auto & i : paths) { addTempRoot(i); if (!isValidPath(i)) continue; /* path was GC'ed, probably */ - Activity act(*logger, lvlChatty, format("hashing files in ‘%1%’") % i); + //Activity act(*logger, lvlChatty, format("hashing files in ‘%1%’") % i); optimisePath_(stats, realStoreDir + "/" + baseNameOf(i), inodeHash); } } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 96799adb1e6..e6cbd53dc80 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -822,7 +822,7 @@ void copyPaths(ref from, ref to, const PathSet & storePaths, std::string copiedLabel = "copied"; - logger->setExpected(copiedLabel, missing.size()); + //logger->setExpected(copiedLabel, missing.size()); ThreadPool pool; @@ -838,13 +838,14 @@ void copyPaths(ref from, ref to, const PathSet & storePaths, checkInterrupt(); if (!to->isValidPath(storePath)) { - Activity act(*logger, lvlInfo, format("copying ‘%s’...") % storePath); + //Activity act(*logger, lvlInfo, format("copying ‘%s’...") % storePath); copyStorePath(from, to, storePath, false, dontCheckSigs); - logger->incProgress(copiedLabel); + //logger->incProgress(copiedLabel); } else - logger->incExpected(copiedLabel, -1); + ; + //logger->incExpected(copiedLabel, -1); }); pool.process(); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index afcc2ec5854..2d0acca2421 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -1,6 +1,8 @@ #include "logging.hh" #include "util.hh" +#include + namespace nix { Logger * logger = makeDefaultLogger(); @@ -42,12 +44,7 @@ class SimpleLogger : public Logger writeToStderr(prefix + (tty ? 
fs.s : filterANSIEscapes(fs.s)) + "\n"); } - void startActivity(Activity & activity, Verbosity lvl, const FormatOrString & fs) override - { - log(lvl, fs); - } - - void stopActivity(Activity & activity) override + void event(const Event & ev) override { } }; @@ -79,4 +76,8 @@ Logger * makeDefaultLogger() return new SimpleLogger(); } +std::atomic Activity::nextId{(uint64_t) getpid() << 32}; + +Activity::Activity() : id(nextId++) { }; + } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index a8c69dbd956..ddfc336fee0 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -13,7 +13,64 @@ typedef enum { lvlVomit } Verbosity; -class Activity; +class Activity +{ + static std::atomic nextId; +public: + typedef uint64_t t; + const t id; + Activity(); + Activity(const Activity & act) : id(act.id) { }; + Activity(uint64_t id) : id(id) { }; +}; + +typedef enum { + evBuildCreated = 0, + evBuildStarted = 1, + evBuildOutput = 2, + evBuildFinished = 3, + evDownloadCreated = 4, + evDownloadDestroyed = 5, + evDownloadProgress = 6, + evDownloadSucceeded = 7, + evSubstitutionCreated = 8, + evSubstitutionStarted = 9, + evSubstitutionFinished = 10, +} EventType; + +struct Event +{ + struct Field + { + // FIXME: use std::variant. + enum { tInt, tString } type; + uint64_t i = 0; + std::string s; + Field(const std::string & s) : type(tString), s(s) { } + Field(const char * s) : type(tString), s(s) { } + Field(const uint64_t & i) : type(tInt), i(i) { } + Field(const Activity & act) : type(tInt), i(act.id) { } + }; + + typedef std::vector Fields; + + EventType type; + Fields fields; + + std::string getS(size_t n) const + { + assert(n < fields.size()); + assert(fields[n].type == Field::tString); + return fields[n].s; + } + + uint64_t getI(size_t n) const + { + assert(n < fields.size()); + assert(fields[n].type == Field::tInt); + return fields[n].i; + } +}; class Logger { @@ -32,34 +89,16 @@ public: virtual void warn(const std::string & msg); - virtual void setExpected(const std::string & label, uint64_t value = 1) { } - virtual void setProgress(const std::string & label, uint64_t value = 1) { } - virtual void incExpected(const std::string & label, uint64_t value = 1) { } - virtual void incProgress(const std::string & label, uint64_t value = 1) { } - -private: - - virtual void startActivity(Activity & activity, Verbosity lvl, const FormatOrString & fs) = 0; - - virtual void stopActivity(Activity & activity) = 0; - -}; - -class Activity -{ -public: - Logger & logger; - - Activity(Logger & logger, Verbosity lvl, const FormatOrString & fs) - : logger(logger) + template + void event(EventType type, const Args & ... 
args) { - logger.startActivity(*this, lvl, fs); + Event ev; + ev.type = type; + nop{(ev.fields.emplace_back(Event::Field(args)), 1)...}; + event(ev); } - ~Activity() - { - logger.stopActivity(*this); - } + virtual void event(const Event & ev) = 0; }; extern Logger * logger; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 1d1f68fc845..16f4b232e6c 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -372,7 +372,7 @@ void deletePath(const Path & path) void deletePath(const Path & path, unsigned long long & bytesFreed) { - Activity act(*logger, lvlDebug, format("recursively deleting path ‘%1%’") % path); + //Activity act(*logger, lvlDebug, format("recursively deleting path ‘%1%’") % path); bytesFreed = 0; _deletePath(path, bytesFreed); } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 5a9c9513fd5..7ea32e8d9f1 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -364,6 +364,8 @@ void ignoreException(); #define ANSI_NORMAL "\e[0m" #define ANSI_BOLD "\e[1m" #define ANSI_RED "\e[31;1m" +#define ANSI_GREEN "\e[32;1m" +#define ANSI_BLUE "\e[34;1m" /* Filter out ANSI escape codes from the given string. If ‘nixOnly’ is diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 2aaae2f471b..f2742bc3bbd 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -112,7 +112,6 @@ static void update(const StringSet & channelNames) // The URL doesn't unpack directly, so let's try treating it like a full channel folder with files in it // Check if the channel advertises a binary cache. DownloadRequest request(url + "/binary-cache-url"); - request.showProgress = DownloadRequest::no; try { auto dlRes = dl->download(request); extraAttrs = "binaryCacheURL = \"" + *dlRes.data + "\";"; diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index d2bb7b8c88b..44127635ded 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -82,12 +82,7 @@ class TunnelLogger : public Logger defaultLogger->log(lvl, fs); } - void startActivity(Activity & activity, Verbosity lvl, const FormatOrString & fs) override - { - log(lvl, fs); - } - - void stopActivity(Activity & activity) override + void event(const Event & ev) override { } }; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index da39bf36ab6..464bcee4a84 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -989,7 +989,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) try { if (i.hasFailed()) continue; - Activity act(*logger, lvlDebug, format("outputting query result ‘%1%’") % i.attrPath); + //Activity act(*logger, lvlDebug, format("outputting query result ‘%1%’") % i.attrPath); if (globals.prebuiltOnly && validPaths.find(i.queryOutPath()) == validPaths.end() && diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index c1b0b0ea092..25f0b1bd692 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -19,7 +19,7 @@ using namespace nix; static Expr * parseStdin(EvalState & state) { - Activity act(*logger, lvlTalkative, format("parsing standard input")); + //Activity act(*logger, lvlTalkative, format("parsing standard input")); return state.parseExprFromString(drainFD(0), absPath(".")); } diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 4756fc44bba..f23308b9bc3 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -223,9 +223,9 @@ PathSet 
InstallablesCommand::buildInstallables(ref store, bool dryRun) buildables.insert(b.begin(), b.end()); } - printMissing(store, buildables); - - if (!dryRun) + if (dryRun) + printMissing(store, buildables); + else store->buildPaths(buildables); PathSet outPaths; diff --git a/src/nix/main.cc b/src/nix/main.cc index 440ced97dfc..216f0bccef1 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -27,6 +27,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs void mainWrapped(int argc, char * * argv) { + verbosity = lvlError; settings.verboseBuild = false; initNix(); diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index 69811b28280..24e435f81e8 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -1,8 +1,12 @@ #include "progress-bar.hh" #include "util.hh" #include "sync.hh" +#include "store-api.hh" #include +#include + +#include namespace nix { @@ -12,31 +16,47 @@ class ProgressBar : public Logger struct ActInfo { - Activity * activity; - Verbosity lvl; - std::string s; + std::string s, s2; }; - struct Progress + struct DownloadInfo { - uint64_t expected = 0, progress = 0; + std::string uri; + uint64_t current = 0; + uint64_t expected = 0; }; struct State { + std::map builds; + std::set runningBuilds; + uint64_t succeededBuilds = 0; + uint64_t failedBuilds = 0; + std::map substitutions; + std::set runningSubstitutions; + uint64_t succeededSubstitutions = 0; + uint64_t downloadedBytes = 0; // finished downloads + std::map downloads; std::list activities; - std::map::iterator> its; - std::map progress; + std::map::iterator> its; }; Sync state_; + int width = 0; + public: + ProgressBar() + { + struct winsize ws; + if (ioctl(1, TIOCGWINSZ, &ws) == 0) + width = ws.ws_col; + } + ~ProgressBar() { auto state(state_.lock()); - assert(state->activities.empty()); writeToStderr("\r\e[K"); } @@ -52,52 +72,36 @@ class ProgressBar : public Logger update(state); } - void startActivity(Activity & activity, Verbosity lvl, const FormatOrString & fs) override - { - if (lvl > verbosity) return; - auto state(state_.lock()); - state->activities.emplace_back(ActInfo{&activity, lvl, fs.s}); - state->its.emplace(&activity, std::prev(state->activities.end())); - update(*state); - } - - void stopActivity(Activity & activity) override - { - auto state(state_.lock()); - auto i = state->its.find(&activity); - if (i == state->its.end()) return; - state->activities.erase(i->second); - state->its.erase(i); - update(*state); - } - - void setExpected(const std::string & label, uint64_t value) override + void createActivity(State & state, Activity::t activity, const std::string & s) { - auto state(state_.lock()); - state->progress[label].expected = value; - } - - void setProgress(const std::string & label, uint64_t value) override - { - auto state(state_.lock()); - state->progress[label].progress = value; + state.activities.emplace_back(ActInfo{s}); + state.its.emplace(activity, std::prev(state.activities.end())); } - void incExpected(const std::string & label, uint64_t value) override + void deleteActivity(State & state, Activity::t activity) { - auto state(state_.lock()); - state->progress[label].expected += value; + auto i = state.its.find(activity); + if (i != state.its.end()) { + state.activities.erase(i->second); + state.its.erase(i); + } } - void incProgress(const std::string & label, uint64_t value) override + void updateActivity(State & state, Activity::t activity, const std::string & s2) { - auto state(state_.lock()); - state->progress[label].progress += value; + auto i = 
state.its.find(activity); + assert(i != state.its.end()); + ActInfo info = *i->second; + state.activities.erase(i->second); + info.s2 = s2; + state.activities.emplace_back(info); + i->second = std::prev(state.activities.end()); } void update() { auto state(state_.lock()); + update(*state); } void update(State & state) @@ -113,28 +117,169 @@ class ProgressBar : public Logger if (!state.activities.empty()) { if (!status.empty()) line += " "; - line += state.activities.rbegin()->s; + auto i = state.activities.rbegin(); + line += i->s; + if (!i->s2.empty()) { + line += ": "; + line += i->s2; + } } line += "\e[K"; - writeToStderr(line); + writeToStderr(std::string(line, 0, width - 1)); } std::string getStatus(State & state) { std::string res; - for (auto & p : state.progress) - if (p.second.expected || p.second.progress) { - if (!res.empty()) res += ", "; - res += std::to_string(p.second.progress); - if (p.second.expected) { - res += "/"; - res += std::to_string(p.second.expected); - } - res += " "; res += p.first; + + if (state.failedBuilds) { + if (!res.empty()) res += ", "; + res += fmt(ANSI_RED "%d failed" ANSI_NORMAL, state.failedBuilds); + } + + if (!state.builds.empty() || state.succeededBuilds) + { + if (!res.empty()) res += ", "; + if (!state.runningBuilds.empty()) + res += fmt(ANSI_BLUE "%d" "/" ANSI_NORMAL, state.runningBuilds.size()); + res += fmt(ANSI_GREEN "%d/%d built" ANSI_NORMAL, + state.succeededBuilds, state.succeededBuilds + state.builds.size()); + } + + if (!state.substitutions.empty() || state.succeededSubstitutions) { + if (!res.empty()) res += ", "; + if (!state.runningSubstitutions.empty()) + res += fmt(ANSI_BLUE "%d" "/" ANSI_NORMAL, state.runningSubstitutions.size()); + res += fmt(ANSI_GREEN "%d/%d fetched" ANSI_NORMAL, + state.succeededSubstitutions, + state.succeededSubstitutions + state.substitutions.size()); + } + + if (!state.downloads.empty() || state.downloadedBytes) { + if (!res.empty()) res += ", "; + uint64_t expected = state.downloadedBytes, current = state.downloadedBytes; + for (auto & i : state.downloads) { + expected += i.second.expected; + current += i.second.current; } + res += fmt("%1$.0f/%2$.0f KiB", current / 1024.0, expected / 1024.0); + } + return res; } + + void event(const Event & ev) override + { + if (ev.type == evBuildCreated) { + auto state(state_.lock()); + state->builds[ev.getI(0)] = ev.getS(1); + update(*state); + } + + if (ev.type == evBuildStarted) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + state->runningBuilds.insert(act); + auto name = storePathToName(state->builds[act]); + if (hasSuffix(name, ".drv")) + name.resize(name.size() - 4); + createActivity(*state, act, fmt("building " ANSI_BOLD "%s" ANSI_NORMAL, name)); + update(*state); + } + + if (ev.type == evBuildFinished) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + if (ev.getI(1)) { + if (state->runningBuilds.count(act)) + state->succeededBuilds++; + } else + state->failedBuilds++; + state->runningBuilds.erase(act); + state->builds.erase(act); + deleteActivity(*state, act); + update(*state); + } + + if (ev.type == evBuildOutput) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + assert(state->runningBuilds.count(act)); + updateActivity(*state, act, ev.getS(1)); + update(*state); + } + + if (ev.type == evSubstitutionCreated) { + auto state(state_.lock()); + state->substitutions[ev.getI(0)] = ev.getS(1); + update(*state); + } + + if (ev.type == evSubstitutionStarted) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + 
state->runningSubstitutions.insert(act); + auto name = storePathToName(state->substitutions[act]); + createActivity(*state, act, fmt("fetching " ANSI_BOLD "%s" ANSI_NORMAL, name)); + update(*state); + } + + if (ev.type == evSubstitutionFinished) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + if (ev.getI(1)) { + if (state->runningSubstitutions.count(act)) + state->succeededSubstitutions++; + } + state->runningSubstitutions.erase(act); + state->substitutions.erase(act); + deleteActivity(*state, act); + update(*state); + } + + if (ev.type == evDownloadCreated) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + std::string uri = ev.getS(1); + state->downloads.emplace(act, DownloadInfo{uri}); + if (state->runningSubstitutions.empty()) // FIXME: hack + createActivity(*state, act, fmt("downloading " ANSI_BOLD "%s" ANSI_NORMAL "", uri)); + update(*state); + } + + if (ev.type == evDownloadProgress) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + auto i = state->downloads.find(act); + assert(i != state->downloads.end()); + i->second.expected = ev.getI(1); + i->second.current = ev.getI(2); + update(*state); + } + + if (ev.type == evDownloadSucceeded) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + auto i = state->downloads.find(act); + assert(i != state->downloads.end()); + state->downloadedBytes += ev.getI(1); + state->downloads.erase(i); + deleteActivity(*state, act); + update(*state); + } + + if (ev.type == evDownloadDestroyed) { + auto state(state_.lock()); + Activity::t act = ev.getI(0); + auto i = state->downloads.find(act); + if (i != state->downloads.end()) { + state->downloads.erase(i); + deleteActivity(*state, act); + update(*state); + } + } + } }; StartProgressBar::StartProgressBar() diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index d8d8c0f53df..3dd03771619 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -42,10 +42,10 @@ struct CmdCopySigs : StorePathsCommand std::string doneLabel = "done"; std::atomic added{0}; - logger->setExpected(doneLabel, storePaths.size()); + //logger->setExpected(doneLabel, storePaths.size()); auto doPath = [&](const Path & storePath) { - Activity act(*logger, lvlInfo, format("getting signatures for ‘%s’") % storePath); + //Activity act(*logger, lvlInfo, format("getting signatures for ‘%s’") % storePath); checkInterrupt(); @@ -76,7 +76,7 @@ struct CmdCopySigs : StorePathsCommand added += newSigs.size(); } - logger->incProgress(doneLabel); + //logger->incProgress(doneLabel); }; for (auto & storePath : storePaths) diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 2f8d02fa060..8facb4bef8a 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -65,7 +65,7 @@ struct CmdVerify : StorePathsCommand std::string untrustedLabel("untrusted"); std::string corruptedLabel("corrupted"); std::string failedLabel("failed"); - logger->setExpected(doneLabel, storePaths.size()); + //logger->setExpected(doneLabel, storePaths.size()); ThreadPool pool; @@ -73,7 +73,7 @@ struct CmdVerify : StorePathsCommand try { checkInterrupt(); - Activity act(*logger, lvlInfo, format("checking ‘%s’") % storePath); + //Activity act(*logger, lvlInfo, format("checking ‘%s’") % storePath); auto info = store->queryPathInfo(storePath); @@ -85,7 +85,7 @@ struct CmdVerify : StorePathsCommand auto hash = sink.finish(); if (hash.first != info->narHash) { - logger->incProgress(corruptedLabel); + //logger->incProgress(corruptedLabel); corrupted = 1; printError( format("path ‘%s’ was modified! 
expected hash ‘%s’, got ‘%s’") @@ -137,19 +137,19 @@ struct CmdVerify : StorePathsCommand } if (!good) { - logger->incProgress(untrustedLabel); + //logger->incProgress(untrustedLabel); untrusted++; printError(format("path ‘%s’ is untrusted") % info->path); } } - logger->incProgress(doneLabel); + //logger->incProgress(doneLabel); done++; } catch (Error & e) { printError(format(ANSI_RED "error:" ANSI_NORMAL " %s") % e.what()); - logger->incProgress(failedLabel); + //logger->incProgress(failedLabel); failed++; } }; From e46090edb101acac20ab1e6260a0ba98c177206a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 May 2017 11:58:01 +0200 Subject: [PATCH 0350/2196] builtins.match: Improve error message for bad regular expression Issue #1331. --- src/libexpr/primops.cc | 39 +++++++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 93b66269dd0..99ffddaeb80 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1709,26 +1709,33 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, ‘null’ or a list containing substring matches. */ static void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v) { - std::regex regex(state.forceStringNoCtx(*args[0], pos), std::regex::extended); + auto re = state.forceStringNoCtx(*args[0], pos); - PathSet context; - const std::string str = state.forceString(*args[1], context, pos); + try { + std::regex regex(re, std::regex::extended); - std::smatch match; - if (!std::regex_match(str, match, regex)) { - mkNull(v); - return; - } + PathSet context; + const std::string str = state.forceString(*args[1], context, pos); - // the first match is the whole string - const size_t len = match.size() - 1; - state.mkList(v, len); - for (size_t i = 0; i < len; ++i) { - if (!match[i+1].matched) - mkNull(*(v.listElems()[i] = state.allocValue())); - else - mkString(*(v.listElems()[i] = state.allocValue()), match[i + 1].str().c_str()); + std::smatch match; + if (!std::regex_match(str, match, regex)) { + mkNull(v); + return; + } + + // the first match is the whole string + const size_t len = match.size() - 1; + state.mkList(v, len); + for (size_t i = 0; i < len; ++i) { + if (!match[i+1].matched) + mkNull(*(v.listElems()[i] = state.allocValue())); + else + mkString(*(v.listElems()[i] = state.allocValue()), match[i + 1].str().c_str()); + } + + } catch (std::regex_error &) { + throw EvalError("invalid regular expression ‘%s’, at %s", re, pos); } } From f134fc4cbecc258771272c2418af3b92ade88f80 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 May 2017 12:18:18 +0200 Subject: [PATCH 0351/2196] Document that builtins.match takes a POSIX extended RE --- doc/manual/expressions/builtins.xml | 14 ++++++++++---- tests/lang/eval-okay-regex-match.nix | 3 +++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 63d13e184a1..8a32661066f 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -213,10 +213,11 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" builtins.match regex str - Returns a list if - regex matches - str precisely, otherwise returns null. - Each item in the list is a regex group. + Returns a list if the extended + POSIX regular expression regex + matches str precisely, otherwise returns + null. Each item in the list is a regex group. 
builtins.match "ab" "abc" @@ -236,6 +237,11 @@ builtins.match "a(b)(c)" "abc" Evaluates to [ "b" "c" ]. + +builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO " + + +Evaluates to [ "foo" ]. diff --git a/tests/lang/eval-okay-regex-match.nix b/tests/lang/eval-okay-regex-match.nix index ae6501532d1..273e2590713 100644 --- a/tests/lang/eval-okay-regex-match.nix +++ b/tests/lang/eval-okay-regex-match.nix @@ -17,8 +17,11 @@ assert matches "fo+" "foo"; assert matches "fo{1,2}" "foo"; assert !matches "fo{1,2}" "fooo"; assert !matches "fo*" "foobar"; +assert matches "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo "; +assert !matches "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo "; assert match "(.*)\\.nix" "foobar.nix" == [ "foobar" ]; +assert match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO " == [ "FOO" ]; assert splitFN "/path/to/foobar.nix" == [ "/path/to/" "/path/to" "foobar" "nix" ]; assert splitFN "foobar.cc" == [ null null "foobar" "cc" ]; From 9711524188494ff5bcbba8c6945add89fbb29713 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 24 May 2017 11:21:38 +0200 Subject: [PATCH 0352/2196] Fix #1380 It lacked a backslash. Use a raw string and single quotes around PS1 to simplify this. --- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index b7cf36d1607..d5f23a37fe6 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -438,7 +438,7 @@ int main(int argc, char ** argv) "[ -e $stdenv/setup ] && source $stdenv/setup; " "%3%" "set +e; " - "[ -n \"$PS1\" ] && PS1=\"\\n\\[\\033[1;32m\\][nix-shell:\\w]\\$\\[\\033[0m\\] \"; " + R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s" "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; " "unset NIX_ENFORCE_PURITY; " "unset NIX_INDENT_MAKE; " From a7e55151a8d45d987ca42ba318c44ed3ccdeecca Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 24 May 2017 11:33:42 +0200 Subject: [PATCH 0353/2196] Fix #1314 Also, make nix-shell respect --option. (Previously it only passed it along to nix-instantiate and nix-build.) 
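In rough terms, the change below makes the wrapper apply each "--option name value" pair to its own in-process settings in addition to forwarding it to the child nix-instantiate/nix-build invocation. The following standalone sketch shows only that pattern; the Settings type here is a hypothetical stand-in, not Nix's real settings class.

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for the global settings object.
    struct Settings {
        std::map<std::string, std::string> opts;
        void set(const std::string & name, const std::string & value) { opts[name] = value; }
    };

    int main(int argc, char ** argv)
    {
        Settings settings;
        std::vector<std::string> args(argv + 1, argv + argc), buildArgs;
        for (size_t n = 0; n < args.size(); ++n) {
            if (args[n] == "--option" && n + 2 < args.size()) {
                // Forward the flag to the child invocation...
                buildArgs.insert(buildArgs.end(), { args[n], args[n + 1], args[n + 2] });
                // ...and also honour it in this process, which is the gist of the fix.
                settings.set(args[n + 1], args[n + 2]);
                n += 2;
            }
        }
        for (auto & o : settings.opts)
            std::cout << o.first << " = " << o.second << std::endl;
        return 0;
    }

The hunk that follows does the equivalent inside the existing argument loop of nix-build.cc.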
--- src/nix-build/nix-build.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 7167e96f11d..42d68fdfdd7 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -196,6 +196,7 @@ int main(int argc, char ** argv) buildArgs.push_back(arg); buildArgs.push_back(args[n + 1]); buildArgs.push_back(args[n + 2]); + settings.set(args[n + 1], args[n + 2]); n += 2; } @@ -407,7 +408,7 @@ int main(int argc, char ** argv) env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; env["NIX_STORE"] = store->storeDir; - env["NIX_BUILD_CORES"] = settings.buildCores; + env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); auto passAsFile = tokenizeString(get(drv.env, "passAsFile", "")); From 86ea7d15665b7bf2bc018c3bfd91d0a948c8f3be Mon Sep 17 00:00:00 2001 From: Pyry Jahkola Date: Sun, 28 May 2017 15:48:57 +0300 Subject: [PATCH 0354/2196] Fix variable name typo in derivations doc --- doc/manual/expressions/derivations.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/expressions/derivations.xml b/doc/manual/expressions/derivations.xml index 5efe2213e37..6f6297565ca 100644 --- a/doc/manual/expressions/derivations.xml +++ b/doc/manual/expressions/derivations.xml @@ -100,7 +100,7 @@ outputs = [ "lib" "headers" "doc" ]; buildInputs = [ pkg.lib pkg.headers ]; - The first element of output determines the + The first element of outputs determines the default output. Thus, you could also write buildInputs = [ pkg pkg.headers ]; From 370428f86d03488756cfb27cb9126bf29767e848 Mon Sep 17 00:00:00 2001 From: Pyry Jahkola Date: Sun, 28 May 2017 20:47:35 +0300 Subject: [PATCH 0355/2196] Remove stray `>` in builtins doc --- doc/manual/expressions/builtins.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index f46a93ae0d5..125cbd78255 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -440,7 +440,7 @@ builtins.fromJSON ''{"x": [1, 2, 3], "y": null}'' Generate list of size length, with each element - i> equal to the value returned by + i equal to the value returned by generator i. 
For example, From 63145be2a5ac46a283f85c835fa84bf54db59bbe Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 May 2017 15:50:25 +0200 Subject: [PATCH 0356/2196] Fix typo --- src/libstore/local-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 207e8a40b6d..3ac23ec268f 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -923,7 +923,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & info.path, info.narHash.to_string(), h.to_string()); if (nar->size() != info.narSize) - throw Error("szie mismatch importing path ‘%s’; expected %s, got %s", + throw Error("size mismatch importing path ‘%s’; expected %s, got %s", info.path, info.narSize, nar->size()); if (requireSigs && !dontCheckSigs && !info.checkSignatures(*this, publicKeys)) From 588dad4084711e71a4dcb0758b2daf2fe03e2c8b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 May 2017 15:53:16 +0200 Subject: [PATCH 0357/2196] Fix build failure on Debian/Ubuntu http://hydra.nixos.org/build/53537463 --- src/libutil/logging.cc | 2 +- src/libutil/logging.hh | 1 - src/nix/progress-bar.cc | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 2d0acca2421..43245f61c60 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -76,7 +76,7 @@ Logger * makeDefaultLogger() return new SimpleLogger(); } -std::atomic Activity::nextId{(uint64_t) getpid() << 32}; +std::atomic nextId{(uint64_t) getpid() << 32}; Activity::Activity() : id(nextId++) { }; diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index ddfc336fee0..9ef6e3ee30e 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -15,7 +15,6 @@ typedef enum { class Activity { - static std::atomic nextId; public: typedef uint64_t t; const t id; diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index 24e435f81e8..2ecbea8eeea 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -24,6 +24,7 @@ class ProgressBar : public Logger std::string uri; uint64_t current = 0; uint64_t expected = 0; + DownloadInfo(const std::string & uri) : uri(uri) { } }; struct State From 6e01ecd112dce8d8bbe46c839f982892a3ffb589 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 May 2017 16:08:56 +0200 Subject: [PATCH 0358/2196] Fix nix-copy-closure test MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes client# error: size mismatch importing path ‘/nix/store/ywf5fihjlxwijm6ygh6s0a353b5yvq4d-libidn2-0.16’; expected 0, got 120264 This is mostly an artifact of the NixOS VM test environment, where the Nix database doesn't contain hashes/sizes. 
http://hydra.nixos.org/build/53537471 --- src/libstore/store-api.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index e6cbd53dc80..76ed9942256 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -547,6 +547,7 @@ void copyStorePath(ref srcStore, ref dstStore, if (!info->narHash && dontCheckSigs) { auto info2 = make_ref(*info); info2->narHash = hashString(htSHA256, *sink.s); + if (!info->narSize) info2->narSize = sink.s->size(); info = info2; } From 6cc6c15a2d50d0021d7242e9806ed6d54538de17 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 May 2017 11:34:24 +0200 Subject: [PATCH 0359/2196] Add a seccomp filter to prevent creating setuid/setgid binaries This prevents builders from setting the S_ISUID or S_ISGID bits, preventing users from using a nixbld* user to create a setuid/setgid binary to interfere with subsequent builds under the same nixbld* uid. This is based on aszlig's seccomp code (47f587700d646f5b03a42f2fa57c28875a31efbe). Reported by Linus Heckemann. --- configure.ac | 7 +++++++ release.nix | 2 ++ shell.nix | 3 ++- src/libstore/build.cc | 39 +++++++++++++++++++++++++++++++++++++++ src/libstore/local.mk | 4 ++++ 5 files changed, 54 insertions(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index 24a95ce56f3..9d8a81d0427 100644 --- a/configure.ac +++ b/configure.ac @@ -176,6 +176,13 @@ AC_SUBST(HAVE_SODIUM, [$have_sodium]) PKG_CHECK_MODULES([LIBLZMA], [liblzma], [CXXFLAGS="$LIBLZMA_CFLAGS $CXXFLAGS"]) +# Look for libseccomp, required for Linux sandboxing. +if test "$sys_name" = linux; then + PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp], + [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"]) +fi + + # Look for aws-cpp-sdk-s3. AC_LANG_PUSH(C++) AC_CHECK_HEADERS([aws/s3/S3Client.h], diff --git a/release.nix b/release.nix index f1a553d01cc..1e854a075f6 100644 --- a/release.nix +++ b/release.nix @@ -30,6 +30,7 @@ let docbook5 docbook5_xsl autoconf-archive git + libseccomp ]; configureFlags = "--enable-gc"; @@ -78,6 +79,7 @@ let openssl pkgconfig sqlite boehmgc ] + ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) (aws-sdk-cpp.override { diff --git a/shell.nix b/shell.nix index c4e2a20f8fa..435399f0b25 100644 --- a/shell.nix +++ b/shell.nix @@ -22,7 +22,8 @@ with import ./release-common.nix { inherit pkgs; }; # For nix-perl perl perlPackages.DBDSQLite - ]; + ] + ++ lib.optional stdenv.isLinux libseccomp; inherit configureFlags; diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 44cae3431fc..06d25960666 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -46,6 +46,7 @@ #include #include #include +#include #define pivot_root(new_root, put_old) (syscall(SYS_pivot_root, new_root, put_old)) #endif @@ -2298,6 +2299,42 @@ void DerivationGoal::doExportReferencesGraph() } +void setupSeccomp() +{ +#if __linux__ + scmp_filter_ctx ctx; + + if (!(ctx = seccomp_init(SCMP_ACT_ALLOW))) + throw SysError("unable to initialize seccomp mode 2"); + + Finally cleanup([&]() { + seccomp_release(ctx); + }); + + if (seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0) + throw SysError("unable to add 32-bit seccomp architecture"); + + for (int perm : { S_ISUID, S_ISGID }) { + // TODO: test chmod and fchmod. 
+ if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(chmod), 1, + SCMP_A1(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) + throw SysError("unable to add seccomp rule"); + + if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(fchmod), 1, + SCMP_A1(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) + throw SysError("unable to add seccomp rule"); + + if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(fchmodat), 1, + SCMP_A2(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) + throw SysError("unable to add seccomp rule"); + } + + if (seccomp_load(ctx) != 0) + throw SysError("unable to load seccomp BPF program"); +#endif +} + + void DerivationGoal::runChild() { /* Warning: in the child we should absolutely not make any SQLite @@ -2307,6 +2344,8 @@ void DerivationGoal::runChild() commonChildInit(builderOut); + setupSeccomp(); + bool setUser = true; /* Make the contents of netrc available to builtin:fetchurl diff --git a/src/libstore/local.mk b/src/libstore/local.mk index e06002587f9..ffdb55abc65 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -18,6 +18,10 @@ ifeq ($(OS), SunOS) libstore_LDFLAGS += -lsocket endif +ifeq ($(OS), Linux) + libstore_LDFLAGS += -lseccomp +endif + libstore_CXXFLAGS = \ -DNIX_PREFIX=\"$(prefix)\" \ -DNIX_STORE_DIR=\"$(storedir)\" \ From cf93397d3f1d2a8165a100482d07b7f4b7e5bf7f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 May 2017 14:18:36 +0200 Subject: [PATCH 0360/2196] Fix seccomp initialisation on i686-linux --- src/libstore/build.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 06d25960666..b6293c15ebc 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2311,7 +2311,8 @@ void setupSeccomp() seccomp_release(ctx); }); - if (seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0) + if (settings.thisSystem == "x86_64-linux" && + seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0) throw SysError("unable to add 32-bit seccomp architecture"); for (int perm : { S_ISUID, S_ISGID }) { From 1d9ab273bad34b004dfcfd486273d0df5fed1eca Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 May 2017 14:19:11 +0200 Subject: [PATCH 0361/2196] Add test for setuid seccomp filter --- release.nix | 5 +++ tests/setuid.nix | 108 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 113 insertions(+) create mode 100644 tests/setuid.nix diff --git a/release.nix b/release.nix index 1e854a075f6..ec6bda99577 100644 --- a/release.nix +++ b/release.nix @@ -219,6 +219,11 @@ let nix = build.x86_64-linux; system = "x86_64-linux"; }); + tests.setuid = pkgs.lib.genAttrs (pkgs.lib.filter (pkgs.lib.hasSuffix "-linux") systems) (system: + import ./tests/setuid.nix rec { + nix = build.${system}; inherit system; + }); + tests.binaryTarball = with import { system = "x86_64-linux"; }; vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test" diff --git a/tests/setuid.nix b/tests/setuid.nix new file mode 100644 index 00000000000..2508549c546 --- /dev/null +++ b/tests/setuid.nix @@ -0,0 +1,108 @@ +# Verify that Linux builds cannot create setuid or setgid binaries. + +{ system, nix }: + +with import { inherit system; }; + +makeTest { + + machine = + { config, lib, pkgs, ... }: + { virtualisation.writableStore = true; + nix.package = nix; + nix.binaryCaches = [ ]; + nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ]; + virtualisation.pathsInNixDB = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ]; + }; + + testScript = { nodes }: + '' + startAll; + + # Copying to /tmp should succeed. 
+ $machine->succeed('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 555 ]]'); + + $machine->succeed("rm /tmp/id"); + + # Creating a setuid binary should fail. + $machine->fail('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + chmod 4755 /tmp/id + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 555 ]]'); + + $machine->succeed("rm /tmp/id"); + + # Creating a setgid binary should fail. + $machine->fail('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + chmod 2755 /tmp/id + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 555 ]]'); + + $machine->succeed("rm /tmp/id"); + + # The checks should also work on 32-bit binaries. + $machine->fail('nix-build --option build-use-sandbox false -E \'(with import { system = "i686-linux"; }; runCommand "foo" {} " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + chmod 2755 /tmp/id + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 555 ]]'); + + $machine->succeed("rm /tmp/id"); + + # The tests above use fchmodat(). Test chmod() as well. + $machine->succeed('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"chmod 0666, qw(/tmp/id) or die\" + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 666 ]]'); + + $machine->succeed("rm /tmp/id"); + + $machine->fail('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"chmod 04755, qw(/tmp/id) or die\" + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 555 ]]'); + + $machine->succeed("rm /tmp/id"); + + # And test fchmod(). + $machine->succeed('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"my \\\$x; open \\\$x, qw(/tmp/id); chmod 01750, \\\$x or die\" + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 1750 ]]'); + + $machine->succeed("rm /tmp/id"); + + $machine->fail('nix-build --option build-use-sandbox false -E \'(with import {}; runCommand "foo" { buildInputs = [ perl ]; } " + mkdir -p $out + cp ${pkgs.coreutils}/bin/id /tmp/id + perl -e \"my \\\$x; open \\\$x, qw(/tmp/id); chmod 04777, \\\$x or die\" + ")\' '); + + $machine->succeed('[[ $(stat -c %a /tmp/id) = 555 ]]'); + + $machine->succeed("rm /tmp/id"); + ''; + +} From ff6becafa8efc2f7e6f2b9b889ba4adf20b8d524 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 12:37:04 +0200 Subject: [PATCH 0362/2196] Require seccomp only in multi-user setups --- src/libstore/build.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index b6293c15ebc..09cc2709ab7 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2345,7 +2345,11 @@ void DerivationGoal::runChild() commonChildInit(builderOut); - setupSeccomp(); + try { + setupSeccomp(); + } catch (...) 
{ + if (buildUser) throw; + } bool setUser = true; From d798349ede3d6eb6e92a2e4f95f6b2179407ceb9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 13:43:51 +0200 Subject: [PATCH 0363/2196] canonicalisePathMetaData(): Remove extended attributes / ACLs EAs/ACLs are not part of the NAR canonicalisation. Worse, setting an ACL allows a builder to create writable files in the Nix store. So get rid of them. Closes #185. --- src/libstore/local-store.cc | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 3ac23ec268f..5b03e86f3ea 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -27,6 +27,7 @@ #include #include #include +#include #endif #include @@ -407,6 +408,27 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) throw Error(format("file ‘%1%’ has an unsupported type") % path); +#if __linux__ + /* Remove extended attributes / ACLs. */ + ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); + + if (eaSize < 0) { + if (errno != ENOTSUP) + throw SysError("querying extended attributes of ‘%s’", path); + } else if (eaSize > 0) { + std::vector eaBuf(eaSize); + + if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0) + throw SysError("querying extended attributes of ‘%s’", path); + + for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) + if (lremovexattr(path.c_str(), eaName.c_str()) == -1) + throw SysError("removing extended attribute ‘%s’ from ‘%s’", eaName, path); + + assert(llistxattr(path.c_str(), nullptr, 0) == 0); + } +#endif + /* Fail if the file is not owned by the build user. This prevents us from messing up the ownership/permissions of files hard-linked into the output (e.g. "ln /etc/shadow $out/foo"). From 2ac99a32dab0d2ea59cb9e926f6d6d5b7ef638c6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 13:55:17 +0200 Subject: [PATCH 0364/2196] Add a seccomp rule to disallow setxattr() --- src/libstore/build.cc | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 09cc2709ab7..0a10efaed1d 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2315,8 +2315,8 @@ void setupSeccomp() seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0) throw SysError("unable to add 32-bit seccomp architecture"); + /* Prevent builders from creating setuid/setgid binaries. */ for (int perm : { S_ISUID, S_ISGID }) { - // TODO: test chmod and fchmod. if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(chmod), 1, SCMP_A1(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) throw SysError("unable to add seccomp rule"); @@ -2330,6 +2330,14 @@ void setupSeccomp() throw SysError("unable to add seccomp rule"); } + /* Prevent builders from creating EAs or ACLs. Not all filesystems + support these, and they're not allowed in the Nix store because + they're not representable in the NAR serialisation. 
*/ + if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(setxattr), 0) != 0 || + seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(lsetxattr), 0) != 0 || + seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(fsetxattr), 0) != 0) + throw SysError("unable to add seccomp rule"); + if (seccomp_load(ctx) != 0) throw SysError("unable to load seccomp BPF program"); #endif From d552d387585840f68fcc7507fca83feb6f937a10 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 14:35:50 +0200 Subject: [PATCH 0365/2196] Shut up some clang warnings --- src/libstore/build.cc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 0a10efaed1d..0a874bbf1cd 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -906,7 +906,7 @@ class DerivationGoal : public Goal void repairClosure(); - void amDone(ExitCode result) + void amDone(ExitCode result) override { logger->event(evBuildFinished, act, result == ecSuccess); Goal::amDone(result); @@ -3288,16 +3288,16 @@ class SubstitutionGoal : public Goal SubstitutionGoal(const Path & storePath, Worker & worker, bool repair = false); ~SubstitutionGoal(); - void timedOut() { abort(); }; + void timedOut() override { abort(); }; - string key() + string key() override { /* "a$" ensures substitution goals happen before derivation goals. */ return "a$" + storePathToName(storePath) + "$" + storePath; } - void work(); + void work() override; /* The states. */ void init(); @@ -3308,12 +3308,12 @@ class SubstitutionGoal : public Goal void finished(); /* Callback used by the worker to write to the log. */ - void handleChildOutput(int fd, const string & data); - void handleEOF(int fd); + void handleChildOutput(int fd, const string & data) override; + void handleEOF(int fd) override; Path getStorePath() { return storePath; } - void amDone(ExitCode result) + void amDone(ExitCode result) override { logger->event(evSubstitutionFinished, act, result == ecSuccess); Goal::amDone(result); From fe08d17934e6abe3e8566706f53063166b881f8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 14:37:24 +0200 Subject: [PATCH 0366/2196] Fix seccomp build failure on clang Fixes src/libstore/build.cc:2321:45: error: non-constant-expression cannot be narrowed from type 'int' to 'scmp_datum_t' (aka 'unsigned long') in initializer list [-Wc++11-narrowing] --- src/libstore/build.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 0a874bbf1cd..86cab9f35b6 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2318,15 +2318,15 @@ void setupSeccomp() /* Prevent builders from creating setuid/setgid binaries. 
*/ for (int perm : { S_ISUID, S_ISGID }) { if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(chmod), 1, - SCMP_A1(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) + SCMP_A1(SCMP_CMP_MASKED_EQ, (scmp_datum_t) perm, (scmp_datum_t) perm)) != 0) throw SysError("unable to add seccomp rule"); if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(fchmod), 1, - SCMP_A1(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) + SCMP_A1(SCMP_CMP_MASKED_EQ, (scmp_datum_t) perm, (scmp_datum_t) perm)) != 0) throw SysError("unable to add seccomp rule"); if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(fchmodat), 1, - SCMP_A2(SCMP_CMP_MASKED_EQ, perm, perm)) != 0) + SCMP_A2(SCMP_CMP_MASKED_EQ, (scmp_datum_t) perm, (scmp_datum_t) perm)) != 0) throw SysError("unable to add seccomp rule"); } From 9bdb88ea6e833ad61d6833aa09560d32c9bc39df Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 15:56:15 +0200 Subject: [PATCH 0367/2196] Only pass --with-sandbox-shell on Linux --- release-common.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/release-common.nix b/release-common.nix index 8047c75bdb7..c64fc619df6 100644 --- a/release-common.nix +++ b/release-common.nix @@ -16,6 +16,7 @@ rec { configureFlags = [ "--disable-init-state" "--enable-gc" + ] ++ pkgs.lib.optionals pkgs.stdenv.isLinux [ "--with-sandbox-shell=${sh}/bin/busybox" ]; } From 83eec5a997cd121158b7adb32a688dc5a63d6c9c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 15:56:40 +0200 Subject: [PATCH 0368/2196] resolve-system-dependencies: Several fixes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fixes error: getting attributes of path ‘Versions/Current/CoreFoundation’: No such file or directory when /System/Library/Frameworks/CoreFoundation.framework/CoreFoundation is a symlink. Also fixes a segfault when encounting a file that is not a MACH binary (such as /dev/null, which is included in __impureHostDeps in Nixpkgs). Possibly fixes #786. 
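The "No such file or directory" case quoted above is what happens when a relative readlink() target (such as Versions/Current/CoreFoundation) is interpreted against the current directory instead of the directory containing the link. Below is a minimal standalone sketch of the resolution rule the patch adopts, using plain POSIX calls rather than Nix's readLink/dirOf helpers:

    #include <iostream>
    #include <limits.h>
    #include <stdexcept>
    #include <string>
    #include <unistd.h>

    // Resolve one level of symlink; a relative target is taken to be
    // relative to the directory containing the link itself.
    static std::string resolveOneSymlink(const std::string & path)
    {
        char buf[PATH_MAX];
        ssize_t len = readlink(path.c_str(), buf, sizeof(buf) - 1);
        if (len == -1) throw std::runtime_error("readlink failed on " + path);
        std::string target(buf, len);
        if (!target.empty() && target[0] == '/') return target;   // already absolute
        auto slash = path.rfind('/');
        std::string dir = slash == std::string::npos ? "." : path.substr(0, slash);
        return dir + "/" + target;
    }

    int main(int argc, char ** argv)
    {
        if (argc > 1) std::cout << resolveOneSymlink(argv[1]) << std::endl;
        return 0;
    }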
--- .../resolve-system-dependencies.cc | 118 ++++++++++-------- 1 file changed, 65 insertions(+), 53 deletions(-) diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc index ae8ca36ba9d..61504298608 100644 --- a/src/resolve-system-dependencies/resolve-system-dependencies.cc +++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc @@ -17,59 +17,75 @@ using namespace nix; static auto cacheDir = Path{}; -Path resolveCacheFile(Path lib) { +Path resolveCacheFile(Path lib) +{ std::replace(lib.begin(), lib.end(), '/', '%'); return cacheDir + "/" + lib; } -std::set readCacheFile(const Path & file) { +std::set readCacheFile(const Path & file) +{ return tokenizeString>(readFile(file), "\n"); } -void writeCacheFile(const Path & file, std::set & deps) { +void writeCacheFile(const Path & file, std::set & deps) +{ std::ofstream fp; fp.open(file); - for (auto & d : deps) { + for (auto & d : deps) fp << d << "\n"; - } fp.close(); } -std::string findDylibName(bool should_swap, ptrdiff_t dylib_command_start) { +std::string findDylibName(bool should_swap, ptrdiff_t dylib_command_start) +{ struct dylib_command *dylc = (struct dylib_command*)dylib_command_start; return std::string((char*)(dylib_command_start + DO_SWAP(should_swap, dylc->dylib.name.offset))); } -std::set runResolver(const Path & filename) { - int fd = open(filename.c_str(), O_RDONLY); - struct stat s; - fstat(fd, &s); - void *obj = mmap(NULL, s.st_size, PROT_READ, MAP_SHARED, fd, 0); +std::set runResolver(const Path & filename) +{ + AutoCloseFD fd = open(filename.c_str(), O_RDONLY); + if (!fd) + throw SysError("opening ‘%s’", filename); + + struct stat st; + if (fstat(fd.get(), &st)) + throw SysError("statting ‘%s’", filename); + + if (st.st_size < sizeof(mach_header_64)) { + printError("file ‘%s’ is too short for a MACH binary", filename); + return {}; + } + + void *obj = mmap(NULL, st.st_size, PROT_READ, MAP_SHARED, fd.get(), 0); + if (!obj) + throw SysError("mmapping ‘%s’", filename); ptrdiff_t mach64_offset = 0; - uint32_t magic = ((struct mach_header_64*)obj)->magic; - if(magic == FAT_CIGAM || magic == FAT_MAGIC) { + uint32_t magic = ((struct mach_header_64*) obj)->magic; + if (magic == FAT_CIGAM || magic == FAT_MAGIC) { bool should_swap = magic == FAT_CIGAM; uint32_t narches = DO_SWAP(should_swap, ((struct fat_header*)obj)->nfat_arch); - for(uint32_t iter = 0; iter < narches; iter++) { + for (uint32_t iter = 0; iter < narches; iter++) { ptrdiff_t header_offset = (ptrdiff_t)obj + sizeof(struct fat_header) * (iter + 1); struct fat_arch* arch = (struct fat_arch*)header_offset; - if(DO_SWAP(should_swap, arch->cputype) == CPU_TYPE_X86_64) { + if (DO_SWAP(should_swap, arch->cputype) == CPU_TYPE_X86_64) { mach64_offset = (ptrdiff_t)DO_SWAP(should_swap, arch->offset); break; } } if (mach64_offset == 0) { printError(format("Could not find any mach64 blobs in file ‘%1%’, continuing...") % filename); - return std::set(); + return {}; } } else if (magic == MH_MAGIC_64 || magic == MH_CIGAM_64) { mach64_offset = 0; } else { printError(format("Object file has unknown magic number ‘%1%’, skipping it...") % magic); - return std::set(); + return {}; } ptrdiff_t mach_header_offset = (ptrdiff_t)obj + mach64_offset; @@ -94,30 +110,28 @@ std::set runResolver(const Path & filename) { return libs; } -bool isSymlink(const Path & path) { +bool isSymlink(const Path & path) +{ struct stat st; - if(lstat(path.c_str(), &st)) - throw SysError(format("getting attributes of 
path ‘%1%’") % path); + if (lstat(path.c_str(), &st) == -1) + throw SysError("getting attributes of path ‘%1%’", path); return S_ISLNK(st.st_mode); } -Path resolveSymlink(const Path & path) { - char buf[PATH_MAX]; - ssize_t len = readlink(path.c_str(), buf, sizeof(buf) - 1); - if(len != -1) { - buf[len] = 0; - return Path(buf); - } else { - throw SysError(format("readlink('%1%')") % path); - } +Path resolveSymlink(const Path & path) +{ + auto target = readLink(path); + return hasPrefix(target, "/") + ? target + : dirOf(path) + "/" + target; } -std::set resolveTree(const Path & path, PathSet & deps) { +std::set resolveTree(const Path & path, PathSet & deps) +{ std::set results; - if(deps.find(path) != deps.end()) { - return std::set(); - } + if (deps.count(path)) + return {}; deps.insert(path); for (auto & lib : runResolver(path)) { results.insert(lib); @@ -128,32 +142,33 @@ std::set resolveTree(const Path & path, PathSet & deps) { return results; } -std::set getPath(const Path & path) { +std::set getPath(const Path & path) +{ + if (hasPrefix(path, "/dev")) return {}; + Path cacheFile = resolveCacheFile(path); - if(pathExists(cacheFile)) { + if (pathExists(cacheFile)) return readCacheFile(cacheFile); - } - std::set deps; - std::set paths; + std::set deps, paths; paths.insert(path); - Path next_path = Path(path); - while(isSymlink(next_path)) { - next_path = resolveSymlink(next_path); - paths.insert(next_path); + Path nextPath(path); + while (isSymlink(nextPath)) { + nextPath = resolveSymlink(nextPath); + paths.insert(nextPath); } - for(auto & t : resolveTree(next_path, deps)) { + for (auto & t : resolveTree(nextPath, deps)) paths.insert(t); - } writeCacheFile(cacheFile, paths); return paths; } -int main(int argc, char ** argv) { +int main(int argc, char ** argv) +{ return handleExceptions(argv[0], [&]() { initNix(); @@ -177,18 +192,15 @@ int main(int argc, char ** argv) { auto drv = store->derivationFromPath(Path(argv[1])); Strings impurePaths = tokenizeString(get(drv.env, "__impureHostDeps")); - std::set all_paths; + std::set allPaths; - for (auto & path : impurePaths) { - for(auto & p : getPath(path)) { - all_paths.insert(p); - } - } + for (auto & path : impurePaths) + for (auto & p : getPath(path)) + allPaths.insert(p); std::cout << "extra-chroot-dirs" << std::endl; - for(auto & path : all_paths) { + for (auto & path : allPaths) std::cout << path << std::endl; - } std::cout << std::endl; }); } From 53a16441878ebd4a5ac6ef8a95a649bfd521da3d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 17:16:49 +0200 Subject: [PATCH 0369/2196] Darwin sandbox: Disallow creating setuid/setgid binaries Suggested by Daiderd Jordan. --- src/libstore/build.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 86cab9f35b6..46ce562f798 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2656,6 +2656,10 @@ void DerivationGoal::runChild() sandboxProfile += "(deny default (with no-log))\n"; } + /* Disallow creating setuid/setgid binaries, since that + would allow breaking build user isolation. */ + sandboxProfile += "(deny file-write-setugid)\n"; + /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. 
*/ Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true); From acc889c82179e96537ebe1494ec13b9536d579ca Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 17:40:12 +0200 Subject: [PATCH 0370/2196] Darwin sandbox: Use sandbox-defaults.sb Issue #759. Also, remove nix.conf from the sandbox since I don't really see a legitimate reason for builders to access the Nix configuration. --- .gitignore | 4 ++-- src/libstore/build.cc | 6 +++--- src/libstore/local-store.cc | 2 +- src/libstore/local.mk | 14 +++++++++----- ...{sandbox-defaults.sb.in => sandbox-defaults.sb} | 9 ++++----- 5 files changed, 19 insertions(+), 16 deletions(-) rename src/libstore/{sandbox-defaults.sb.in => sandbox-defaults.sb} (90%) diff --git a/.gitignore b/.gitignore index 4f7e668e781..60bd7d06499 100644 --- a/.gitignore +++ b/.gitignore @@ -48,8 +48,8 @@ perl/Makefile.config /src/libexpr/nix.tbl # /src/libstore/ -/src/libstore/schema.sql.hh -/src/libstore/sandbox-defaults.sb +/src/libstore/schema.sql.gen.hh +/src/libstore/sandbox-defaults.sb.gen.hh /src/nix/nix diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 46ce562f798..92471b228d0 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2656,9 +2656,9 @@ void DerivationGoal::runChild() sandboxProfile += "(deny default (with no-log))\n"; } - /* Disallow creating setuid/setgid binaries, since that - would allow breaking build user isolation. */ - sandboxProfile += "(deny file-write-setugid)\n"; + sandboxProfile += +#include "sandbox-defaults.sb.gen.hh" + ; /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */ diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 5b03e86f3ea..a226e0110af 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -320,7 +320,7 @@ void LocalStore::openDB(State & state, bool create) /* Initialise the database schema, if necessary. 
*/ if (create) { const char * schema = -#include "schema.sql.hh" +#include "schema.sql.gen.hh" ; db.exec(schema); } diff --git a/src/libstore/local.mk b/src/libstore/local.mk index ffdb55abc65..7bc69f65d80 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -34,12 +34,16 @@ libstore_CXXFLAGS = \ -DSANDBOX_SHELL="\"$(sandbox_shell)\"" \ -DLSOF=\"$(lsof)\" -$(d)/local-store.cc: $(d)/schema.sql.hh +$(d)/local-store.cc: $(d)/schema.sql.gen.hh -%.sql.hh: %.sql - $(trace-gen) sed -e 's/"/\\"/g' -e 's/\(.*\)/"\1\\n"/' < $< > $@ || (rm $@ && exit 1) +$(d)/build.cc: $(d)/sandbox-defaults.sb.gen.hh -clean-files += $(d)/schema.sql.hh +%.gen.hh: % + echo 'R"foo(' >> $@.tmp + cat $< >> $@.tmp + echo ')foo"' >> $@.tmp + mv $@.tmp $@ + +clean-files += $(d)/schema.sql.gen.hh $(d)/sandbox-defaults.sb.gen.hh $(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644)) -$(eval $(call install-file-in, $(d)/sandbox-defaults.sb, $(datadir)/nix, 0644)) diff --git a/src/libstore/sandbox-defaults.sb.in b/src/libstore/sandbox-defaults.sb similarity index 90% rename from src/libstore/sandbox-defaults.sb.in rename to src/libstore/sandbox-defaults.sb index b5e80085fbe..6bd15603e79 100644 --- a/src/libstore/sandbox-defaults.sb.in +++ b/src/libstore/sandbox-defaults.sb @@ -28,15 +28,10 @@ (allow file-read-metadata (literal "/var") (literal "/tmp") - ; symlinks - (literal "@sysconfdir@") - (literal "@sysconfdir@/nix") - (literal "@sysconfdir@/nix/nix.conf") (literal "/etc/resolv.conf") (literal "/private/etc/resolv.conf")) (allow file-read* - (literal "/private@sysconfdir@/nix/nix.conf") (literal "/private/var/run/resolv.conf")) ; some builders use filehandles other than stdin/stdout @@ -61,3 +56,7 @@ ; allow local networking (allow network* (local ip) (remote unix-socket)) + +; Disallow creating setuid/setgid binaries, since that +; would allow breaking build user isolation. +(deny file-write-setugid) From 683a499ebbb3d5e8803feeab9097930a9ce91d3f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 May 2017 20:39:40 +0200 Subject: [PATCH 0371/2196] resolve-system-dependencies: Fix another segfault runResolver() was barfing on directories like /System/Library/Frameworks/Security.framework/Versions/Current/PlugIns. It should probably do something sophisticated for frameworks, but let's ignore them for now. 
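The crash is avoided by refusing to map and parse anything that is not a regular file. As a rough standalone illustration of that kind of guard (the 32-byte minimum stands in for sizeof(mach_header_64); this is a sketch, not the patched function):

    #include <cstdio>
    #include <string>
    #include <sys/stat.h>

    // Accept only regular files large enough to hold a Mach-O 64-bit
    // header; directories, devices and the like are skipped up front.
    static bool looksLikeMachOCandidate(const std::string & path, size_t minSize)
    {
        struct stat st;
        if (stat(path.c_str(), &st) == -1) return false;
        if (!S_ISREG(st.st_mode)) return false;
        return (size_t) st.st_size >= minSize;
    }

    int main(int argc, char ** argv)
    {
        for (int i = 1; i < argc; ++i)
            std::printf("%s: %s\n", argv[i],
                looksLikeMachOCandidate(argv[i], 32) ? "candidate" : "skipped");
        return 0;
    }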
--- .../resolve-system-dependencies.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc index 61504298608..b04595732b9 100644 --- a/src/resolve-system-dependencies/resolve-system-dependencies.cc +++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc @@ -53,6 +53,11 @@ std::set runResolver(const Path & filename) if (fstat(fd.get(), &st)) throw SysError("statting ‘%s’", filename); + if (!S_ISREG(st.st_mode)) { + printError("file ‘%s’ is not a regular file", filename); + return {}; + } + if (st.st_size < sizeof(mach_header_64)) { printError("file ‘%s’ is too short for a MACH binary", filename); return {}; From c740c3ce500af2b7eb34651b5eeec01288d79dca Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 May 2017 13:39:27 +0200 Subject: [PATCH 0372/2196] OS X sandbox: Store .sb file in $TMPDIR rather than the Nix store MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The filename used was not unique and owned by the build user, so builds could fail with error: while setting up the build environment: cannot unlink ‘/nix/store/99i210ihnsjacajaw8r33fmgjvzpg6nr-bison-3.0.4.drv.sb’: Permission denied --- src/libstore/build.cc | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 92471b228d0..8695850b344 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -778,7 +778,6 @@ class DerivationGoal : public Goal #if __APPLE__ typedef string SandboxProfile; SandboxProfile additionalSandboxProfile; - AutoDelete autoDelSandbox; #endif /* Hash rewriting. */ @@ -2711,9 +2710,7 @@ void DerivationGoal::runChild() debug("Generated sandbox profile:"); debug(sandboxProfile); - Path sandboxFile = drvPath + ".sb"; - deletePath(sandboxFile); - autoDelSandbox.reset(sandboxFile, false); + Path sandboxFile = tmpDir + "/.sandbox.sb"; writeFile(sandboxFile, sandboxProfile); From 44f3f8048fdc2564f25389ec2c613880763bbd03 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 May 2017 14:00:06 +0200 Subject: [PATCH 0373/2196] OS X sandbox: Don't use a deterministic $TMPDIR This doesn't work because the OS X sandbox cannot bind-mount path to a different location. --- src/libstore/build.cc | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 8695850b344..7f7d2fdb8bd 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1732,9 +1732,6 @@ void DerivationGoal::startBuilder() directory. */ #if __linux__ tmpDirInSandbox = useChroot ? settings.sandboxBuildDir : tmpDir; -#elif __APPLE__ - // On Darwin, we canonize /tmp because its probably a symlink to /private/tmp. - tmpDirInSandbox = useChroot ? 
canonPath("/tmp", true) + "/nix-build-" + drvName + "-0" : tmpDir; #else tmpDirInSandbox = tmpDir; #endif From c368e079ca27195aa7dbed1e834479ab17ccae73 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 May 2017 15:34:03 +0200 Subject: [PATCH 0374/2196] resolve-system-dependencies: Simplify --- .../resolve-system-dependencies.cc | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc index b04595732b9..aadfdc9476b 100644 --- a/src/resolve-system-dependencies/resolve-system-dependencies.cc +++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc @@ -28,15 +28,6 @@ std::set readCacheFile(const Path & file) return tokenizeString>(readFile(file), "\n"); } -void writeCacheFile(const Path & file, std::set & deps) -{ - std::ofstream fp; - fp.open(file); - for (auto & d : deps) - fp << d << "\n"; - fp.close(); -} - std::string findDylibName(bool should_swap, ptrdiff_t dylib_command_start) { struct dylib_command *dylc = (struct dylib_command*)dylib_command_start; @@ -167,7 +158,7 @@ std::set getPath(const Path & path) for (auto & t : resolveTree(nextPath, deps)) paths.insert(t); - writeCacheFile(cacheFile, paths); + writeFile(cacheFile, concatStringsSep("\n", paths)); return paths; } From 5ea8161b552ad79b7caf9b68b3c7d6daab203266 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 May 2017 16:10:10 +0200 Subject: [PATCH 0375/2196] resolve-system-dependencies: Misc fixes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fixes Could not find any mach64 blobs in file ‘/usr/lib/libSystem.B.dylib’, continuing... --- .../resolve-system-dependencies.cc | 42 +++++++++---------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc index aadfdc9476b..63e557ec5f1 100644 --- a/src/resolve-system-dependencies/resolve-system-dependencies.cc +++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc @@ -28,12 +28,6 @@ std::set readCacheFile(const Path & file) return tokenizeString>(readFile(file), "\n"); } -std::string findDylibName(bool should_swap, ptrdiff_t dylib_command_start) -{ - struct dylib_command *dylc = (struct dylib_command*)dylib_command_start; - return std::string((char*)(dylib_command_start + DO_SWAP(should_swap, dylc->dylib.name.offset))); -} - std::set runResolver(const Path & filename) { AutoCloseFD fd = open(filename.c_str(), O_RDONLY); @@ -54,22 +48,20 @@ std::set runResolver(const Path & filename) return {}; } - void *obj = mmap(NULL, st.st_size, PROT_READ, MAP_SHARED, fd.get(), 0); + char* obj = (char*) mmap(NULL, st.st_size, PROT_READ, MAP_SHARED, fd.get(), 0); if (!obj) throw SysError("mmapping ‘%s’", filename); ptrdiff_t mach64_offset = 0; - uint32_t magic = ((struct mach_header_64*) obj)->magic; + uint32_t magic = ((mach_header_64*) obj)->magic; if (magic == FAT_CIGAM || magic == FAT_MAGIC) { bool should_swap = magic == FAT_CIGAM; - uint32_t narches = DO_SWAP(should_swap, ((struct fat_header*)obj)->nfat_arch); - - for (uint32_t iter = 0; iter < narches; iter++) { - ptrdiff_t header_offset = (ptrdiff_t)obj + sizeof(struct fat_header) * (iter + 1); - struct fat_arch* arch = (struct fat_arch*)header_offset; + uint32_t narches = DO_SWAP(should_swap, ((fat_header *) obj)->nfat_arch); + for (uint32_t i = 0; i < narches; i++) { + 
fat_arch* arch = (fat_arch*) (obj + sizeof(fat_header) + sizeof(fat_arch) * i); if (DO_SWAP(should_swap, arch->cputype) == CPU_TYPE_X86_64) { - mach64_offset = (ptrdiff_t)DO_SWAP(should_swap, arch->offset); + mach64_offset = (ptrdiff_t) DO_SWAP(should_swap, arch->offset); break; } } @@ -84,20 +76,19 @@ std::set runResolver(const Path & filename) return {}; } - ptrdiff_t mach_header_offset = (ptrdiff_t)obj + mach64_offset; - struct mach_header_64 *m_header = (struct mach_header_64 *)mach_header_offset; + mach_header_64 * m_header = (mach_header_64 *) (obj + mach64_offset); bool should_swap = magic == MH_CIGAM_64; - ptrdiff_t cmd_offset = mach_header_offset + sizeof(struct mach_header_64); + ptrdiff_t cmd_offset = mach64_offset + sizeof(mach_header_64); std::set libs; - for(uint32_t i = 0; i < DO_SWAP(should_swap, m_header->ncmds); i++) { - struct load_command *cmd = (struct load_command*)cmd_offset; + for (uint32_t i = 0; i < DO_SWAP(should_swap, m_header->ncmds); i++) { + load_command * cmd = (load_command *) (obj + cmd_offset); switch(DO_SWAP(should_swap, cmd->cmd)) { case LC_LOAD_UPWARD_DYLIB: case LC_LOAD_DYLIB: case LC_REEXPORT_DYLIB: - libs.insert(findDylibName(should_swap, cmd_offset)); + libs.insert(std::string((char *) cmd + ((dylib_command*) cmd)->dylib.name.offset)); break; } cmd_offset += DO_SWAP(should_swap, cmd->cmdsize); @@ -185,8 +176,15 @@ int main(int argc, char ** argv) auto store = openStore(); - auto drv = store->derivationFromPath(Path(argv[1])); - Strings impurePaths = tokenizeString(get(drv.env, "__impureHostDeps")); + StringSet impurePaths; + + if (std::string(argv[1]) == "--test") + impurePaths.insert(argv[2]); + else { + auto drv = store->derivationFromPath(Path(argv[1])); + impurePaths = tokenizeString(get(drv.env, "__impureHostDeps")); + impurePaths.insert("/usr/lib/libSystem.dylib"); + } std::set allPaths; From c96e8cd097ce0d181467fddd92acad4341ca566a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 May 2017 17:23:27 +0200 Subject: [PATCH 0376/2196] OS X sandbox: Improve builtin sandbox profile Also, add rules to allow fixed-output derivations to access the network. These rules are sufficient to build stdenvDarwin without any __sandboxProfile magic. 
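The profile ends up being assembled as a single string: the version header, a default-deny rule, the contents generated from sandbox-defaults.sb, and, only for fixed-output derivations, the network rules from sandbox-network.sb. A simplified standalone sketch of that composition follows; the two fragment strings are abbreviated stand-ins for the real generated headers:

    #include <iostream>
    #include <string>

    // Abbreviated stand-ins for the strings produced from the .sb files
    // by the %.gen.hh rule in local.mk.
    static const std::string sandboxDefaults = "(deny file-write-setugid)\n";
    static const std::string sandboxNetwork  = "(allow network* (local ip) (remote ip))\n";

    static std::string makeSandboxProfile(bool fixedOutput, bool logViolations)
    {
        std::string profile = "(version 1)\n";
        profile += logViolations ? "(deny default)\n" : "(deny default (with no-log))\n";
        profile += sandboxDefaults;
        if (fixedOutput)            // fixed-output derivations may use the network
            profile += sandboxNetwork;
        return profile;
    }

    int main()
    {
        std::cout << makeSandboxProfile(true, false);
        return 0;
    }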
--- .gitignore | 1 + src/libstore/build.cc | 13 ++++- src/libstore/local.mk | 12 ++-- src/libstore/sandbox-defaults.sb | 94 +++++++++++++++----------------- src/libstore/sandbox-network.sb | 16 ++++++ 5 files changed, 77 insertions(+), 59 deletions(-) create mode 100644 src/libstore/sandbox-network.sb diff --git a/.gitignore b/.gitignore index 60bd7d06499..1cf7a3c3515 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ perl/Makefile.config # /src/libstore/ /src/libstore/schema.sql.gen.hh /src/libstore/sandbox-defaults.sb.gen.hh +/src/libstore/sandbox-network.sb.gen.hh /src/nix/nix diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 7f7d2fdb8bd..55c8ac58837 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2614,8 +2614,9 @@ void DerivationGoal::runChild() string sandboxProfile; if (drv->isBuiltin()) { ; + } #if __APPLE__ - } else if (useChroot) { + else if (useChroot) { /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ PathSet ancestry; @@ -2653,9 +2654,14 @@ void DerivationGoal::runChild() } sandboxProfile += -#include "sandbox-defaults.sb.gen.hh" + #include "sandbox-defaults.sb.gen.hh" ; + if (fixedOutput) + sandboxProfile += + #include "sandbox-network.sb.gen.hh" + ; + /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */ Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true); @@ -2718,8 +2724,9 @@ void DerivationGoal::runChild() args.push_back("-D"); args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); args.push_back(drv->builder); + } #endif - } else { + else { builder = drv->builder.c_str(); string builderBasename = baseNameOf(drv->builder); args.push_back(builderBasename); diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 7bc69f65d80..c0cc91c2658 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -36,14 +36,14 @@ libstore_CXXFLAGS = \ $(d)/local-store.cc: $(d)/schema.sql.gen.hh -$(d)/build.cc: $(d)/sandbox-defaults.sb.gen.hh +$(d)/build.cc: $(d)/sandbox-defaults.sb.gen.hh $(d)/sandbox-network.sb.gen.hh %.gen.hh: % - echo 'R"foo(' >> $@.tmp - cat $< >> $@.tmp - echo ')foo"' >> $@.tmp - mv $@.tmp $@ + @echo 'R"foo(' >> $@.tmp + $(trace-gen) cat $< >> $@.tmp + @echo ')foo"' >> $@.tmp + @mv $@.tmp $@ -clean-files += $(d)/schema.sql.gen.hh $(d)/sandbox-defaults.sb.gen.hh +clean-files += $(d)/schema.sql.gen.hh $(d)/sandbox-defaults.sb.gen.hh $(d)/sandbox-network.sb.gen.hh $(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644)) diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb index 6bd15603e79..0292f5ee882 100644 --- a/src/libstore/sandbox-defaults.sb +++ b/src/libstore/sandbox-defaults.sb @@ -1,62 +1,56 @@ -(allow file-read* file-write-data (literal "/dev/null")) -(allow ipc-posix*) -(allow mach-lookup (global-name "com.apple.SecurityServer")) +(define TMPDIR (param "_GLOBAL_TMP_DIR")) -(allow file-read* - (literal "/dev/dtracehelper") - (literal "/dev/tty") - (literal "/dev/autofs_nowait") - (literal "/System/Library/CoreServices/SystemVersion.plist") - (literal "/private/var/run/systemkeychaincheck.done") - (literal "/private/etc/protocols") - (literal "/private/var/tmp") - (literal "/private/var/db") - (subpath "/private/var/db/mds")) +; Disallow creating setuid/setgid binaries, since that +; would allow breaking build user isolation. 
+(deny file-write-setugid) -(allow file-read* - (subpath "/usr/share/icu") - (subpath "/usr/share/locale") - (subpath "/usr/share/zoneinfo")) +; Allow forking. +(allow process-fork) -(allow file-write* - (literal "/dev/tty") - (literal "/dev/dtracehelper") - (literal "/mds")) +; Allow reading system information like #CPUs, etc. +(allow sysctl-read) -(allow file-ioctl (literal "/dev/dtracehelper")) +; Allow POSIX semaphores and shared memory. +(allow ipc-posix*) -(allow file-read-metadata - (literal "/var") - (literal "/tmp") - (literal "/etc/resolv.conf") - (literal "/private/etc/resolv.conf")) +; Allow socket creation. +(allow system-socket) -(allow file-read* - (literal "/private/var/run/resolv.conf")) +; Allow sending signals within the sandbox. +(allow signal (target same-sandbox)) -; some builders use filehandles other than stdin/stdout -(allow file* - (subpath "/dev/fd") - (literal "/dev/ptmx") - (regex #"^/dev/[pt]ty.*$")) +; Access to /tmp. +(allow file* process-exec (literal "/tmp") (subpath TMPDIR)) -; allow everything inside TMP -(allow file* process-exec - (subpath (param "_GLOBAL_TMP_DIR")) - (subpath "/private/tmp")) +; Some packages like to read the system version. +(allow file-read* (literal "/System/Library/CoreServices/SystemVersion.plist")) -(allow process-fork) -(allow sysctl-read) -(allow signal (target same-sandbox)) +; Without this line clang cannot write to /dev/null, breaking some configure tests. +(allow file-read-metadata (literal "/dev")) -; allow getpwuid (for git and other packages) -(allow mach-lookup - (global-name "com.apple.system.notification_center") - (global-name "com.apple.system.opendirectoryd.libinfo")) +; Standard devices. +(allow file* + (literal "/dev/null") + (literal "/dev/random") + (literal "/dev/stdin") + (literal "/dev/stdout") + (literal "/dev/tty") + (literal "/dev/urandom") + (literal "/dev/zero") + (subpath "/dev/fd")) -; allow local networking -(allow network* (local ip) (remote unix-socket)) +; Does nothing, but reduces build noise. +(allow file* (literal "/dev/dtracehelper")) -; Disallow creating setuid/setgid binaries, since that -; would allow breaking build user isolation. -(deny file-write-setugid) +; Allow access to zoneinfo since libSystem needs it. +(allow file-read* (subpath "/usr/share/zoneinfo")) + +(allow file-read* (subpath "/usr/share/locale")) + +; This is mostly to get more specific log messages when builds try to +; access something in /etc or /var. +(allow file-read-metadata + (literal "/etc") + (literal "/var") + (literal "/private/var/tmp") + ) diff --git a/src/libstore/sandbox-network.sb b/src/libstore/sandbox-network.sb new file mode 100644 index 00000000000..56beec761fa --- /dev/null +++ b/src/libstore/sandbox-network.sb @@ -0,0 +1,16 @@ +; Allow local and remote network traffic. +(allow network* (local ip) (remote ip)) + +; Allow access to /etc/resolv.conf (which is a symlink to +; /private/var/run/resolv.conf). +(allow file-read-metadata + (literal "/var") + (literal "/etc") + (literal "/etc/resolv.conf") + (literal "/private/etc/resolv.conf")) + +(allow file-read* + (literal "/private/var/run/resolv.conf")) + +; Allow DNS lookups. +(allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder"))) From 52fec8dde862264874a4f19be329124ac46adb81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 May 2017 20:43:47 +0200 Subject: [PATCH 0377/2196] Remove listxattr assertion It appears that sometimes, listxattr() returns a different value for the query case (i.e. 
when the buffer size is 0). --- src/libstore/local-store.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index a226e0110af..ee36428af03 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -424,8 +424,6 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) if (lremovexattr(path.c_str(), eaName.c_str()) == -1) throw SysError("removing extended attribute ‘%s’ from ‘%s’", eaName, path); - - assert(llistxattr(path.c_str(), nullptr, 0) == 0); } #endif From ab5834f7a1c2cae9b7071d5a6944ff8b1eeb6e38 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Jun 2017 14:28:21 +0200 Subject: [PATCH 0378/2196] RPM, Deb: Add dependency on libseccomp --- nix.spec.in | 2 ++ release.nix | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/nix.spec.in b/nix.spec.in index 390893d64dc..88f0d04d390 100644 --- a/nix.spec.in +++ b/nix.spec.in @@ -20,9 +20,11 @@ Requires: curl Requires: bzip2 Requires: gzip Requires: xz +Requires: libseccomp BuildRequires: bzip2-devel BuildRequires: sqlite-devel BuildRequires: libcurl-devel +BuildRequires: libseccomp-devel # Hack to make that shitty RPM scanning hack shut up. Provides: perl(Nix::SSH) diff --git a/release.nix b/release.nix index ec6bda99577..fbd644cd62b 100644 --- a/release.nix +++ b/release.nix @@ -305,7 +305,7 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" ] + [ "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "libcurl-devel" "openssl-devel" "xz-devel" "libseccomp-devel" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; @@ -327,14 +327,14 @@ let src = jobs.tarball; diskImage = (diskImageFun vmTools.diskImageFuns) { extraPackages = - [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" ] + [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" "libseccomp-dev" ] ++ extraPackages; }; memSize = 1024; meta.schedulingPriority = 50; postInstall = "make installcheck"; configureFlags = "--sysconfdir=/etc"; debRequires = - [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" ] + [ "curl" "libsqlite3-0" "libbz2-1.0" "bzip2" "xz-utils" "libssl1.0.0" "liblzma5" "libseccomp2" ] ++ extraDebPackages; debMaintainer = "Eelco Dolstra "; doInstallCheck = true; From b4b1f4525f8dc8f320d666c208bff5cb36777580 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Jun 2017 14:43:15 +0200 Subject: [PATCH 0379/2196] Fix coverage job --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index fbd644cd62b..aa2ee7290e6 100644 --- a/release.nix +++ b/release.nix @@ -173,7 +173,7 @@ let src = tarball; buildInputs = - [ curl bzip2 openssl pkgconfig sqlite xz libsodium + [ curl bzip2 openssl pkgconfig sqlite xz libsodium libseccomp # These are for "make check" only: graphviz libxml2 libxslt ]; From 88acb6461045e4e565c7eef884ea7ab69c2e6de9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Mon, 5 Jun 2017 22:52:05 +0100 Subject: [PATCH 0380/2196] Add .editorconfig - Automatically adjust editor to nix coding style -> less nitpiks/styling issues in pull requests -> profit(!) 
see also nixpkgs' editorconfig: https://github.com/NixOS/nixpkgs/blob/master/.editorconfig --- .editorconfig | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000000..887ecadba59 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,26 @@ +# EditorConfig configuration for nix +# http://EditorConfig.org + +# Top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file, utf-8 charset +[*] +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true +charset = utf-8 + +# Match nix files, set indent to spaces with width of two +[*.nix] +indent_style = space +indent_size = 2 + +# Match c++/shell/perl, set indent to spaces with width of four +[*.{hpp,cc,hh,sh,pl}] +indent_style = space +indent_size = 4 + +# Match diffs, avoid to trim trailing whitespace +[*.{diff,patch}] +trim_trailing_whitespace = false From 85e93d7b874f99730387714394bb60407cf138d5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 6 Jun 2017 18:44:49 +0200 Subject: [PATCH 0381/2196] Always use the Darwin sandbox Even with "build-use-sandbox = false", we now use sandboxing with a permissive profile that allows everything except the creation of setuid/setgid binaries. --- .gitignore | 4 +- src/libstore/build.cc | 168 ++++++++++++++++--------------- src/libstore/local.mk | 6 +- src/libstore/sandbox-defaults.sb | 2 + src/libstore/sandbox-minimal.sb | 5 + 5 files changed, 99 insertions(+), 86 deletions(-) create mode 100644 src/libstore/sandbox-minimal.sb diff --git a/.gitignore b/.gitignore index 1cf7a3c3515..6163087384d 100644 --- a/.gitignore +++ b/.gitignore @@ -48,9 +48,7 @@ perl/Makefile.config /src/libexpr/nix.tbl # /src/libstore/ -/src/libstore/schema.sql.gen.hh -/src/libstore/sandbox-defaults.sb.gen.hh -/src/libstore/sandbox-network.sb.gen.hh +/src/libstore/*.gen.hh /src/nix/nix diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 55c8ac58837..d12a1a7913b 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2611,104 +2611,102 @@ void DerivationGoal::runChild() const char *builder = "invalid"; - string sandboxProfile; if (drv->isBuiltin()) { ; } #if __APPLE__ - else if (useChroot) { - /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ - PathSet ancestry; + else { + /* This has to appear before import statements. */ + std::string sandboxProfile = "(version 1)\n"; + + if (useChroot) { + + /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ + PathSet ancestry; + + /* We build the ancestry before adding all inputPaths to the store because we know they'll + all have the same parents (the store), and there might be lots of inputs. This isn't + particularly efficient... I doubt it'll be a bottleneck in practice */ + for (auto & i : dirsInChroot) { + Path cur = i.first; + while (cur.compare("/") != 0) { + cur = dirOf(cur); + ancestry.insert(cur); + } + } - /* We build the ancestry before adding all inputPaths to the store because we know they'll - all have the same parents (the store), and there might be lots of inputs. This isn't - particularly efficient... I doubt it'll be a bottleneck in practice */ - for (auto & i : dirsInChroot) { - Path cur = i.first; + /* And we want the store in there regardless of how empty dirsInChroot. We include the innermost + path component this time, since it's typically /nix/store and we care about that. 
*/ + Path cur = worker.store.storeDir; while (cur.compare("/") != 0) { - cur = dirOf(cur); ancestry.insert(cur); + cur = dirOf(cur); } - } - - /* And we want the store in there regardless of how empty dirsInChroot. We include the innermost - path component this time, since it's typically /nix/store and we care about that. */ - Path cur = worker.store.storeDir; - while (cur.compare("/") != 0) { - ancestry.insert(cur); - cur = dirOf(cur); - } - /* Add all our input paths to the chroot */ - for (auto & i : inputPaths) - dirsInChroot[i] = i; - - /* This has to appear before import statements */ - sandboxProfile += "(version 1)\n"; - - /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ - if (settings.darwinLogSandboxViolations) { - sandboxProfile += "(deny default)\n"; - } else { - sandboxProfile += "(deny default (with no-log))\n"; - } + /* Add all our input paths to the chroot */ + for (auto & i : inputPaths) + dirsInChroot[i] = i; - sandboxProfile += - #include "sandbox-defaults.sb.gen.hh" - ; + /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ + if (settings.darwinLogSandboxViolations) { + sandboxProfile += "(deny default)\n"; + } else { + sandboxProfile += "(deny default (with no-log))\n"; + } - if (fixedOutput) sandboxProfile += - #include "sandbox-network.sb.gen.hh" + #include "sandbox-defaults.sb.gen.hh" ; - /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms - to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */ - Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true); - - /* They don't like trailing slashes on subpath directives */ - if (globalTmpDir.back() == '/') globalTmpDir.pop_back(); - - /* Our rwx outputs */ - sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & i : missingPaths) { - sandboxProfile += (format("\t(subpath \"%1%\")\n") % i.c_str()).str(); - } - sandboxProfile += ")\n"; - - /* Our inputs (transitive dependencies and any impurities computed above) + if (fixedOutput) + sandboxProfile += + #include "sandbox-network.sb.gen.hh" + ; - without file-write* allowed, access() incorrectly returns EPERM - */ - sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & i : dirsInChroot) { - if (i.first != i.second.source) - throw Error(format( - "can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin") - % i.first % i.second.source); - - string path = i.first; - struct stat st; - if (lstat(path.c_str(), &st)) { - if (i.second.optional && errno == ENOENT) - continue; - throw SysError(format("getting attributes of path ‘%1%’") % path); + /* Our rwx outputs */ + sandboxProfile += "(allow file-read* file-write* process-exec\n"; + for (auto & i : missingPaths) { + sandboxProfile += (format("\t(subpath \"%1%\")\n") % i.c_str()).str(); } - if (S_ISDIR(st.st_mode)) - sandboxProfile += (format("\t(subpath \"%1%\")\n") % path).str(); - else - sandboxProfile += (format("\t(literal \"%1%\")\n") % path).str(); - } - sandboxProfile += ")\n"; + sandboxProfile += ")\n"; + + /* Our inputs (transitive dependencies and any impurities computed above) + + without file-write* allowed, access() incorrectly returns EPERM + */ + sandboxProfile += "(allow file-read* file-write* process-exec\n"; + for (auto & i : dirsInChroot) { + if (i.first != i.second.source) + 
throw Error(format( + "can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin") + % i.first % i.second.source); + + string path = i.first; + struct stat st; + if (lstat(path.c_str(), &st)) { + if (i.second.optional && errno == ENOENT) + continue; + throw SysError(format("getting attributes of path ‘%1%’") % path); + } + if (S_ISDIR(st.st_mode)) + sandboxProfile += (format("\t(subpath \"%1%\")\n") % path).str(); + else + sandboxProfile += (format("\t(literal \"%1%\")\n") % path).str(); + } + sandboxProfile += ")\n"; - /* Allow file-read* on full directory hierarchy to self. Allows realpath() */ - sandboxProfile += "(allow file-read*\n"; - for (auto & i : ancestry) { - sandboxProfile += (format("\t(literal \"%1%\")\n") % i.c_str()).str(); - } - sandboxProfile += ")\n"; + /* Allow file-read* on full directory hierarchy to self. Allows realpath() */ + sandboxProfile += "(allow file-read*\n"; + for (auto & i : ancestry) { + sandboxProfile += (format("\t(literal \"%1%\")\n") % i.c_str()).str(); + } + sandboxProfile += ")\n"; - sandboxProfile += additionalSandboxProfile; + sandboxProfile += additionalSandboxProfile; + } else + sandboxProfile += + #include "sandbox-minimal.sb.gen.hh" + ; debug("Generated sandbox profile:"); debug(sandboxProfile); @@ -2717,6 +2715,13 @@ void DerivationGoal::runChild() writeFile(sandboxFile, sandboxProfile); + /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms + to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */ + Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true); + + /* They don't like trailing slashes on subpath directives */ + if (globalTmpDir.back() == '/') globalTmpDir.pop_back(); + builder = "/usr/bin/sandbox-exec"; args.push_back("sandbox-exec"); args.push_back("-f"); @@ -2725,12 +2730,13 @@ void DerivationGoal::runChild() args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); args.push_back(drv->builder); } -#endif +#else else { builder = drv->builder.c_str(); string builderBasename = baseNameOf(drv->builder); args.push_back(builderBasename); } +#endif for (auto & i : drv->args) args.push_back(rewriteStrings(i, inputRewrites)); diff --git a/src/libstore/local.mk b/src/libstore/local.mk index c0cc91c2658..36b270f2e07 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -36,7 +36,9 @@ libstore_CXXFLAGS = \ $(d)/local-store.cc: $(d)/schema.sql.gen.hh -$(d)/build.cc: $(d)/sandbox-defaults.sb.gen.hh $(d)/sandbox-network.sb.gen.hh +sandbox-headers = $(d)/sandbox-defaults.sb.gen.hh $(d)/sandbox-network.sb.gen.hh $(d)/sandbox-minimal.sb.gen.hh + +$(d)/build.cc: $(sandbox-headers) %.gen.hh: % @echo 'R"foo(' >> $@.tmp @@ -44,6 +46,6 @@ $(d)/build.cc: $(d)/sandbox-defaults.sb.gen.hh $(d)/sandbox-network.sb.gen.hh @echo ')foo"' >> $@.tmp @mv $@.tmp $@ -clean-files += $(d)/schema.sql.gen.hh $(d)/sandbox-defaults.sb.gen.hh $(d)/sandbox-network.sb.gen.hh +clean-files += $(d)/schema.sql.gen.hh $(sandbox-headers) $(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644)) diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb index 0292f5ee882..d63c8f813c9 100644 --- a/src/libstore/sandbox-defaults.sb +++ b/src/libstore/sandbox-defaults.sb @@ -1,5 +1,7 @@ (define TMPDIR (param "_GLOBAL_TMP_DIR")) +(deny default) + ; Disallow creating setuid/setgid binaries, since that ; would allow breaking build user isolation. 
(deny file-write-setugid) diff --git a/src/libstore/sandbox-minimal.sb b/src/libstore/sandbox-minimal.sb new file mode 100644 index 00000000000..65f5108b399 --- /dev/null +++ b/src/libstore/sandbox-minimal.sb @@ -0,0 +1,5 @@ +(allow default) + +; Disallow creating setuid/setgid binaries, since that +; would allow breaking build user isolation. +(deny file-write-setugid) From c8cc50d46e78de7ae02c2cb7a5159e995c993f61 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 6 Jun 2017 18:52:15 +0200 Subject: [PATCH 0382/2196] Disable the build user mechanism on all platforms except Linux and OS X --- src/libstore/build.cc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d12a1a7913b..d5fe41d1b16 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1716,11 +1716,17 @@ void DerivationGoal::startBuilder() /* If `build-users-group' is not empty, then we have to build as one of the members of that group. */ if (settings.buildUsersGroup != "" && getuid() == 0) { +#if defined(__linux__) || defined(__APPLE__) buildUser = std::make_unique(); /* Make sure that no other processes are executing under this uid. */ buildUser->kill(); +#else + /* Don't know how to block the creation of setuid/setgid + binaries on this platform. */ + throw Error("build users are not supported on this platform for security reasons"); +#endif } /* Create a temporary directory where the build will take From b8283773bd64d7da6859ed520ee19867742a03ba Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Jun 2017 16:17:17 +0200 Subject: [PATCH 0383/2196] nix: Make all options available as flags MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thus, instead of ‘--option <name> <value>’, you can write ‘--<name> <value>’. So --option http-connections 100 becomes --http-connections 100 Apart from brevity, the difference is that it's not an error to set a non-existent option via --option, but unrecognized arguments are fatal. Boolean options have special treatment: they're mapped to the argument-less flags ‘--<name>’ and ‘--no-<name>’. E.g.
--option auto-optimise-store false becomes --no-auto-optimise-store --- src/libstore/globals.cc | 7 +++++++ src/libutil/config.cc | 25 +++++++++++++++++++++++++ src/libutil/config.hh | 6 ++++++ src/nix/main.cc | 2 ++ 4 files changed, 40 insertions(+) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 3dd2508a26d..3f2bea8e7b1 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -98,6 +98,13 @@ template<> void BaseSetting::toJSON(JSONPlaceholder & out) AbstractSetting::toJSON(out); } +template<> void BaseSetting::convertToArg(Args & args) +{ + args.mkFlag(0, name, {}, "Enable sandboxing.", 0, [=](Strings ss) { value = smEnabled; }); + args.mkFlag(0, "no-" + name, {}, "Disable sandboxing.", 0, [=](Strings ss) { value = smDisabled; }); + args.mkFlag(0, "relaxed-" + name, {}, "Enable sandboxing, but allow builds to disable it.", 0, [=](Strings ss) { value = smRelaxed; }); +} + void MaxBuildJobsSetting::set(const std::string & str) { if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency()); diff --git a/src/libutil/config.cc b/src/libutil/config.cc index f7a46bfee63..612fb6e6835 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -115,6 +115,13 @@ void Config::toJSON(JSONObject & out) } } +void Config::convertToArgs(Args & args) +{ + for (auto & s : _settings) + if (!s.second.isAlias) + s.second.setting->convertToArg(args); +} + AbstractSetting::AbstractSetting( const std::string & name, const std::string & description, @@ -128,12 +135,22 @@ void AbstractSetting::toJSON(JSONPlaceholder & out) out.write(to_string()); } +void AbstractSetting::convertToArg(Args & args) +{ +} + template void BaseSetting::toJSON(JSONPlaceholder & out) { out.write(value); } +template +void BaseSetting::convertToArg(Args & args) +{ + args.mkFlag(0, name, {}, description, 1, [=](Strings ss) { set(*ss.begin()); }); +} + template<> void BaseSetting::set(const std::string & str) { value = str; @@ -174,6 +191,12 @@ template<> std::string BaseSetting::to_string() return value ? 
"true" : "false"; } +template<> void BaseSetting::convertToArg(Args & args) +{ + args.mkFlag(0, name, {}, description, 0, [=](Strings ss) { value = true; }); + args.mkFlag(0, "no-" + name, {}, description, 0, [=](Strings ss) { value = false; }); +} + template<> void BaseSetting::set(const std::string & str) { value = tokenizeString(str); @@ -216,6 +239,8 @@ template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting; +template class BaseSetting; +template class BaseSetting; void PathSetting::set(const std::string & str) { diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 77620d47d37..2ca643fe0e7 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -63,6 +63,8 @@ public: void resetOverriden(); void toJSON(JSONObject & out); + + void convertToArgs(Args & args); }; class AbstractSetting @@ -99,6 +101,8 @@ protected: virtual void toJSON(JSONPlaceholder & out); + virtual void convertToArg(Args & args); + bool isOverriden() { return overriden; } }; @@ -132,6 +136,8 @@ public: std::string to_string() override; + void convertToArg(Args & args) override; + void toJSON(JSONPlaceholder & out) override; }; diff --git a/src/nix/main.cc b/src/nix/main.cc index 216f0bccef1..f83843415e3 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -22,6 +22,8 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs }); mkFlag(0, "version", "show version information", std::bind(printVersion, programName)); + + settings.convertToArgs(*this); } }; From aa952d5f0bc623a1584f2d589209f586e594c75f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Jun 2017 16:49:54 +0200 Subject: [PATCH 0384/2196] nix: Add --help-config flag --- src/libutil/args.hh | 2 +- src/libutil/config.hh | 10 +++++++++- src/nix/main.cc | 20 ++++++++++++++++++++ 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/src/libutil/args.hh b/src/libutil/args.hh index ac12f8be633..f70bb7823dc 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -41,7 +41,7 @@ protected: virtual bool processFlag(Strings::iterator & pos, Strings::iterator end); - void printFlags(std::ostream & out); + virtual void printFlags(std::ostream & out); /* Positional arguments. 
*/ struct ExpectedArg diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 2ca643fe0e7..994eab91170 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -31,6 +31,8 @@ class Config { friend class AbstractSetting; +public: + struct SettingData { bool isAlias; @@ -40,7 +42,11 @@ class Config { } }; - std::map _settings; + typedef std::map Settings; + +private: + + Settings _settings; StringMap initials; @@ -58,6 +64,8 @@ public: StringMap getSettings(bool overridenOnly = false); + const Settings & _getSettings() { return _settings; } + void applyConfigFile(const Path & path, bool fatal = false); void resetOverriden(); diff --git a/src/nix/main.cc b/src/nix/main.cc index f83843415e3..ea6838cd76c 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -21,10 +21,30 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs throw Exit(); }); + mkFlag(0, "help-config", "show configuration options", [=]() { + std::cout << "The following configuration options are available:\n\n"; + Table2 tbl; + for (const auto & s : settings._getSettings()) + if (!s.second.isAlias) + tbl.emplace_back(s.first, s.second.setting->description); + printTable(std::cout, tbl); + throw Exit(); + }); + mkFlag(0, "version", "show version information", std::bind(printVersion, programName)); settings.convertToArgs(*this); } + + void printFlags(std::ostream & out) override + { + Args::printFlags(out); + std::cout << + "\n" + "In addition, most configuration settings can be overriden using ‘-- ’.\n" + "Boolean settings can be overriden using ‘--’ or ‘--no-’. See ‘nix\n" + "--help-config’ for a list of configuration settings.\n"; + } }; void mainWrapped(int argc, char * * argv) From 186571965dccf57d15b9f37c1cca92a57187b7b3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Jun 2017 18:41:20 +0200 Subject: [PATCH 0385/2196] Don't show flags from config settings in "nix --help" --- src/libmain/common-args.cc | 8 +++- src/libstore/globals.cc | 20 +++++++-- src/libutil/args.cc | 26 +++++++++--- src/libutil/args.hh | 83 ++++++++++++++++++++++++++------------ src/libutil/config.cc | 29 +++++++++---- src/libutil/config.hh | 6 +-- src/nix/main.cc | 4 +- src/nix/sigs.cc | 9 ++++- src/nix/verify.cc | 9 ++++- 9 files changed, 140 insertions(+), 54 deletions(-) diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index 9a7a893138d..3fa42c2aafa 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -18,8 +18,12 @@ MixCommonArgs::MixCommonArgs(const string & programName) verbosity = lvlDebug; }); - mkFlag(0, "option", {"name", "value"}, "set a Nix configuration option (overriding nix.conf)", 2, - [](Strings ss) { + mkFlag() + .longName("option") + .labels({"name", "value"}) + .description("set a Nix configuration option (overriding nix.conf)") + .arity(2) + .handler([](Strings ss) { auto name = ss.front(); ss.pop_front(); auto value = ss.front(); try { diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 3f2bea8e7b1..2aceed27051 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -98,11 +98,23 @@ template<> void BaseSetting::toJSON(JSONPlaceholder & out) AbstractSetting::toJSON(out); } -template<> void BaseSetting::convertToArg(Args & args) +template<> void BaseSetting::convertToArg(Args & args, const std::string & category) { - args.mkFlag(0, name, {}, "Enable sandboxing.", 0, [=](Strings ss) { value = smEnabled; }); - args.mkFlag(0, "no-" + name, {}, "Disable sandboxing.", 0, [=](Strings ss) { value = smDisabled; }); - args.mkFlag(0, 
"relaxed-" + name, {}, "Enable sandboxing, but allow builds to disable it.", 0, [=](Strings ss) { value = smRelaxed; }); + args.mkFlag() + .longName(name) + .description("Enable sandboxing.") + .handler([=](Strings ss) { value = smEnabled; }) + .category(category); + args.mkFlag() + .longName("no-" + name) + .description("Disable sandboxing.") + .handler([=](Strings ss) { value = smDisabled; }) + .category(category); + args.mkFlag() + .longName("relaxed-" + name) + .description("Enable sandboxing, but allow builds to disable it.") + .handler([=](Strings ss) { value = smRelaxed; }) + .category(category); } void MaxBuildJobsSetting::set(const std::string & str) diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 115484f9e6c..df7e040875d 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -3,6 +3,18 @@ namespace nix { +Args::FlagMaker Args::mkFlag() +{ + return FlagMaker(*this); +} + +Args::FlagMaker::~FlagMaker() +{ + assert(flag->longName != ""); + args.longFlags[flag->longName] = flag; + if (flag->shortName) args.shortFlags[flag->shortName] = flag; +} + void Args::parseCmdline(const Strings & _cmdline) { Strings pendingArgs; @@ -71,11 +83,13 @@ void Args::printHelp(const string & programName, std::ostream & out) void Args::printFlags(std::ostream & out) { Table2 table; - for (auto & flag : longFlags) + for (auto & flag : longFlags) { + if (hiddenCategories.count(flag.second->category)) continue; table.push_back(std::make_pair( - (flag.second.shortName ? std::string("-") + flag.second.shortName + ", " : " ") - + "--" + flag.first + renderLabels(flag.second.labels), - flag.second.description)); + (flag.second->shortName ? std::string("-") + flag.second->shortName + ", " : " ") + + "--" + flag.first + renderLabels(flag.second->labels), + flag.second->description)); + } printTable(out, table); } @@ -99,14 +113,14 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) if (string(*pos, 0, 2) == "--") { auto i = longFlags.find(string(*pos, 2)); if (i == longFlags.end()) return false; - return process("--" + i->first, i->second); + return process("--" + i->first, *i->second); } if (string(*pos, 0, 1) == "-" && pos->size() == 2) { auto c = (*pos)[1]; auto i = shortFlags.find(c); if (i == shortFlags.end()) return false; - return process(std::string("-") + c, i->second); + return process(std::string("-") + c, *i->second); } return false; diff --git a/src/libutil/args.hh b/src/libutil/args.hh index f70bb7823dc..aa11373d5f1 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -29,15 +29,18 @@ protected: /* Flags. 
*/ struct Flag { - char shortName; + typedef std::shared_ptr ptr; + std::string longName; + char shortName = 0; std::string description; Strings labels; - size_t arity; + size_t arity = 0; std::function handler; + std::string category; }; - std::map longFlags; - std::map shortFlags; + std::map longFlags; + std::map shortFlags; virtual bool processFlag(Strings::iterator & pos, Strings::iterator end); @@ -55,33 +58,53 @@ protected: virtual bool processArgs(const Strings & args, bool finish); + std::set hiddenCategories; + public: + class FlagMaker + { + Args & args; + Flag::ptr flag; + friend class Args; + FlagMaker(Args & args) : args(args), flag(std::make_shared()) { }; + public: + ~FlagMaker(); + FlagMaker & longName(const std::string & s) { flag->longName = s; return *this; }; + FlagMaker & shortName(char s) { flag->shortName = s; return *this; }; + FlagMaker & description(const std::string & s) { flag->description = s; return *this; }; + FlagMaker & labels(const Strings & ls) { flag->labels = ls; return *this; }; + FlagMaker & arity(size_t arity) { flag->arity = arity; return *this; }; + FlagMaker & handler(std::function handler) { flag->handler = handler; return *this; }; + FlagMaker & category(const std::string & s) { flag->category = s; return *this; }; + }; + + FlagMaker mkFlag(); + /* Helper functions for constructing flags / positional arguments. */ - void mkFlag(char shortName, const std::string & longName, - const Strings & labels, const std::string & description, - size_t arity, std::function handler) - { - auto flag = Flag{shortName, description, labels, arity, handler}; - if (shortName) shortFlags[shortName] = flag; - longFlags[longName] = flag; - } - void mkFlag(char shortName, const std::string & longName, const std::string & description, std::function fun) { - mkFlag(shortName, longName, {}, description, 0, std::bind(fun)); + mkFlag() + .shortName(shortName) + .longName(longName) + .description(description) + .handler(std::bind(fun)); } void mkFlag1(char shortName, const std::string & longName, const std::string & label, const std::string & description, std::function fun) { - mkFlag(shortName, longName, {label}, description, 1, [=](Strings ss) { - fun(ss.front()); - }); + mkFlag() + .shortName(shortName) + .longName(longName) + .labels({label}) + .description(description) + .arity(1) + .handler([=](Strings ss) { fun(ss.front()); }); } void mkFlag(char shortName, const std::string & name, @@ -105,9 +128,11 @@ public: void mkFlag(char shortName, const std::string & longName, const std::string & description, T * dest, const T & value) { - mkFlag(shortName, longName, {}, description, 0, [=](Strings ss) { - *dest = value; - }); + mkFlag() + .shortName(shortName) + .longName(longName) + .description(description) + .handler([=](Strings ss) { *dest = value; }); } template @@ -123,12 +148,18 @@ public: void mkFlag(char shortName, const std::string & longName, const std::string & description, std::function fun) { - mkFlag(shortName, longName, {"N"}, description, 1, [=](Strings ss) { - I n; - if (!string2Int(ss.front(), n)) - throw UsageError(format("flag ‘--%1%’ requires a integer argument") % longName); - fun(n); - }); + mkFlag() + .shortName(shortName) + .longName(longName) + .labels({"N"}) + .description(description) + .arity(1) + .handler([=](Strings ss) { + I n; + if (!string2Int(ss.front(), n)) + throw UsageError(format("flag ‘--%1%’ requires a integer argument") % longName); + fun(n); + }); } /* Expect a string argument. 
*/ diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 612fb6e6835..0682bcd5dbc 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -115,11 +115,11 @@ void Config::toJSON(JSONObject & out) } } -void Config::convertToArgs(Args & args) +void Config::convertToArgs(Args & args, const std::string & category) { for (auto & s : _settings) if (!s.second.isAlias) - s.second.setting->convertToArg(args); + s.second.setting->convertToArg(args, category); } AbstractSetting::AbstractSetting( @@ -135,7 +135,7 @@ void AbstractSetting::toJSON(JSONPlaceholder & out) out.write(to_string()); } -void AbstractSetting::convertToArg(Args & args) +void AbstractSetting::convertToArg(Args & args, const std::string & category) { } @@ -146,9 +146,14 @@ void BaseSetting::toJSON(JSONPlaceholder & out) } template -void BaseSetting::convertToArg(Args & args) +void BaseSetting::convertToArg(Args & args, const std::string & category) { - args.mkFlag(0, name, {}, description, 1, [=](Strings ss) { set(*ss.begin()); }); + args.mkFlag() + .longName(name) + .description(description) + .arity(1) + .handler([=](Strings ss) { set(*ss.begin()); }) + .category(category); } template<> void BaseSetting::set(const std::string & str) @@ -191,10 +196,18 @@ template<> std::string BaseSetting::to_string() return value ? "true" : "false"; } -template<> void BaseSetting::convertToArg(Args & args) +template<> void BaseSetting::convertToArg(Args & args, const std::string & category) { - args.mkFlag(0, name, {}, description, 0, [=](Strings ss) { value = true; }); - args.mkFlag(0, "no-" + name, {}, description, 0, [=](Strings ss) { value = false; }); + args.mkFlag() + .longName(name) + .description(description) + .handler([=](Strings ss) { value = true; }) + .category(category); + args.mkFlag() + .longName("no-" + name) + .description(description) + .handler([=](Strings ss) { value = false; }) + .category(category); } template<> void BaseSetting::set(const std::string & str) diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 994eab91170..99850c1cdfd 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -72,7 +72,7 @@ public: void toJSON(JSONObject & out); - void convertToArgs(Args & args); + void convertToArgs(Args & args, const std::string & category); }; class AbstractSetting @@ -109,7 +109,7 @@ protected: virtual void toJSON(JSONPlaceholder & out); - virtual void convertToArg(Args & args); + virtual void convertToArg(Args & args, const std::string & category); bool isOverriden() { return overriden; } }; @@ -144,7 +144,7 @@ public: std::string to_string() override; - void convertToArg(Args & args) override; + void convertToArg(Args & args, const std::string & category) override; void toJSON(JSONPlaceholder & out) override; }; diff --git a/src/nix/main.cc b/src/nix/main.cc index ea6838cd76c..88a602b8481 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -33,7 +33,9 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs mkFlag(0, "version", "show version information", std::bind(printVersion, programName)); - settings.convertToArgs(*this); + std::string cat = "config"; + settings.convertToArgs(*this, cat); + hiddenCategories.insert(cat); } void printFlags(std::ostream & out) override diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 3dd03771619..0e3eb9b694b 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -13,8 +13,13 @@ struct CmdCopySigs : StorePathsCommand CmdCopySigs() { - mkFlag('s', "substituter", {"store-uri"}, "use signatures from specified store", 1, - [&](Strings 
ss) { substituterUris.push_back(ss.front()); }); + mkFlag() + .longName("substituter") + .shortName('s') + .labels({"store-uri"}) + .description("use signatures from specified store") + .arity(1) + .handler([&](Strings ss) { substituterUris.push_back(ss.front()); }); } std::string name() override diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 8facb4bef8a..18533e6066c 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -19,8 +19,13 @@ struct CmdVerify : StorePathsCommand { mkFlag(0, "no-contents", "do not verify the contents of each store path", &noContents); mkFlag(0, "no-trust", "do not verify whether each store path is trusted", &noTrust); - mkFlag('s', "substituter", {"store-uri"}, "use signatures from specified store", 1, - [&](Strings ss) { substituterUris.push_back(ss.front()); }); + mkFlag() + .longName("substituter") + .shortName('s') + .labels({"store-uri"}) + .description("use signatures from specified store") + .arity(1) + .handler([&](Strings ss) { substituterUris.push_back(ss.front()); }); mkIntFlag('n', "sigs-needed", "require that each path has at least N valid signatures", &sigsNeeded); } From 7f5b750b401e98e9e2a346552aba5bd2e0a9203f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Jun 2017 16:07:34 +0200 Subject: [PATCH 0386/2196] Don't run pre-build-hook if we don't have a derivation This fixes a build failure on OS X when using Hydra or Nix 1.12's build-remote (since they don't copy the derivation to the build machine). --- src/libstore/build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d5fe41d1b16..bdec30151b0 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1950,7 +1950,7 @@ void DerivationGoal::startBuilder() } } - if (settings.preBuildHook != "") { + if (useChroot && settings.preBuildHook != "" && dynamic_cast(drv.get())) { printMsg(lvlChatty, format("executing pre-build hook ‘%1%’") % settings.preBuildHook); auto args = useChroot ? Strings({drvPath, chrootRootDir}) : From 847f19a5f7a558252bbde9b4c70efa5f7fac1f4f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Jun 2017 16:44:43 +0200 Subject: [PATCH 0387/2196] Provide a builtin default for $NIX_SSL_CERT_FILE This is mostly to ensure that when Nix is started on macOS via a launchd service or sshd (for a remote build), it gets a certificate bundle. 
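In effect the CA bundle is now resolved at run time in this order: $NIX_SSL_CERT_FILE, then $SSL_CERT_FILE, then the first well-known bundle path that exists on disk; if none is found, the downloader simply stops setting CURLOPT_CAINFO and curl keeps its built-in default. The following is a minimal, self-contained sketch of that fallback; resolveCaFile() is a hypothetical helper used only for illustration, not the code this patch adds (the real logic lives in Settings::Settings() in the globals.cc hunk below):

    #include <cstdlib>
    #include <string>
    #include <sys/stat.h>

    // Hypothetical helper illustrating the lookup order introduced here.
    static std::string resolveCaFile()
    {
        // 1. Explicit overrides via the environment.
        for (const char * var : {"NIX_SSL_CERT_FILE", "SSL_CERT_FILE"}) {
            const char * v = std::getenv(var);
            if (v && *v) return v;
        }

        // 2. First well-known certificate bundle that exists on disk.
        for (const char * fn : {"/etc/ssl/certs/ca-certificates.crt",
                                "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"}) {
            struct stat st;
            if (stat(fn, &st) == 0) return fn;
        }

        // 3. Nothing found: leave it empty so curl uses its own default.
        return "";
    }

This is also why the launchd plist no longer needs to hard-code NIX_SSL_CERT_FILE: the same default is computed wherever the daemon happens to be started from.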
--- misc/launchd/org.nixos.nix-daemon.plist.in | 5 ----- src/libstore/download.cc | 7 ++++--- src/libstore/globals.cc | 10 +++++++++- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in index c5ef97ee9a3..66fcd155ee9 100644 --- a/misc/launchd/org.nixos.nix-daemon.plist.in +++ b/misc/launchd/org.nixos.nix-daemon.plist.in @@ -12,10 +12,5 @@ /var/log/nix-daemon.log StandardOutPath /dev/null - EnvironmentVariables - - NIX_SSL_CERT_FILE - /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt - diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 63e498f0603..33ab1f02782 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -221,9 +221,10 @@ struct CurlDownloader : public Downloader if (request.head) curl_easy_setopt(req, CURLOPT_NOBODY, 1); - if (request.verifyTLS) - curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); - else { + if (request.verifyTLS) { + if (settings.caFile != "") + curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); + } else { curl_easy_setopt(req, CURLOPT_SSL_VERIFYPEER, 0); curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); } diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 2aceed27051..935018132d3 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -41,7 +41,15 @@ Settings::Settings() { buildUsersGroup = getuid() == 0 ? "nixbld" : ""; lockCPU = getEnv("NIX_AFFINITY_HACK", "1") == "1"; - caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt")); + + caFile = getEnv("NIX_SSL_CERT_FILE", getEnv("SSL_CERT_FILE", "")); + if (caFile == "") { + for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"}) + if (pathExists(fn)) { + caFile = fn; + break; + } + } /* Backwards compatibility. */ auto s = getEnv("NIX_REMOTE_SYSTEMS"); From 25230a17a9d0c22f97ed80b1a8f50566a4ff548d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Jun 2017 17:43:19 +0200 Subject: [PATCH 0388/2196] On macOS, don't use /var/folders for TMPDIR This broke "nix-store --serve". --- src/libmain/shared.cc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index d6c1c0c9cb4..915e7955014 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -138,6 +138,14 @@ void initNix() struct timeval tv; gettimeofday(&tv, 0); srandom(tv.tv_usec); + + /* On macOS, don't use the per-session TMPDIR (as set e.g. by + sshd). This breaks build users because they don't have access + to the TMPDIR, in particular in ‘nix-store --serve’. */ +#if __APPLE__ + if (getuid() == 0 && hasPrefix(getEnv("TMPDIR"), "/var/folders/")) + unsetenv("TMPDIR"); +#endif } From 177f3996e28967368791ba0e4ec036f3dbbb88d0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Jun 2017 18:34:48 +0200 Subject: [PATCH 0389/2196] Suppress spurious "killing process N: Operation not permitted" on macOS --- src/libutil/util.cc | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 16f4b232e6c..6bf4b3d9180 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -724,8 +724,15 @@ int Pid::kill() /* Send the requested signal to the child. If it has its own process group, send the signal to every process in the child process group (which hopefully includes *all* its children). */ - if (::kill(separatePG ? 
-pid : pid, killSignal) != 0) - printError((SysError(format("killing process %1%") % pid).msg())); + if (::kill(separatePG ? -pid : pid, killSignal) != 0) { + /* On BSDs, killing a process group will return EPERM if all + processes in the group are zombies (or something like + that). So try to detect and ignore that situation. */ +#if __FreeBSD__ || __APPLE__ + if (errno != EPERM || ::kill(pid, 0) != 0) +#endif + printError((SysError("killing process %d", pid).msg())); + } return wait(); } From 88b291ffc4aed550d3136a44580ba5f5d66dd922 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 14 Jun 2017 11:41:03 +0200 Subject: [PATCH 0390/2196] canonicalisePathMetaData(): Ignore security.selinux attribute Untested, hopefully fixes #1406. --- src/libstore/local-store.cc | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index ee36428af03..aa985ee53d9 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -421,10 +421,14 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0) throw SysError("querying extended attributes of ‘%s’", path); - for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) + for (auto & eaName: tokenizeString(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) { + /* Ignore SELinux security labels since these cannot be + removed even by root. */ + if (eaName == "security.selinux") continue; if (lremovexattr(path.c_str(), eaName.c_str()) == -1) throw SysError("removing extended attribute ‘%s’ from ‘%s’", eaName, path); - } + } + } #endif /* Fail if the file is not owned by the build user. This prevents From 38b7d55af1402e2323ed22a03d6d97b16dec52ad Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 14 Jun 2017 13:45:38 +0200 Subject: [PATCH 0391/2196] Remove redundant debug line --- src/libstore/s3-binary-cache-store.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 24545529601..fb36dbc7be7 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -273,8 +273,6 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore std::function failure) override { sync2async>(success, failure, [&]() { - debug(format("fetching ‘s3://%1%/%2%’...") % bucketName % path); - stats.get++; auto res = s3Helper.getObject(bucketName, path); From 1dcadadf745442e96db29eb652ed4e535b6352d6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Jun 2017 13:56:38 +0200 Subject: [PATCH 0392/2196] Add 1.11.10 release notes (cherry picked from commit 0fb60e4e0f66cc42c7c274acfcf00b51f6c829c4) --- doc/manual/release-notes/release-notes.xml | 1 + doc/manual/release-notes/rl-1.11.10.xml | 31 ++++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 doc/manual/release-notes/rl-1.11.10.xml diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml index 8c2deb39418..c4b14bc5499 100644 --- a/doc/manual/release-notes/release-notes.xml +++ b/doc/manual/release-notes/release-notes.xml @@ -13,6 +13,7 @@ --> + diff --git a/doc/manual/release-notes/rl-1.11.10.xml b/doc/manual/release-notes/rl-1.11.10.xml new file mode 100644 index 00000000000..13cb497d921 --- /dev/null +++ b/doc/manual/release-notes/rl-1.11.10.xml @@ -0,0 +1,31 @@ +
+ +Release 1.11.10 (2017-06-12) + +This release fixes a security bug in Nix’s “build user” build +isolation mechanism. Previously, Nix builders had the ability to +create setuid binaries owned by a nixbld +user. Such a binary could then be used by an attacker to assume a +nixbld identity and interfere with subsequent +builds running under the same UID. + +To prevent this issue, Nix now disallows builders to create +setuid and setgid binaries. On Linux, this is done using a seccomp BPF +filter. Note that this imposes a small performance penalty (e.g. 1% +when building GNU Hello). Using seccomp, we now also prevent the +creation of extended attributes and POSIX ACLs since these cannot be +represented in the NAR format and (in the case of POSIX ACLs) allow +bypassing regular Nix store permissions. On OS X, the restriction is +implemented using the existing sandbox mechanism, which now uses a +minimal “allow all except the creation of setuid/setgid binaries” +profile when regular sandboxing is disabled. On other platforms, the +“build user” mechanism is now disabled. + +Thanks go to Linus Heckemann for discovering and reporting this +bug. + +
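For readers unfamiliar with seccomp, the Linux half of this restriction can be pictured with the illustrative, self-contained sketch below, using libseccomp (the new RPM/Deb dependency added a few patches earlier). It is not the exact filter Nix installs, which as described above also blocks extended attributes and POSIX ACLs; it only shows the technique of denying the setuid/setgid bits in mode-changing syscalls while allowing everything else:

    #include <seccomp.h>
    #include <sys/stat.h>
    #include <cerrno>
    #include <stdexcept>

    // Sketch: make chmod-style syscalls fail with EPERM whenever the requested
    // mode contains the setuid or setgid bit; everything else stays allowed.
    // A filter like this would be installed in the builder process before exec().
    static void denySetuidSetgid()
    {
        scmp_filter_ctx ctx = seccomp_init(SCMP_ACT_ALLOW);
        if (!ctx) throw std::runtime_error("seccomp_init failed");

        for (int bit : {S_ISUID, S_ISGID}) {
            // chmod(2) and fchmod(2) take the mode as their second argument...
            for (int sc : {SCMP_SYS(chmod), SCMP_SYS(fchmod)})
                if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), sc, 1,
                        SCMP_A1(SCMP_CMP_MASKED_EQ, bit, bit)) != 0)
                    throw std::runtime_error("seccomp_rule_add failed");
            // ...while fchmodat(2) takes it as its third.
            if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(fchmodat), 1,
                    SCMP_A2(SCMP_CMP_MASKED_EQ, bit, bit)) != 0)
                throw std::runtime_error("seccomp_rule_add failed");
        }

        if (seccomp_load(ctx) != 0) throw std::runtime_error("seccomp_load failed");
        seccomp_release(ctx);
    }

The macOS side needs no BPF program at all: as the sandbox-defaults.sb and sandbox-minimal.sb profiles above show, a single (deny file-write-setugid) rule expresses the same policy.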
From a10951de08117dd2f9e7117fdd6fa61a7b4e2b72 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Jun 2017 14:04:52 +0200 Subject: [PATCH 0393/2196] OS X -> macOS (cherry picked from commit c20641ce569dc1fdeaeaa147b0292f258667f53b) --- doc/manual/release-notes/rl-1.11.10.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/release-notes/rl-1.11.10.xml b/doc/manual/release-notes/rl-1.11.10.xml index 13cb497d921..415388b3e2d 100644 --- a/doc/manual/release-notes/rl-1.11.10.xml +++ b/doc/manual/release-notes/rl-1.11.10.xml @@ -19,7 +19,7 @@ filter. Note that this imposes a small performance penalty (e.g. 1% when building GNU Hello). Using seccomp, we now also prevent the creation of extended attributes and POSIX ACLs since these cannot be represented in the NAR format and (in the case of POSIX ACLs) allow -bypassing regular Nix store permissions. On OS X, the restriction is +bypassing regular Nix store permissions. On macOS, the restriction is implemented using the existing sandbox mechanism, which now uses a minimal “allow all except the creation of setuid/setgid binaries” profile when regular sandboxing is disabled. On other platforms, the From b5bdfdef7320d74a98af1e53d64371e0cef2dbbe Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 14:19:21 +0200 Subject: [PATCH 0394/2196] macOS: Remove flags In particular, UF_IMMUTABLE (uchg) needs to be cleared to allow the path to be garbage-collected or optimised. See https://github.com/NixOS/nixpkgs/issues/25819. + the file from being garbage-collected. --- src/libstore/local-store.cc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index aa985ee53d9..74c74e672e2 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -400,6 +400,16 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe { checkInterrupt(); +#if __APPLE__ + /* Remove flags, in particular UF_IMMUTABLE which would prevent + the file from being garbage-collected. FIXME: Use + setattrlist() to remove other attributes as well. */ + if (lchflags(path.c_str(), 0)) { + if (errno != ENOTSUP) + throw SysError(format("clearing flags of path ‘%1%’") % path); + } +#endif + struct stat st; if (lstat(path.c_str(), &st)) throw SysError(format("getting attributes of path ‘%1%’") % path); From 1888f7889b107ecbca7ad47f3a1132c32a17d227 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 14:24:48 +0200 Subject: [PATCH 0395/2196] macOS: Ugly hack to make the tests succeed Sandboxes cannot be nested, so if Nix's build runs inside a sandbox, it cannot use a sandbox itself. I don't see a clean way to detect whether we're in a sandbox, so use a test-specific hack. https://github.com/NixOS/nix/issues/1413 --- src/libstore/build.cc | 5 ++--- tests/common.sh.in | 4 ++++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index bdec30151b0..c34083d2e2f 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2621,7 +2621,7 @@ void DerivationGoal::runChild() ; } #if __APPLE__ - else { + else if (getEnv("_NIX_TEST_NO_SANDBOX") == "") { /* This has to appear before import statements. 
*/ std::string sandboxProfile = "(version 1)\n"; @@ -2736,13 +2736,12 @@ void DerivationGoal::runChild() args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); args.push_back(drv->builder); } -#else +#endif else { builder = drv->builder.c_str(); string builderBasename = baseNameOf(drv->builder); args.push_back(builderBasename); } -#endif for (auto & i : drv->args) args.push_back(rewriteStrings(i, inputRewrites)); diff --git a/tests/common.sh.in b/tests/common.sh.in index 4565a490adf..6c3804a257a 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -15,6 +15,10 @@ export NIX_STATE_DIR=$TEST_ROOT/var/nix export NIX_CONF_DIR=$TEST_ROOT/etc export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests export _NIX_TEST_SHARED=$TEST_ROOT/shared +if [[ -n $NIX_STORE ]]; then + export _NIX_TEST_NO_SANDBOX=1 +fi +export _NIX_IN_TEST=$TEST_ROOT/shared export NIX_REMOTE=$NIX_REMOTE_ unset NIX_PATH export TEST_HOME=$TEST_ROOT/test-home From 00aa7c6705c073aab8b24ae945ea9a09d5d256aa Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 17:01:01 +0200 Subject: [PATCH 0396/2196] Show aws-sdk-cpp log messages --- src/libstore/s3-binary-cache-store.cc | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index fb36dbc7be7..39d98cd61e7 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -12,6 +12,8 @@ #include #include #include +#include +#include #include #include #include @@ -41,6 +43,16 @@ R && checkAws(const FormatOrString & fs, Aws::Utils::Outcome && outcome) return outcome.GetResultWithOwnership(); } +class AwsLogger : public Aws::Utils::Logging::FormattedLogSystem +{ + using Aws::Utils::Logging::FormattedLogSystem::FormattedLogSystem; + + void ProcessFormattedStatement(Aws::String && statement) override + { + debug("AWS: %s", chomp(statement)); + } +}; + static void initAWS() { static std::once_flag flag; @@ -51,6 +63,16 @@ static void initAWS() shared.cc), so don't let aws-sdk-cpp override it. */ options.cryptoOptions.initAndCleanupOpenSSL = false; + if (verbosity >= lvlDebug) { + options.loggingOptions.logLevel = + verbosity == lvlDebug + ? Aws::Utils::Logging::LogLevel::Debug + : Aws::Utils::Logging::LogLevel::Trace; + options.loggingOptions.logger_create_fn = [options]() { + return std::make_shared(options.loggingOptions.logLevel); + }; + } + Aws::InitAPI(options); }); } From 1c969611ba962a860744b2718fa6f989e7be5165 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 18:13:32 +0200 Subject: [PATCH 0397/2196] Suppress "will retry in N ms" for non-retriable errors Newer versions of aws-sdk-cpp call CalculateDelayBeforeNextRetry() even for non-retriable errors (like NoSuchKey) whih causes log spam in hydra-queue-runner. --- src/libstore/s3-binary-cache-store.cc | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 39d98cd61e7..f57227f024a 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -86,12 +86,13 @@ S3Helper::S3Helper(const string & region) /* Log AWS retries. 
*/ class RetryStrategy : public Aws::Client::DefaultRetryStrategy { - long CalculateDelayBeforeNextRetry(const Aws::Client::AWSError& error, long attemptedRetries) const override + bool ShouldRetry(const Aws::Client::AWSError& error, long attemptedRetries) const override { - auto res = Aws::Client::DefaultRetryStrategy::CalculateDelayBeforeNextRetry(error, attemptedRetries); - printError("AWS error '%s' (%s), will retry in %d ms", - error.GetExceptionName(), error.GetMessage(), res); - return res; + auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries); + if (retry) + printError("AWS error '%s' (%s), will retry in %d ms", + error.GetExceptionName(), error.GetMessage(), CalculateDelayBeforeNextRetry(error, attemptedRetries)); + return retry; } }; From b33621d4253addc4a0cdf1fb7a4082109aaafa78 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 18:15:23 +0200 Subject: [PATCH 0398/2196] Handle S3Errors::RESOURCE_NOT_FOUND from aws-sdk-cpp This is returned by recent versions. Also handle NO_SUCH_KEY even though the library doesn't actually return that at the moment. --- src/libstore/s3-binary-cache-store.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index f57227f024a..8029cd8cca0 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -233,8 +233,10 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore if (!res.IsSuccess()) { auto & error = res.GetError(); - if (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN // FIXME - && error.GetMessage().find("404") != std::string::npos) + if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND + || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY + || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN // FIXME + && error.GetMessage().find("404") != std::string::npos)) return false; throw Error(format("AWS error fetching ‘%s’: %s") % path % error.GetMessage()); } From 82a0d614cf17b2997ba5f895c1db4c2cde41e88d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 18:39:57 +0200 Subject: [PATCH 0399/2196] Support creating S3 caches in other regions than us-east-1 --- src/libstore/s3-binary-cache-store.cc | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 8029cd8cca0..06622210cbf 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -187,14 +187,20 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore if (res.GetError().GetErrorType() != Aws::S3::S3Errors::NO_SUCH_BUCKET) throw Error(format("AWS error checking bucket ‘%s’: %s") % bucketName % res.GetError().GetMessage()); + printInfo("creating S3 bucket ‘%s’...", bucketName); + + // Stupid S3 bucket locations. 
+ auto bucketConfig = Aws::S3::Model::CreateBucketConfiguration(); + if (s3Helper.config->region != "us-east-1") + bucketConfig.SetLocationConstraint( + Aws::S3::Model::BucketLocationConstraintMapper::GetBucketLocationConstraintForName( + s3Helper.config->region)); + checkAws(format("AWS error creating bucket ‘%s’") % bucketName, s3Helper.client->CreateBucket( Aws::S3::Model::CreateBucketRequest() .WithBucket(bucketName) - .WithCreateBucketConfiguration( - Aws::S3::Model::CreateBucketConfiguration() - /* .WithLocationConstraint( - Aws::S3::Model::BucketLocationConstraint::US) */ ))); + .WithCreateBucketConfiguration(bucketConfig))); } BinaryCacheStore::init(); From a1355917ecf75e1f1f37101505a351743d590121 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2017 18:40:22 +0200 Subject: [PATCH 0400/2196] Disable use of virtual hosting in aws-sdk-cpp Recently aws-sdk-cpp quietly switched to using S3 virtual host URIs (https://github.com/aws/aws-sdk-cpp/commit/69d9c53882), i.e. it sends requests to http://..s3.amazonaws.com rather than http://.s3.amazonaws.com/. However this interacts badly with curl connection reuse. For example, if we do the following: 1) Check whether a bucket exists using GetBucketLocation. 2) If it doesn't, create it using CreateBucket. 3) Do operations on the bucket. then 3) will fail for a minute or so with a NoSuchBucket exception, presumably because the server being hit is a fallback for cases when buckets don't exist. Disabling the use of virtual hosts ensures that 3) succeeds immediately. (I don't know what S3's consistency guarantees are for bucket creation, but in practice buckets appear to be available immediately.) --- src/libstore/s3-binary-cache-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 06622210cbf..145a8191c55 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -79,7 +79,7 @@ static void initAWS() S3Helper::S3Helper(const string & region) : config(makeConfig(region)) - , client(make_ref(*config)) + , client(make_ref(*config, true, false)) { } From 04ed11a978bf70e04042df95f8b125c2977e070d Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Mon, 19 Jun 2017 14:21:06 -0400 Subject: [PATCH 0401/2196] Let hydra choose an alternate list of systems --- release.nix | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/release.nix b/release.nix index aa2ee7290e6..af7611fbe60 100644 --- a/release.nix +++ b/release.nix @@ -1,15 +1,13 @@ { nix ? { outPath = ./.; revCount = 1234; shortRev = "abcdef"; } , nixpkgs ? { outPath = ; revCount = 1234; shortRev = "abcdef"; } , officialRelease ? false +, systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] }: let pkgs = import {}; - systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]; - - jobs = rec { From c7346a275c4cdcb59b3961241ddc52b79452d716 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 20 Jun 2017 12:11:22 +0200 Subject: [PATCH 0402/2196] Restore thunks on any exception There's no reason to restrict this to Error exceptions. This shouldn't matter to #1407 since the repl doesn't catch non-Error exceptions anyway, but you never know... 
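The pattern being generalised here, rolling the value back to a thunk on any exception and then rethrowing, is easiest to see in a small stand-alone sketch. The types below are simplified stand-ins rather than Nix's real Value/EvalState; the actual change is the one-line catch clause in the hunk below:

    #include <iostream>

    // Simplified stand-in for a lazily evaluated value.
    struct Value {
        enum { Thunk, BlackHole, Done } state = Thunk;
        int result = 0;
    };

    // Force a value. If evaluation throws *anything*, not just a particular
    // error class, restore the thunk so the value can be forced again later.
    template<typename F>
    void force(Value & v, F eval)
    {
        if (v.state == Value::Done) return;
        v.state = Value::BlackHole;   // "being evaluated", used to detect cycles
        try {
            v.result = eval();
            v.state = Value::Done;
        } catch (...) {               // catch everything, mirroring the change below
            v.state = Value::Thunk;   // roll back so a retry is possible
            throw;
        }
    }

    int main()
    {
        Value v;
        try {
            force(v, []() -> int { throw 42; });   // not derived from any error class
        } catch (int) {
            std::cout << "rolled back: " << (v.state == Value::Thunk) << "\n";
        }
        force(v, [] { return 7; });                // retrying now succeeds
        std::cout << "result: " << v.result << "\n";
    }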
--- src/libexpr/eval-inline.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 0748fbd3f3e..8cc50e56135 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -33,7 +33,7 @@ void EvalState::forceValue(Value & v, const Pos & pos) v.type = tBlackhole; //checkInterrupt(); expr->eval(*this, *env, v); - } catch (Error & e) { + } catch (...) { v.type = tThunk; v.thunk.env = env; v.thunk.expr = expr; From 596b0e0a045eb953b5ed3328d5ae8eb636e31373 Mon Sep 17 00:00:00 2001 From: David McFarland Date: Tue, 20 Jun 2017 09:10:10 -0300 Subject: [PATCH 0403/2196] Call SetDllDirectory("") after sqlite3 init on cygwin Cygwin sqlite3 is patched to call SetDllDirectory("/usr/bin") on init, which affects the current process and is inherited by child processes. It causes DLLs to be loaded from /usr/bin/ before $PATH, which breaks all sorts of things. A typical failures would be header/lib version mismatches (e.g. openssl when running checkPhase on openssh). We'll just set it back to the default value. Note that this is a problem with the cygwin version of sqlite3 (currently 3.18.0). nixpkgs doesn't have the problematic patch. --- src/libstore/local-store.cc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 74c74e672e2..c76294dcccb 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -30,6 +30,10 @@ #include #endif +#ifdef __CYGWIN__ +#include +#endif + #include @@ -281,6 +285,16 @@ void LocalStore::openDB(State & state, bool create) SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK) throw Error(format("cannot open Nix database ‘%1%’") % dbPath); +#ifdef __CYGWIN__ + /* The cygwin version of sqlite3 has a patch which calls + SetDllDirectory("/usr/bin") on init. It was intended to fix extension + loading, which we don't use, and the effect of SetDllDirectory is + inherited by child processes, and causes libraries to be loaded from + /usr/bin instead of $PATH. This breaks quite a few things (e.g. + checkPhase on openssh), so we set it back to default behaviour. 
*/ + SetDllDirectoryW(L""); +#endif + if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK) throwSQLiteError(db, "setting timeout"); From b591536e935a431b7f0a7a917ee04d62bd8b81d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Niklas=20Hamb=C3=BCchen?= Date: Sat, 24 Jun 2017 02:17:45 +0200 Subject: [PATCH 0404/2196] Fix potential crash/wrong result two hashes of unequal length are compared --- src/libutil/hash.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index fa1bb5d9718..e6f8aa85c6d 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -54,6 +54,8 @@ bool Hash::operator != (const Hash & h2) const bool Hash::operator < (const Hash & h) const { + if (hashSize < h.hashSize) return true; + if (hashSize > h.hashSize) return false; for (unsigned int i = 0; i < hashSize; i++) { if (hash[i] < h.hash[i]) return true; if (hash[i] > h.hash[i]) return false; From 90da34e421607ad6c40f3dea08709ae89db7a7e1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 28 Jun 2017 16:38:02 +0200 Subject: [PATCH 0405/2196] processGraph(): Call getEdges in parallel --- src/libutil/thread-pool.hh | 75 ++++++++++++++++++++++++-------------- 1 file changed, 47 insertions(+), 28 deletions(-) diff --git a/src/libutil/thread-pool.hh b/src/libutil/thread-pool.hh index b64dc52d473..361a9d33a73 100644 --- a/src/libutil/thread-pool.hh +++ b/src/libutil/thread-pool.hh @@ -70,50 +70,69 @@ void processGraph( struct Graph { std::set left; std::map> refs, rrefs; - std::function wrap; }; - ref> graph_ = make_ref>(); + Sync graph_(Graph{nodes, {}, {}}); - auto wrapWork = [&pool, graph_, processNode](const T & node) { + std::function worker; + + worker = [&](const T & node) { + + { + auto graph(graph_.lock()); + auto i = graph->refs.find(node); + if (i == graph->refs.end()) + goto getRefs; + goto doWork; + } + + getRefs: + { + auto refs = getEdges(node); + refs.erase(node); + + { + auto graph(graph_.lock()); + for (auto & ref : refs) + if (graph->left.count(ref)) { + graph->refs[node].insert(ref); + graph->rrefs[ref].insert(node); + } + if (graph->refs[node].empty()) + goto doWork; + } + } + + return; + + doWork: processNode(node); - /* Enqueue work for all nodes that were waiting on this one. */ + /* Enqueue work for all nodes that were waiting on this one + and have no unprocessed dependencies. */ { - auto graph(graph_->lock()); - graph->left.erase(node); + auto graph(graph_.lock()); for (auto & rref : graph->rrefs[node]) { auto & refs(graph->refs[rref]); auto i = refs.find(node); assert(i != refs.end()); refs.erase(i); if (refs.empty()) - pool.enqueue(std::bind(graph->wrap, rref)); + pool.enqueue(std::bind(worker, rref)); } + graph->left.erase(node); + graph->refs.erase(node); + graph->rrefs.erase(node); } }; - { - auto graph(graph_->lock()); - graph->left = nodes; - graph->wrap = wrapWork; - } - - /* Build the dependency graph; enqueue all nodes with no - dependencies. 
*/ - for (auto & node : nodes) { - auto refs = getEdges(node); - { - auto graph(graph_->lock()); - for (auto & ref : refs) - if (ref != node && graph->left.count(ref)) { - graph->refs[node].insert(ref); - graph->rrefs[ref].insert(node); - } - if (graph->refs[node].empty()) - pool.enqueue(std::bind(graph->wrap, node)); - } - } + for (auto & node : nodes) + pool.enqueue(std::bind(worker, std::ref(node))); + + pool.process(); + + if (!graph_.lock()->left.empty()) + throw Error("graph processing incomplete (cyclic reference?)"); } } From fcca702a96a8ca0e73f6d035052c30121776aeba Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 28 Jun 2017 18:11:01 +0200 Subject: [PATCH 0406/2196] Replace a few bool flags with enums Functions like copyClosure() had 3 bool arguments, which creates a severe risk of mixing up arguments. Also, implement copyClosure() using copyPaths(). --- src/build-remote/build-remote.cc | 4 +- src/libexpr/eval.cc | 1 + src/libexpr/eval.hh | 3 +- src/libstore/binary-cache-store.cc | 10 +-- src/libstore/binary-cache-store.hh | 6 +- src/libstore/build.cc | 18 ++--- src/libstore/derivations.cc | 2 +- src/libstore/derivations.hh | 3 +- src/libstore/download.cc | 4 +- src/libstore/export-import.cc | 4 +- src/libstore/legacy-ssh-store.cc | 9 +-- src/libstore/local-store.cc | 16 ++--- src/libstore/local-store.hh | 15 ++-- src/libstore/remote-store.cc | 12 ++-- src/libstore/remote-store.hh | 11 +-- src/libstore/store-api.cc | 91 +++++++++--------------- src/libstore/store-api.hh | 38 +++++++--- src/nix-copy-closure/nix-copy-closure.cc | 6 +- src/nix-daemon/nix-daemon.cc | 10 +-- src/nix-env/nix-env.cc | 4 +- src/nix-instantiate/nix-instantiate.cc | 4 +- src/nix-store/nix-store.cc | 8 +-- 22 files changed, 138 insertions(+), 141 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 7ffbdca7c0f..8719959f0b6 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -201,7 +201,7 @@ int main (int argc, char * * argv) printError("somebody is hogging the upload lock for ‘%s’, continuing..."); alarm(0); signal(SIGALRM, old); - copyPaths(store, ref(sshStore), inputs, false, true); + copyPaths(store, ref(sshStore), inputs, NoRepair, NoCheckSigs); uploadLock = -1; BasicDerivation drv(readDerivation(drvPath)); @@ -219,7 +219,7 @@ int main (int argc, char * * argv) if (!missing.empty()) { setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */ - copyPaths(ref(sshStore), store, missing, false, true); + copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs); } return; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0cdce602d7b..ca4c9a373a3 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -293,6 +293,7 @@ EvalState::EvalState(const Strings & _searchPath, ref store) , sWrong(symbols.create("wrong")) , sStructuredAttrs(symbols.create("__structuredAttrs")) , sBuilder(symbols.create("builder")) + , repair(NoRepair) , store(store) , baseEnv(allocEnv(128)) , staticBaseEnv(false, 0) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 46d5a1cc866..1e32db1e86b 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -14,6 +14,7 @@ namespace nix { class Store; class EvalState; +enum RepairFlag : bool; typedef void (* PrimOpFun) (EvalState & state, const Pos & pos, Value * * args, Value & v); @@ -73,7 +74,7 @@ public: /* If set, force copying files to the Nix store even if they already exist there. 
*/ - bool repair = false; + RepairFlag repair; /* If set, don't allow access to files outside of the Nix search path or to environment variables. */ diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 46c5aa21b2e..8ce5f5bbc7c 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -134,7 +134,7 @@ Path BinaryCacheStore::narInfoFileFor(const Path & storePath) } void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, std::shared_ptr accessor) + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) { if (!repair && isValidPath(info.path)) return; @@ -328,7 +328,7 @@ void BinaryCacheStore::queryPathInfoUncached(const Path & storePath, } Path BinaryCacheStore::addToStore(const string & name, const Path & srcPath, - bool recursive, HashType hashAlgo, PathFilter & filter, bool repair) + bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { // FIXME: some cut&paste from LocalStore::addToStore(). @@ -349,13 +349,13 @@ Path BinaryCacheStore::addToStore(const string & name, const Path & srcPath, ValidPathInfo info; info.path = makeFixedOutputPath(recursive, h, name); - addToStore(info, sink.s, repair, false, 0); + addToStore(info, sink.s, repair, CheckSigs, nullptr); return info.path; } Path BinaryCacheStore::addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair) + const PathSet & references, RepairFlag repair) { ValidPathInfo info; info.path = computeStorePathForText(name, s, references); @@ -364,7 +364,7 @@ Path BinaryCacheStore::addTextToStore(const string & name, const string & s, if (repair || !isValidPath(info.path)) { StringSink sink; dumpString(s, sink); - addToStore(info, sink.s, repair, false, 0); + addToStore(info, sink.s, repair, CheckSigs, nullptr); } return info.path; diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 87d4aa43838..bf5a56ab4dc 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -85,15 +85,15 @@ public: bool wantMassQuery() override { return wantMassQuery_; } void addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override; Path addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, - PathFilter & filter, bool repair) override; + PathFilter & filter, RepairFlag repair) override; Path addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair) override; + const PathSet & references, RepairFlag repair) override; void narFromPath(const Path & path, Sink & sink) override; diff --git a/src/libstore/build.cc b/src/libstore/build.cc index c34083d2e2f..6c740d99c58 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -262,7 +262,7 @@ class Worker GoalPtr makeDerivationGoal(const Path & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); std::shared_ptr makeBasicDerivationGoal(const Path & drvPath, const BasicDerivation & drv, BuildMode buildMode = bmNormal); - GoalPtr makeSubstitutionGoal(const Path & storePath, bool repair = false); + GoalPtr makeSubstitutionGoal(const Path & storePath, RepairFlag repair = NoRepair); /* Remove a dead goal. */ void removeGoal(GoalPtr goal); @@ -1087,7 +1087,7 @@ void DerivationGoal::haveDerivation() them. 
*/ if (settings.useSubstitutes && drv->substitutesAllowed()) for (auto & i : invalidOutputs) - addWaitee(worker.makeSubstitutionGoal(i, buildMode == bmRepair)); + addWaitee(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair)); if (waitees.empty()) /* to prevent hang (no wake-up event) */ outputsSubstituted(); @@ -1195,7 +1195,7 @@ void DerivationGoal::repairClosure() printError(format("found corrupted or missing path ‘%1%’ in the output closure of ‘%2%’") % i % drvPath); Path drvPath2 = outputsToDrv[i]; if (drvPath2 == "") - addWaitee(worker.makeSubstitutionGoal(i, true)); + addWaitee(worker.makeSubstitutionGoal(i, Repair)); else addWaitee(worker.makeDerivationGoal(drvPath2, PathSet(), bmRepair)); } @@ -3291,7 +3291,7 @@ class SubstitutionGoal : public Goal std::promise promise; /* Whether to try to repair a valid path. */ - bool repair; + RepairFlag repair; /* Location where we're downloading the substitute. Differs from storePath when doing a repair. */ @@ -3301,7 +3301,7 @@ class SubstitutionGoal : public Goal GoalState state; public: - SubstitutionGoal(const Path & storePath, Worker & worker, bool repair = false); + SubstitutionGoal(const Path & storePath, Worker & worker, RepairFlag repair = NoRepair); ~SubstitutionGoal(); void timedOut() override { abort(); }; @@ -3337,7 +3337,7 @@ class SubstitutionGoal : public Goal }; -SubstitutionGoal::SubstitutionGoal(const Path & storePath, Worker & worker, bool repair) +SubstitutionGoal::SubstitutionGoal(const Path & storePath, Worker & worker, RepairFlag repair) : Goal(worker) , hasSubstitute(false) , repair(repair) @@ -3600,7 +3600,7 @@ std::shared_ptr Worker::makeBasicDerivationGoal(const Path & drv } -GoalPtr Worker::makeSubstitutionGoal(const Path & path, bool repair) +GoalPtr Worker::makeSubstitutionGoal(const Path & path, RepairFlag repair) { GoalPtr goal = substitutionGoals[path].lock(); if (!goal) { @@ -3953,7 +3953,7 @@ void LocalStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) if (isDerivation(i2.first)) goals.insert(worker.makeDerivationGoal(i2.first, i2.second, buildMode)); else - goals.insert(worker.makeSubstitutionGoal(i, buildMode)); + goals.insert(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair)); } worker.run(goals); @@ -4011,7 +4011,7 @@ void LocalStore::ensurePath(const Path & path) void LocalStore::repairPath(const Path & path) { Worker worker(*this); - GoalPtr goal = worker.makeSubstitutionGoal(path, true); + GoalPtr goal = worker.makeSubstitutionGoal(path, Repair); Goals goals = {goal}; worker.run(goals); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 0c6ceb9f674..bb7b8fe628a 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -71,7 +71,7 @@ bool BasicDerivation::canBuildLocally() const Path writeDerivation(ref store, - const Derivation & drv, const string & name, bool repair) + const Derivation & drv, const string & name, RepairFlag repair) { PathSet references; references.insert(drv.inputSrcs.begin(), drv.inputSrcs.end()); diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 9717a81e469..7b97730d3bf 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -2,6 +2,7 @@ #include "types.hh" #include "hash.hh" +#include "store-api.hh" #include @@ -85,7 +86,7 @@ class Store; /* Write a derivation to the Nix store, and return its path. 
*/ Path writeDerivation(ref store, - const Derivation & drv, const string & name, bool repair = false); + const Derivation & drv, const string & name, RepairFlag repair = NoRepair); /* Read a derivation from a file. */ Derivation readDerivation(const Path & drvPath); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 33ab1f02782..4f3bf2d14f1 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -631,7 +631,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa info.narHash = hashString(htSHA256, *sink.s); info.narSize = sink.s->size(); info.ca = makeFixedOutputCA(false, hash); - store->addToStore(info, sink.s, false, true); + store->addToStore(info, sink.s, NoRepair, NoCheckSigs); storePath = info.path; } @@ -660,7 +660,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa AutoDelete autoDelete(tmpDir, true); // FIXME: this requires GNU tar for decompression. runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"}); - unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, false); + unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, NoRepair); } replaceSymlink(unpackedStorePath, unpackedLink); storePath = unpackedStorePath; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 6e8bc692cdf..1b3a43df32d 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -61,7 +61,7 @@ void Store::exportPath(const Path & path, Sink & sink) hashAndWriteSink << exportMagic << path << info->references << info->deriver << 0; } -Paths Store::importPaths(Source & source, std::shared_ptr accessor, bool dontCheckSigs) +Paths Store::importPaths(Source & source, std::shared_ptr accessor, CheckSigsFlag checkSigs) { Paths res; while (true) { @@ -95,7 +95,7 @@ Paths Store::importPaths(Source & source, std::shared_ptr accessor, if (readInt(source) == 1) readString(source); - addToStore(info, tee.source.data, false, dontCheckSigs, accessor); + addToStore(info, tee.source.data, NoRepair, checkSigs, accessor); res.push_back(info.path); } diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index e09932e3d18..a84f85c1b95 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -113,7 +113,7 @@ struct LegacySSHStore : public Store } void addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override { debug("adding path ‘%s’ to remote host ‘%s’", info.path, host); @@ -168,11 +168,11 @@ struct LegacySSHStore : public Store Path addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, - PathFilter & filter, bool repair) override + PathFilter & filter, RepairFlag repair) override { unsupported(); } Path addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair) override + const PathSet & references, RepairFlag repair) override { unsupported(); } BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv, @@ -249,7 +249,8 @@ struct LegacySSHStore : public Store out.insert(res.begin(), res.end()); } - PathSet queryValidPaths(const PathSet & paths, bool maybeSubstitute = false) override + PathSet queryValidPaths(const PathSet & paths, + SubstituteFlag maybeSubstitute = NoSubstitute) override { auto conn(connections->get()); diff --git 
a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c76294dcccb..a7a94a8b9e8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -718,7 +718,7 @@ bool LocalStore::isValidPathUncached(const Path & path) } -PathSet LocalStore::queryValidPaths(const PathSet & paths, bool maybeSubstitute) +PathSet LocalStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) { PathSet res; for (auto & i : paths) @@ -961,7 +961,7 @@ void LocalStore::invalidatePath(State & state, const Path & path) void LocalStore::addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, std::shared_ptr accessor) + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) { assert(info.narHash); @@ -974,7 +974,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & throw Error("size mismatch importing path ‘%s’; expected %s, got %s", info.path, info.narSize, nar->size()); - if (requireSigs && !dontCheckSigs && !info.checkSignatures(*this, publicKeys)) + if (requireSigs && checkSigs && !info.checkSignatures(*this, publicKeys)) throw Error("cannot add path ‘%s’ because it lacks a valid signature", info.path); addTempRoot(info.path); @@ -1012,7 +1012,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & Path LocalStore::addToStoreFromDump(const string & dump, const string & name, - bool recursive, HashType hashAlgo, bool repair) + bool recursive, HashType hashAlgo, RepairFlag repair) { Hash h = hashString(hashAlgo, dump); @@ -1070,7 +1070,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name, Path LocalStore::addToStore(const string & name, const Path & _srcPath, - bool recursive, HashType hashAlgo, PathFilter & filter, bool repair) + bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { Path srcPath(absPath(_srcPath)); @@ -1088,7 +1088,7 @@ Path LocalStore::addToStore(const string & name, const Path & _srcPath, Path LocalStore::addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair) + const PathSet & references, RepairFlag repair) { auto hash = hashString(htSHA256, s); auto dstPath = makeTextPath(name, hash, references); @@ -1170,7 +1170,7 @@ void LocalStore::invalidatePathChecked(const Path & path) } -bool LocalStore::verifyStore(bool checkContents, bool repair) +bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) { printError(format("reading the Nix store...")); @@ -1255,7 +1255,7 @@ bool LocalStore::verifyStore(bool checkContents, bool repair) void LocalStore::verifyPath(const Path & path, const PathSet & store, - PathSet & done, PathSet & validPaths, bool repair, bool & errors) + PathSet & done, PathSet & validPaths, RepairFlag repair, bool & errors) { checkInterrupt(); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index f2c40e96464..551c6b506fb 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -98,7 +98,8 @@ public: bool isValidPathUncached(const Path & path) override; - PathSet queryValidPaths(const PathSet & paths, bool maybeSubstitute = false) override; + PathSet queryValidPaths(const PathSet & paths, + SubstituteFlag maybeSubstitute = NoSubstitute) override; PathSet queryAllValidPaths() override; @@ -122,22 +123,22 @@ public: SubstitutablePathInfos & infos) override; void addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr 
accessor) override; Path addToStore(const string & name, const Path & srcPath, bool recursive, HashType hashAlgo, - PathFilter & filter, bool repair) override; + PathFilter & filter, RepairFlag repair) override; /* Like addToStore(), but the contents of the path are contained in `dump', which is either a NAR serialisation (if recursive == true) or simply the contents of a regular file (if recursive == false). */ Path addToStoreFromDump(const string & dump, const string & name, - bool recursive = true, HashType hashAlgo = htSHA256, bool repair = false); + bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair); Path addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair) override; + const PathSet & references, RepairFlag repair) override; void buildPaths(const PathSet & paths, BuildMode buildMode) override; @@ -174,7 +175,7 @@ public: /* Optimise a single store path. */ void optimisePath(const Path & path); - bool verifyStore(bool checkContents, bool repair) override; + bool verifyStore(bool checkContents, RepairFlag repair) override; /* Register the validity of a path, i.e., that `path' exists, that the paths referenced by it exists, and in the case of an output @@ -212,7 +213,7 @@ private: void invalidatePathChecked(const Path & path); void verifyPath(const Path & path, const PathSet & store, - PathSet & done, PathSet & validPaths, bool repair, bool & errors); + PathSet & done, PathSet & validPaths, RepairFlag repair, bool & errors); void updatePathInfo(State & state, const ValidPathInfo & info); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index be8819bbc00..7337e406d2e 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -185,7 +185,7 @@ bool RemoteStore::isValidPathUncached(const Path & path) } -PathSet RemoteStore::queryValidPaths(const PathSet & paths, bool maybeSubstitute) +PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(connections->get()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { @@ -357,7 +357,7 @@ Path RemoteStore::queryPathFromHashPart(const string & hashPart) void RemoteStore::addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, std::shared_ptr accessor) + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) { auto conn(connections->get()); @@ -390,7 +390,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, const ref << info.path << info.deriver << printHash(info.narHash) << info.references << info.registrationTime << info.narSize << info.ultimate << info.sigs << info.ca - << repair << dontCheckSigs; + << repair << !checkSigs; conn->to(*nar); conn->processStderr(); } @@ -398,7 +398,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, const ref Path RemoteStore::addToStore(const string & name, const Path & _srcPath, - bool recursive, HashType hashAlgo, PathFilter & filter, bool repair) + bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); @@ -434,7 +434,7 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath, Path RemoteStore::addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair) + const PathSet & references, RepairFlag repair) { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); @@ -570,7 +570,7 @@ 
void RemoteStore::optimiseStore() } -bool RemoteStore::verifyStore(bool checkContents, bool repair) +bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair) { auto conn(connections->get()); conn->to << wopVerifyStore << checkContents << repair; diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index ed430e4cabb..e370e4797d2 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -31,7 +31,8 @@ public: bool isValidPathUncached(const Path & path) override; - PathSet queryValidPaths(const PathSet & paths, bool maybeSubstitute = false) override; + PathSet queryValidPaths(const PathSet & paths, + SubstituteFlag maybeSubstitute = NoSubstitute) override; PathSet queryAllValidPaths() override; @@ -55,15 +56,15 @@ public: SubstitutablePathInfos & infos) override; void addToStore(const ValidPathInfo & info, const ref & nar, - bool repair, bool dontCheckSigs, + RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) override; Path addToStore(const string & name, const Path & srcPath, bool recursive = true, HashType hashAlgo = htSHA256, - PathFilter & filter = defaultPathFilter, bool repair = false) override; + PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override; Path addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair = false) override; + const PathSet & references, RepairFlag repair) override; void buildPaths(const PathSet & paths, BuildMode buildMode) override; @@ -84,7 +85,7 @@ public: void optimiseStore() override; - bool verifyStore(bool checkContents, bool repair) override; + bool verifyStore(bool checkContents, RepairFlag repair) override; void addSignatures(const Path & storePath, const StringSet & sigs) override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 76ed9942256..39b9466162f 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -378,7 +378,7 @@ void Store::queryPathInfo(const Path & storePath, } -PathSet Store::queryValidPaths(const PathSet & paths, bool maybeSubstitute) +PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) { struct State { @@ -537,14 +537,14 @@ void Store::buildPaths(const PathSet & paths, BuildMode buildMode) void copyStorePath(ref srcStore, ref dstStore, - const Path & storePath, bool repair, bool dontCheckSigs) + const Path & storePath, RepairFlag repair, CheckSigsFlag checkSigs) { auto info = srcStore->queryPathInfo(storePath); StringSink sink; srcStore->narFromPath({storePath}, sink); - if (!info->narHash && dontCheckSigs) { + if (!info->narHash && !checkSigs) { auto info2 = make_ref(*info); info2->narHash = hashString(htSHA256, *sink.s); if (!info->narSize) info2->narSize = sink.s->size(); @@ -561,33 +561,47 @@ void copyStorePath(ref srcStore, ref dstStore, assert(info->narHash); - dstStore->addToStore(*info, sink.s, repair, dontCheckSigs); + dstStore->addToStore(*info, sink.s, repair, checkSigs); } -void copyClosure(ref srcStore, ref dstStore, - const PathSet & storePaths, bool repair, bool dontCheckSigs) +void copyPaths(ref srcStore, ref dstStore, const PathSet & storePaths, + RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute) { - PathSet closure; + PathSet valid = dstStore->queryValidPaths(storePaths, substitute); + + PathSet missing; for (auto & path : storePaths) - srcStore->computeFSClosure(path, closure); + if (!valid.count(path)) missing.insert(path); - // FIXME: use copyStorePaths() + ThreadPool pool; - 
PathSet valid = dstStore->queryValidPaths(closure); + processGraph(pool, + PathSet(missing.begin(), missing.end()), - if (valid.size() == closure.size()) return; + [&](const Path & storePath) { + if (dstStore->isValidPath(storePath)) return PathSet(); + return srcStore->queryPathInfo(storePath)->references; + }, - Paths sorted = srcStore->topoSortPaths(closure); + [&](const Path & storePath) { + checkInterrupt(); - Paths missing; - for (auto i = sorted.rbegin(); i != sorted.rend(); ++i) - if (!valid.count(*i)) missing.push_back(*i); + if (!dstStore->isValidPath(storePath)) { + printError("copying ‘%s’...", storePath); + copyStorePath(srcStore, dstStore, storePath, repair, checkSigs); + } + }); +} - printMsg(lvlDebug, format("copying %1% missing paths") % missing.size()); - for (auto & i : missing) - copyStorePath(srcStore, dstStore, i, repair, dontCheckSigs); +void copyClosure(ref srcStore, ref dstStore, + const PathSet & storePaths, RepairFlag repair, CheckSigsFlag checkSigs, + SubstituteFlag substitute) +{ + PathSet closure; + srcStore->computeFSClosure({storePaths}, closure); + copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute); } @@ -812,45 +826,4 @@ std::list> getDefaultSubstituters() } -void copyPaths(ref from, ref to, const PathSet & storePaths, - bool substitute, bool dontCheckSigs) -{ - PathSet valid = to->queryValidPaths(storePaths, substitute); - - PathSet missing; - for (auto & path : storePaths) - if (!valid.count(path)) missing.insert(path); - - std::string copiedLabel = "copied"; - - //logger->setExpected(copiedLabel, missing.size()); - - ThreadPool pool; - - processGraph(pool, - PathSet(missing.begin(), missing.end()), - - [&](const Path & storePath) { - if (to->isValidPath(storePath)) return PathSet(); - return from->queryPathInfo(storePath)->references; - }, - - [&](const Path & storePath) { - checkInterrupt(); - - if (!to->isValidPath(storePath)) { - //Activity act(*logger, lvlInfo, format("copying ‘%s’...") % storePath); - - copyStorePath(from, to, storePath, false, dontCheckSigs); - - //logger->incProgress(copiedLabel); - } else - ; - //logger->incExpected(copiedLabel, -1); - }); - - pool.process(); -} - - } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 929c95a0f2f..c625a363033 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -32,6 +32,11 @@ class Store; class JSONPlaceholder; +enum RepairFlag : bool { NoRepair = false, Repair = true }; +enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true }; +enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true }; + + /* Size of the hash part of store paths, in base-32 characters. */ const size_t storePathHashLen = 32; // i.e. 160 bits @@ -332,7 +337,7 @@ public: /* Query which of the given paths is valid. Optionally, try to substitute missing paths. */ virtual PathSet queryValidPaths(const PathSet & paths, - bool maybeSubstitute = false); + SubstituteFlag maybeSubstitute = NoSubstitute); /* Query the set of all valid paths. Note that for some store backends, the name part of store paths may be omitted @@ -392,7 +397,7 @@ public: /* Import a path into the store. */ virtual void addToStore(const ValidPathInfo & info, const ref & nar, - bool repair = false, bool dontCheckSigs = false, + RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs, std::shared_ptr accessor = 0) = 0; /* Copy the contents of a path to the store and register the @@ -401,12 +406,12 @@ public: libutil/archive.hh). 
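
The reworked copyPaths() above hands the ordering problem to processGraph(): each missing path's references are queried (concurrently in the real code), recorded as forward and reverse edges, and a path is copied only once all of its references are present, with the last finished reference re-enqueueing it. The single-threaded sketch below shows just that bookkeeping; processGraphSketch and the Path/PathSet aliases are illustrative, a FIFO queue stands in for the thread pool, and the locking of the real implementation is omitted:

    #include <functional>
    #include <iostream>
    #include <map>
    #include <queue>
    #include <set>
    #include <string>

    using Path = std::string;
    using PathSet = std::set<Path>;

    void processGraphSketch(const PathSet & nodes,
        std::function<PathSet(const Path &)> getEdges,
        std::function<void(const Path &)> processNode)
    {
        PathSet left = nodes;                  // not yet processed
        std::map<Path, PathSet> refs, rrefs;   // deps and reverse deps
        std::queue<Path> ready;

        for (auto & node : nodes) {
            auto edges = getEdges(node);
            edges.erase(node);                 // ignore self-references
            for (auto & ref : edges)
                if (left.count(ref)) {
                    refs[node].insert(ref);
                    rrefs[ref].insert(node);
                }
            if (refs[node].empty()) ready.push(node);
        }

        while (!ready.empty()) {
            Path node = ready.front(); ready.pop();
            processNode(node);                 // e.g. copy the store path
            left.erase(node);
            for (auto & waiter : rrefs[node]) {
                refs[waiter].erase(node);
                if (refs[waiter].empty()) ready.push(waiter);
            }
        }

        if (!left.empty())
            std::cout << left.size() << " node(s) stuck on a cycle\n";
    }

As in the patch, anything still unprocessed at the end points at a cyclic reference, which processGraph() reports as "graph processing incomplete (cyclic reference?)".
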
*/ virtual Path addToStore(const string & name, const Path & srcPath, bool recursive = true, HashType hashAlgo = htSHA256, - PathFilter & filter = defaultPathFilter, bool repair = false) = 0; + PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0; /* Like addToStore, but the contents written to the output path is a regular file containing the given string. */ virtual Path addTextToStore(const string & name, const string & s, - const PathSet & references, bool repair = false) = 0; + const PathSet & references, RepairFlag repair = NoRepair) = 0; /* Write a NAR dump of a store path. */ virtual void narFromPath(const Path & path, Sink & sink) = 0; @@ -496,7 +501,7 @@ public: /* Check the integrity of the Nix store. Returns true if errors remain. */ - virtual bool verifyStore(bool checkContents, bool repair) { return false; }; + virtual bool verifyStore(bool checkContents, RepairFlag repair = NoRepair) { return false; }; /* Return an object to access files in the Nix store. */ virtual ref getFSAccessor() = 0; @@ -548,7 +553,7 @@ public: preloaded into the specified FS accessor to speed up subsequent access. */ Paths importPaths(Source & source, std::shared_ptr accessor, - bool dontCheckSigs = false); + CheckSigsFlag checkSigs = CheckSigs); struct Stats { @@ -650,12 +655,26 @@ void checkStoreName(const string & name); /* Copy a path from one store to another. */ void copyStorePath(ref srcStore, ref dstStore, - const Path & storePath, bool repair = false, bool dontCheckSigs = false); + const Path & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); + + +/* Copy store paths from one store to another. The paths may be copied + in parallel. They are copied in a topologically sorted order + (i.e. if A is a reference of B, then A is copied before B), but + the set of store paths is not automatically closed; use + copyClosure() for that. */ +void copyPaths(ref srcStore, ref dstStore, const PathSet & storePaths, + RepairFlag repair = NoRepair, + CheckSigsFlag checkSigs = CheckSigs, + SubstituteFlag substitute = NoSubstitute); /* Copy the closure of the specified paths from one store to another. */ void copyClosure(ref srcStore, ref dstStore, - const PathSet & storePaths, bool repair = false, bool dontCheckSigs = false); + const PathSet & storePaths, + RepairFlag repair = NoRepair, + CheckSigsFlag checkSigs = CheckSigs, + SubstituteFlag substitute = NoSubstitute); /* Remove the temporary roots file for this process. 
Any temporary @@ -694,9 +713,6 @@ ref openStore(const std::string & uri = getEnv("NIX_REMOTE"), const Store::Params & extraParams = Store::Params()); -void copyPaths(ref from, ref to, const PathSet & storePaths, - bool substitute = false, bool dontCheckSigs = false); - enum StoreType { tDaemon, tLocal, diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index dc324abcb3b..0c69bd41356 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -12,7 +12,7 @@ int main(int argc, char ** argv) auto toMode = true; auto includeOutputs = false; auto dryRun = false; - auto useSubstitutes = false; + auto useSubstitutes = NoSubstitute; std::string sshHost; PathSet storePaths; @@ -36,7 +36,7 @@ int main(int argc, char ** argv) else if (*arg == "--dry-run") dryRun = true; else if (*arg == "--use-substitutes" || *arg == "-s") - useSubstitutes = true; + useSubstitutes = Substitute; else if (sshHost.empty()) sshHost = *arg; else @@ -58,6 +58,6 @@ int main(int argc, char ** argv) PathSet closure; from->computeFSClosure(storePaths2, closure, false, includeOutputs); - copyPaths(from, to, closure, useSubstitutes, true); + copyPaths(from, to, closure, NoRepair, NoCheckSigs, useSubstitutes); }); } diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 44127635ded..c9c16776634 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -304,7 +304,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe string s = readString(from); PathSet refs = readStorePaths(*store, from); startWork(); - Path path = store->addTextToStore(suffix, s, refs, false); + Path path = store->addTextToStore(suffix, s, refs, NoRepair); stopWork(); to << path; break; @@ -324,7 +324,8 @@ static void performOp(ref store, bool trusted, unsigned int clientVe case wopImportPaths: { startWork(); TunnelSource source(from); - Paths paths = store->importPaths(source, 0, trusted); + Paths paths = store->importPaths(source, nullptr, + trusted ? NoCheckSigs : CheckSigs); stopWork(); to << paths; break; @@ -576,7 +577,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe startWork(); if (repair && !trusted) throw Error("you are not privileged to repair paths"); - bool errors = store->verifyStore(checkContents, repair); + bool errors = store->verifyStore(checkContents, (RepairFlag) repair); stopWork(); to << errors; break; @@ -623,7 +624,8 @@ static void performOp(ref store, bool trusted, unsigned int clientVe parseDump(tee, tee.source); startWork(); - store->addToStore(info, tee.source.data, repair, dontCheckSigs, nullptr); + store->addToStore(info, tee.source.data, (RepairFlag) repair, + dontCheckSigs ? 
NoCheckSigs : CheckSigs, nullptr); stopWork(); break; } diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 464bcee4a84..10100d6a601 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -1310,7 +1310,7 @@ int main(int argc, char * * argv) Strings opFlags, opArgs, searchPath; std::map autoArgs_; Operation op = 0; - bool repair = false; + RepairFlag repair = NoRepair; string file; Globals globals; @@ -1372,7 +1372,7 @@ int main(int argc, char * * argv) else if (*arg == "--prebuilt-only" || *arg == "-b") globals.prebuiltOnly = true; else if (*arg == "--repair") - repair = true; + repair = Repair; else if (*arg != "" && arg->at(0) == '-') { opFlags.push_back(*arg); /* FIXME: hacky */ diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 25f0b1bd692..a5d12c1466f 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -108,7 +108,7 @@ int main(int argc, char * * argv) Strings attrPaths; bool wantsReadWrite = false; std::map autoArgs_; - bool repair = false; + RepairFlag repair = NoRepair; parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { if (*arg == "--help") @@ -146,7 +146,7 @@ int main(int argc, char * * argv) else if (*arg == "--strict") strict = true; else if (*arg == "--repair") - repair = true; + repair = Repair; else if (*arg == "--dry-run") settings.readOnlyMode = true; else if (*arg != "" && arg->at(0) == '-') diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 950222812e2..314c9423907 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -677,7 +677,7 @@ static void opImport(Strings opFlags, Strings opArgs) if (!opArgs.empty()) throw UsageError("no arguments expected"); FdSource source(STDIN_FILENO); - Paths paths = store->importPaths(source, nullptr, true); + Paths paths = store->importPaths(source, nullptr, NoCheckSigs); for (auto & i : paths) cout << format("%1%\n") % i << std::flush; @@ -702,11 +702,11 @@ static void opVerify(Strings opFlags, Strings opArgs) throw UsageError("no arguments expected"); bool checkContents = false; - bool repair = false; + RepairFlag repair = NoRepair; for (auto & i : opFlags) if (i == "--check-contents") checkContents = true; - else if (i == "--repair") repair = true; + else if (i == "--repair") repair = Repair; else throw UsageError(format("unknown flag ‘%1%’") % i); if (store->verifyStore(checkContents, repair)) { @@ -871,7 +871,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdImportPaths: { if (!writeAllowed) throw Error("importing paths is not allowed"); - store->importPaths(in, 0, true); // FIXME: should we skip sig checking? + store->importPaths(in, nullptr, NoCheckSigs); // FIXME: should we skip sig checking? out << 1; // indicate success break; } From 0a5a867758285ee07334a44ede336ae75fd51c50 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 3 Jul 2017 11:54:30 +0200 Subject: [PATCH 0407/2196] nix-shell: Respect --dry-run Fixes #824. --- src/nix-build/nix-build.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 42d68fdfdd7..dc80dd6a583 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -390,6 +390,8 @@ int main(int argc, char ** argv) maybePrintExecError(e); } + if (dryRun) return; + // Set the environment. 
auto env = getEnv(); From fe97c6989841460efca37f0f3b9b470c98229283 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 14:45:50 +0200 Subject: [PATCH 0408/2196] : Support sha512 argument --- corepkgs/fetchurl.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix index 62359433971..e135b947fdb 100644 --- a/corepkgs/fetchurl.nix +++ b/corepkgs/fetchurl.nix @@ -1,10 +1,10 @@ { system ? builtins.currentSystem , url -, md5 ? "", sha1 ? "", sha256 ? "" +, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? "" , outputHash ? - if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 + if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 , outputHashAlgo ? - if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" + if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" , executable ? false , unpack ? false , name ? baseNameOf (toString url) From c0015e87af70f539f24d2aa2bc224a9d8b84276b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 14:47:59 +0200 Subject: [PATCH 0409/2196] Support base-64 hashes Also simplify the Hash API. Fixes #1437. --- perl/lib/Nix/Store.xs | 20 ++- src/libexpr/primops.cc | 8 +- src/libstore/binary-cache-store.cc | 2 +- src/libstore/build.cc | 2 +- src/libstore/derivations.cc | 6 +- src/libstore/download.cc | 2 +- src/libstore/export-import.cc | 2 +- src/libstore/gc.cc | 2 +- src/libstore/local-store.cc | 26 +--- src/libstore/nar-info-disk-cache.cc | 4 +- src/libstore/nar-info.cc | 6 +- src/libstore/optimise-store.cc | 4 +- src/libstore/remote-store.cc | 4 +- src/libstore/store-api.cc | 17 +-- src/libutil/hash.cc | 185 +++++++++++------------ src/libutil/hash.hh | 51 +++---- src/nix-daemon/nix-daemon.cc | 6 +- src/nix-prefetch-url/nix-prefetch-url.cc | 2 +- src/nix-store/nix-store.cc | 8 +- src/nix/hash.cc | 40 ++--- src/nix/verify.cc | 2 +- tests/fetchurl.sh | 9 ++ tests/hash.sh | 10 +- 23 files changed, 205 insertions(+), 213 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index aa14bfa6270..bbfb2934315 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -81,8 +81,7 @@ SV * queryReferences(char * path) SV * queryPathHash(char * path) PPCODE: try { - auto hash = store()->queryPathInfo(path)->narHash; - string s = "sha256:" + printHash32(hash); + auto s = store()->queryPathInfo(path)->narHash.to_string(); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -108,7 +107,7 @@ SV * queryPathInfo(char * path, int base32) XPUSHs(&PL_sv_undef); else XPUSHs(sv_2mortal(newSVpv(info->deriver.c_str(), 0))); - string s = "sha256:" + (base32 ? printHash32(info->narHash) : printHash(info->narHash)); + auto s = info->narHash.to_string(base32 ? Base32 : Base16); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); @@ -184,7 +183,7 @@ void importPaths(int fd, int dontCheckSigs) PPCODE: try { FdSource source(fd); - store()->importPaths(source, 0, dontCheckSigs); + store()->importPaths(source, nullptr, dontCheckSigs ? NoCheckSigs : CheckSigs); } catch (Error & e) { croak("%s", e.what()); } @@ -194,7 +193,7 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { Hash h = hashPath(parseHashType(algo), path).first; - string s = base32 ? printHash32(h) : printHash(h); + auto s = h.to_string(base32 ? 
Base32 : Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -205,7 +204,7 @@ SV * hashFile(char * algo, int base32, char * path) PPCODE: try { Hash h = hashFile(parseHashType(algo), path); - string s = base32 ? printHash32(h) : printHash(h); + auto s = h.to_string(base32 ? Base32 : Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -216,7 +215,7 @@ SV * hashString(char * algo, int base32, char * s) PPCODE: try { Hash h = hashString(parseHashType(algo), s); - string s = base32 ? printHash32(h) : printHash(h); + auto s = h.to_string(base32 ? Base32 : Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -226,8 +225,8 @@ SV * hashString(char * algo, int base32, char * s) SV * convertHash(char * algo, char * s, int toBase32) PPCODE: try { - Hash h = parseHash16or32(parseHashType(algo), s); - string s = toBase32 ? printHash32(h) : printHash(h); + Hash h(s, parseHashType(algo)); + string s = h.to_string(toBase32 ? Base32 : Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -286,8 +285,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo) SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { - HashType ht = parseHashType(algo); - Hash h = parseHash16or32(ht, hash); + Hash h(hash, parseHashType(algo)); Path path = store()->makeFixedOutputPath(recursive, h, name); XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); } catch (Error & e) { diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 99ffddaeb80..b753d84e2e6 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -708,8 +708,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * HashType ht = parseHashType(outputHashAlgo); if (ht == htUnknown) throw EvalError(format("unknown hash algorithm ‘%1%’, at %2%") % outputHashAlgo % posDrvName); - Hash h = parseHash16or32(ht, *outputHash); - outputHash = printHash(h); + Hash h(*outputHash, ht); + outputHash = h.to_string(Base16, false); if (outputHashRecursive) outputHashAlgo = "r:" + outputHashAlgo; Path outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName); @@ -1701,7 +1701,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, PathSet context; // discarded string s = state.forceString(*args[1], context, pos); - mkString(v, printHash(hashString(ht, s)), context); + mkString(v, hashString(ht, s).to_string(Base16, false), context); } @@ -1852,7 +1852,7 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, if (n == "url") url = state.forceStringNoCtx(*attr.value, *attr.pos); else if (n == "sha256") - expectedHash = parseHash16or32(htSHA256, state.forceStringNoCtx(*attr.value, *attr.pos)); + expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); else if (n == "name") name = state.forceStringNoCtx(*attr.value, *attr.pos); else diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 8ce5f5bbc7c..8147345c2e1 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -239,7 +239,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refurl = "nar/" + printHash32(narInfo->fileHash) + ".nar" + narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar" + (compression == "xz" ? 
".xz" : compression == "bzip2" ? ".bz2" : compression == "br" ? ".br" : diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 6c740d99c58..f40a8c5498e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3236,7 +3236,7 @@ PathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash) Path DerivationGoal::addHashRewrite(const Path & path) { string h1 = string(path, worker.store.storeDir.size() + 1, 32); - string h2 = string(printHash32(hashString(htSHA256, "rewrite:" + drvPath + ":" + path)), 0, 32); + string h2 = string(hashString(htSHA256, "rewrite:" + drvPath + ":" + path).to_string(Base32, false), 0, 32); Path p = worker.store.storeDir + "/" + h2 + string(path, worker.store.storeDir.size() + 33); deletePath(p); assert(path.size() == p.size()); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index bb7b8fe628a..48c0837ffaa 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -23,7 +23,7 @@ void DerivationOutput::parseHashInfo(bool & recursive, Hash & hash) const if (hashType == htUnknown) throw Error(format("unknown hash algorithm ‘%1%’") % algo); - hash = parseHash(hashType, this->hash); + hash = Hash(this->hash, hashType); } @@ -354,7 +354,7 @@ Hash hashDerivationModulo(Store & store, Derivation drv) h = hashDerivationModulo(store, drv2); drvHashes[i.first] = h; } - inputs2[printHash(h)] = i.second; + inputs2[h.to_string(Base16, false)] = i.second; } drv.inputDrvs = inputs2; @@ -437,7 +437,7 @@ Sink & operator << (Sink & out, const BasicDerivation & drv) std::string hashPlaceholder(const std::string & outputName) { // FIXME: memoize? - return "/" + printHash32(hashString(htSHA256, "nix-output:" + outputName)); + return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false); } diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 4f3bf2d14f1..15eb68c69ea 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -581,7 +581,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa Path cacheDir = getCacheDir() + "/nix/tarballs"; createDirs(cacheDir); - string urlHash = printHash32(hashString(htSHA256, url)); + string urlHash = hashString(htSHA256, url).to_string(Base32, false); Path dataFile = cacheDir + "/" + urlHash + ".info"; Path fileLink = cacheDir + "/" + urlHash + "-file"; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 1b3a43df32d..2cbcedc6fb0 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -56,7 +56,7 @@ void Store::exportPath(const Path & path, Sink & sink) Hash hash = hashAndWriteSink.currentHash(); if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) throw Error(format("hash of path ‘%1%’ has changed from ‘%2%’ to ‘%3%’!") % path - % printHash(info->narHash) % printHash(hash)); + % info->narHash.to_string() % hash.to_string()); hashAndWriteSink << exportMagic << path << info->references << info->deriver << 0; } diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 3cdbb114a79..0cf9f87cac3 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -76,7 +76,7 @@ void LocalStore::syncWithGC() void LocalStore::addIndirectRoot(const Path & path) { - string hash = printHash32(hashString(htSHA1, path)); + string hash = hashString(htSHA1, path).to_string(Base32, false); Path realRoot = canonPath((format("%1%/%2%/auto/%3%") % stateDir % gcRootsDir % hash).str()); makeSymlink(realRoot, path); diff --git a/src/libstore/local-store.cc 
b/src/libstore/local-store.cc index a7a94a8b9e8..7c41dfca7f3 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -572,7 +572,7 @@ uint64_t LocalStore::addValidPath(State & state, state.stmtRegisterValidPath.use() (info.path) - ("sha256:" + printHash(info.narHash)) + (info.narHash.to_string(Base16)) (info.registrationTime == 0 ? time(0) : info.registrationTime) (info.deriver, info.deriver != "") (info.narSize, info.narSize != 0) @@ -614,20 +614,6 @@ uint64_t LocalStore::addValidPath(State & state, } -Hash parseHashField(const Path & path, const string & s) -{ - string::size_type colon = s.find(':'); - if (colon == string::npos) - throw Error(format("corrupt hash ‘%1%’ in valid-path entry for ‘%2%’") - % s % path); - HashType ht = parseHashType(string(s, 0, colon)); - if (ht == htUnknown) - throw Error(format("unknown hash type ‘%1%’ in valid-path entry for ‘%2%’") - % string(s, 0, colon) % path); - return parseHash(ht, string(s, colon + 1)); -} - - void LocalStore::queryPathInfoUncached(const Path & path, std::function)> success, std::function failure) @@ -650,7 +636,11 @@ void LocalStore::queryPathInfoUncached(const Path & path, info->id = useQueryPathInfo.getInt(0); - info->narHash = parseHashField(path, useQueryPathInfo.getStr(1)); + try { + info->narHash = Hash(useQueryPathInfo.getStr(1)); + } catch (BadHash & e) { + throw Error("in valid-path entry for ‘%s’: %s", path, e.what()); + } info->registrationTime = useQueryPathInfo.getInt(2); @@ -685,7 +675,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info) { state.stmtUpdatePathInfo.use() (info.narSize, info.narSize != 0) - ("sha256:" + printHash(info.narHash)) + (info.narHash.to_string(Base16)) (info.ultimate ? 1 : 0, info.ultimate) (concatStringsSep(" ", info.sigs), !info.sigs.empty()) (info.ca, !info.ca.empty()) @@ -1211,7 +1201,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) if (info->narHash != nullHash && info->narHash != current.first) { printError(format("path ‘%1%’ was modified! 
" "expected hash ‘%2%’, got ‘%3%’") - % i % printHash(info->narHash) % printHash(current.first)); + % i % info->narHash.to_string() % current.first.to_string()); if (repair) repairPath(i); else errors = true; } else { diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 180a936edb8..6e155e87780 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -203,9 +203,9 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache narInfo->url = queryNAR.getStr(3); narInfo->compression = queryNAR.getStr(4); if (!queryNAR.isNull(5)) - narInfo->fileHash = parseHash(queryNAR.getStr(5)); + narInfo->fileHash = Hash(queryNAR.getStr(5)); narInfo->fileSize = queryNAR.getInt(6); - narInfo->narHash = parseHash(queryNAR.getStr(7)); + narInfo->narHash = Hash(queryNAR.getStr(7)); narInfo->narSize = queryNAR.getInt(8); for (auto & r : tokenizeString(queryNAR.getStr(9), " ")) narInfo->references.insert(cache.storeDir + "/" + r); diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index d1042c6de25..660f6a42a19 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -11,7 +11,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & auto parseHashField = [&](const string & s) { try { - return parseHash(s); + return Hash(s); } catch (BadHash &) { corrupt(); return Hash(); // never reached @@ -90,10 +90,10 @@ std::string NarInfo::to_string() const assert(compression != ""); res += "Compression: " + compression + "\n"; assert(fileHash.type == htSHA256); - res += "FileHash: sha256:" + printHash32(fileHash) + "\n"; + res += "FileHash: " + fileHash.to_string(Base32) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; assert(narHash.type == htSHA256); - res += "NarHash: sha256:" + printHash32(narHash) + "\n"; + res += "NarHash: " + narHash.to_string(Base32) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 56167c4dfae..adaf313131f 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -135,10 +135,10 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path, InodeHa contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ Hash hash = hashPath(htSHA256, path).first; - debug(format("‘%1%’ has hash ‘%2%’") % path % printHash(hash)); + debug(format("‘%1%’ has hash ‘%2%’") % path % hash.to_string()); /* Check if this is a known hash. 
*/ - Path linkPath = linksDir + "/" + printHash32(hash); + Path linkPath = linksDir + "/" + hash.to_string(Base32, false); retry: if (!pathExists(linkPath)) { diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 7337e406d2e..ab726e79534 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -294,7 +294,7 @@ void RemoteStore::queryPathInfoUncached(const Path & path, info->path = path; info->deriver = readString(conn->from); if (info->deriver != "") assertStorePath(info->deriver); - info->narHash = parseHash(htSHA256, readString(conn->from)); + info->narHash = Hash(readString(conn->from), htSHA256); info->references = readStorePaths(*this, conn->from); conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { @@ -387,7 +387,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, const ref else { conn->to << wopAddToStoreNar - << info.path << info.deriver << printHash(info.narHash) + << info.path << info.deriver << info.narHash.to_string(Base16, false) << info.references << info.registrationTime << info.narSize << info.ultimate << info.sigs << info.ca << repair << !checkSigs; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 39b9466162f..d7b784cfbc2 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -176,13 +176,12 @@ Path Store::makeStorePath(const string & type, const Hash & hash, const string & name) const { /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ - string s = type + ":sha256:" + printHash(hash) + ":" - + storeDir + ":" + name; + string s = type + ":" + hash.to_string(Base16) + ":" + storeDir + ":" + name; checkStoreName(name); return storeDir + "/" - + printHash32(compressHash(hashString(htSHA256, s), 20)) + + compressHash(hashString(htSHA256, s), 20).to_string(Base32, false) + "-" + name; } @@ -202,7 +201,7 @@ Path Store::makeFixedOutputPath(bool recursive, ? makeStorePath("source", hash, name) : makeStorePath("output:out", hashString(htSHA256, "fixed:out:" + (recursive ? (string) "r:" : "") + - printHashType(hash.type) + ":" + printHash(hash) + ":"), + hash.to_string(Base16) + ":"), name); } @@ -438,7 +437,7 @@ string Store::makeValidityRegistration(const PathSet & paths, auto info = queryPathInfo(i); if (showHash) { - s += printHash(info->narHash) + "\n"; + s += info->narHash.to_string(Base16, false) + "\n"; s += (format("%1%\n") % info->narSize).str(); } @@ -613,7 +612,7 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven) if (hashGiven) { string s; getline(str, s); - info.narHash = parseHash(htSHA256, s); + info.narHash = Hash(s, htSHA256); getline(str, s); if (!string2Int(s, info.narSize)) throw Error("number expected"); } @@ -648,7 +647,7 @@ std::string ValidPathInfo::fingerprint() const % path); return "1;" + path + ";" - + printHashType(narHash.type) + ":" + printHash32(narHash) + ";" + + narHash.to_string(Base32) + ";" + std::to_string(narSize) + ";" + concatStringsSep(",", references); } @@ -667,7 +666,7 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const }; if (hasPrefix(ca, "text:")) { - auto hash = parseHash(std::string(ca, 5)); + Hash hash(std::string(ca, 5)); if (store.makeTextPath(storePathToName(path), hash, references) == path) return true; else @@ -676,7 +675,7 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const else if (hasPrefix(ca, "fixed:")) { bool recursive = ca.compare(6, 2, "r:") == 0; - auto hash = parseHash(std::string(ca, recursive ? 
8 : 6)); + Hash hash(std::string(ca, recursive ? 8 : 6)); if (store.makeFixedOutputPath(recursive, hash, storePathToName(path)) == path) return true; else diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index fa1bb5d9718..6b45ac859d5 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -16,17 +16,8 @@ namespace nix { -Hash::Hash() +void Hash::init() { - type = htUnknown; - hashSize = 0; - memset(hash, 0, maxHashSize); -} - - -Hash::Hash(HashType type) -{ - this->type = type; if (type == htMD5) hashSize = md5HashSize; else if (type == htSHA1) hashSize = sha1HashSize; else if (type == htSHA256) hashSize = sha256HashSize; @@ -62,16 +53,10 @@ bool Hash::operator < (const Hash & h) const } -std::string Hash::to_string(bool base32) const -{ - return printHashType(type) + ":" + (base32 ? printHash32(*this) : printHash(*this)); -} - - const string base16Chars = "0123456789abcdef"; -string printHash(const Hash & hash) +static string printHash16(const Hash & hash) { char buf[hash.hashSize * 2]; for (unsigned int i = 0; i < hash.hashSize; i++) { @@ -82,42 +67,11 @@ string printHash(const Hash & hash) } -Hash parseHash(const string & s) -{ - string::size_type colon = s.find(':'); - if (colon == string::npos) - throw BadHash(format("invalid hash ‘%s’") % s); - string hts = string(s, 0, colon); - HashType ht = parseHashType(hts); - if (ht == htUnknown) - throw BadHash(format("unknown hash type ‘%s’") % hts); - return parseHash16or32(ht, string(s, colon + 1)); -} - - -Hash parseHash(HashType ht, const string & s) -{ - Hash hash(ht); - if (s.length() != hash.hashSize * 2) - throw BadHash(format("invalid hash ‘%1%’") % s); - for (unsigned int i = 0; i < hash.hashSize; i++) { - string s2(s, i * 2, 2); - if (!isxdigit(s2[0]) || !isxdigit(s2[1])) - throw BadHash(format("invalid hash ‘%1%’") % s); - istringstream_nocopy str(s2); - int n; - str >> std::hex >> n; - hash.hash[i] = n; - } - return hash; -} - - // omitted: E O U T const string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz"; -string printHash32(const Hash & hash) +static string printHash32(const Hash & hash) { assert(hash.hashSize); size_t len = hash.base32Len(); @@ -142,66 +96,103 @@ string printHash32(const Hash & hash) string printHash16or32(const Hash & hash) { - return hash.type == htMD5 ? printHash(hash) : printHash32(hash); + return hash.to_string(hash.type == htMD5 ? Base16 : Base32); } -Hash parseHash32(HashType ht, const string & s) +std::string Hash::to_string(Base base, bool includeType) const { - Hash hash(ht); - size_t len = hash.base32Len(); - assert(s.size() == len); - - for (unsigned int n = 0; n < len; ++n) { - char c = s[len - n - 1]; - unsigned char digit; - for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! 
slow */ - if (base32Chars[digit] == c) break; - if (digit >= 32) - throw BadHash(format("invalid base-32 hash ‘%1%’") % s); - unsigned int b = n * 5; - unsigned int i = b / 8; - unsigned int j = b % 8; - hash.hash[i] |= digit << j; - - if (i < hash.hashSize - 1) { - hash.hash[i + 1] |= digit >> (8 - j); - } else { - if (digit >> (8 - j)) - throw BadHash(format("invalid base-32 hash ‘%1%’") % s); - } + std::string s; + if (includeType) { + s += printHashType(type); + s += ':'; } - - return hash; + switch (base) { + case Base16: + s += printHash16(*this); + break; + case Base32: + s += printHash32(*this); + break; + case Base64: + s += base64Encode(std::string((const char *) hash, hashSize)); + break; + } + return s; } -Hash parseHash16or32(HashType ht, const string & s) +Hash::Hash(const std::string & s, HashType type) + : type(type) { - Hash hash(ht); - if (s.size() == hash.hashSize * 2) - /* hexadecimal representation */ - hash = parseHash(ht, s); - else if (s.size() == hash.base32Len()) - /* base-32 representation */ - hash = parseHash32(ht, s); - else - throw BadHash(format("hash ‘%1%’ has wrong length for hash type ‘%2%’") - % s % printHashType(ht)); - return hash; -} + auto colon = s.find(':'); + + size_t pos = 0; + + if (colon == string::npos) { + if (type == htUnknown) + throw BadHash("hash ‘%s’ does not include a type", s); + } else { + string hts = string(s, 0, colon); + this->type = parseHashType(hts); + if (this->type == htUnknown) + throw BadHash("unknown hash type ‘%s’", hts); + if (type != htUnknown && type != this->type) + throw BadHash("hash ‘%s’ should have type ‘%s’", s, printHashType(type)); + pos = colon + 1; + } + init(); -bool isHash(const string & s) -{ - if (s.length() != 32) return false; - for (int i = 0; i < 32; i++) { - char c = s[i]; - if (!((c >= '0' && c <= '9') || - (c >= 'a' && c <= 'f'))) - return false; + size_t size = s.size() - pos; + + if (size == base16Len()) { + + auto parseHexDigit = [&](char c) { + if (c >= '0' && c <= '9') return c - '0'; + if (c >= 'A' && c <= 'F') return c - 'A' + 10; + if (c >= 'a' && c <= 'f') return c - 'a' + 10; + throw BadHash("invalid base-16 hash ‘%s’", s); + }; + + for (unsigned int i = 0; i < hashSize; i++) { + hash[i] = + parseHexDigit(s[pos + i * 2]) << 4 + | parseHexDigit(s[pos + i * 2 + 1]); + } } - return true; + + else if (size == base32Len()) { + + for (unsigned int n = 0; n < size; ++n) { + char c = s[pos + size - n - 1]; + unsigned char digit; + for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! 
slow */ + if (base32Chars[digit] == c) break; + if (digit >= 32) + throw BadHash("invalid base-32 hash ‘%s’", s); + unsigned int b = n * 5; + unsigned int i = b / 8; + unsigned int j = b % 8; + hash[i] |= digit << j; + + if (i < hashSize - 1) { + hash[i + 1] |= digit >> (8 - j); + } else { + if (digit >> (8 - j)) + throw BadHash("invalid base-32 hash ‘%s’", s); + } + } + } + + else if (size == base64Len()) { + auto d = base64Decode(std::string(s, pos)); + assert(d.size() == hashSize); + memcpy(hash, d.data(), hashSize); + } + + else + throw BadHash("hash ‘%s’ has wrong length for hash type ‘%s’", s, printHashType(type)); } diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 02e213fc7b3..b8b432256c9 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -20,20 +20,30 @@ const int sha512HashSize = 64; extern const string base32Chars; +enum Base : int { Base64, Base32, Base16 }; + struct Hash { static const unsigned int maxHashSize = 64; - unsigned int hashSize; - unsigned char hash[maxHashSize]; + unsigned int hashSize = 0; + unsigned char hash[maxHashSize] = {}; - HashType type; + HashType type = htUnknown; /* Create an unset hash object. */ - Hash(); + Hash() { }; /* Create a zero-filled hash object. */ - Hash(HashType type); + Hash(HashType type) : type(type) { init(); }; + + /* Initialize the hash from a string representation, in the format + "[:]". If the ‘type’ argument is + htUnknown, then the hash type must be specified in the + string. */ + Hash(const std::string & s, HashType type = htUnknown); + + void init(); /* Check whether a hash is set. */ operator bool () const { return type != htUnknown; } @@ -59,33 +69,22 @@ struct Hash return (hashSize * 8 - 1) / 5 + 1; } - std::string to_string(bool base32 = true) const; -}; - - -/* Convert a hash to a hexadecimal representation. */ -string printHash(const Hash & hash); - -Hash parseHash(const string & s); + /* Returns the length of a base-64 representation of this hash. */ + size_t base64Len() const + { + return ((4 * hashSize / 3) + 3) & ~3; + } -/* Parse a hexadecimal representation of a hash code. */ -Hash parseHash(HashType ht, const string & s); + /* Return a string representation of the hash, in base-16, base-32 + or base-64. By default, this is prefixed by the hash type + (e.g. "sha256:"). */ + std::string to_string(Base base = Base32, bool includeType = true) const; +}; -/* Convert a hash to a base-32 representation. */ -string printHash32(const Hash & hash); /* Print a hash in base-16 if it's MD5, or base-32 otherwise. */ string printHash16or32(const Hash & hash); -/* Parse a base-32 representation of a hash code. */ -Hash parseHash32(HashType ht, const string & s); - -/* Parse a base-16 or base-32 representation of a hash code. */ -Hash parseHash16or32(HashType ht, const string & s); - -/* Verify that the given string is a valid hash code. */ -bool isHash(const string & s); - /* Compute the hash of the given string. 
*/ Hash hashString(HashType ht, const string & s); diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index c9c16776634..b029b92db15 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -216,7 +216,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe startWork(); auto hash = store->queryPathInfo(path)->narHash; stopWork(); - to << printHash(hash); + to << hash.to_string(Base16, false); break; } @@ -550,7 +550,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe if (info) { if (GET_PROTOCOL_MINOR(clientVersion) >= 17) to << 1; - to << info->deriver << printHash(info->narHash) << info->references + to << info->deriver << info->narHash.to_string(Base16, false) << info->references << info->registrationTime << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { to << info->ultimate @@ -610,7 +610,7 @@ static void performOp(ref store, bool trusted, unsigned int clientVe from >> info.deriver; if (!info.deriver.empty()) store->assertStorePath(info.deriver); - info.narHash = parseHash(htSHA256, readString(from)); + info.narHash = Hash(readString(from), htSHA256); info.references = readStorePaths(*store, from); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index b3b2fcac713..47e66eaa651 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -145,7 +145,7 @@ int main(int argc, char * * argv) Hash hash, expectedHash(ht); Path storePath; if (args.size() == 2) { - expectedHash = parseHash16or32(ht, args[1]); + expectedHash = Hash(args[1], ht); storePath = store->makeFixedOutputPath(unpack, expectedHash, name); if (store->isValidPath(storePath)) hash = expectedHash; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 314c9423907..6cea57a7671 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -212,7 +212,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs) string name = *i++; cout << format("%1%\n") % - store->makeFixedOutputPath(recursive, parseHash16or32(hashAlgo, hash), name); + store->makeFixedOutputPath(recursive, Hash(hash, hashAlgo), name); } @@ -380,9 +380,9 @@ static void opQuery(Strings opFlags, Strings opArgs) auto info = store->queryPathInfo(j); if (query == qHash) { assert(info->narHash.type == htSHA256); - cout << format("sha256:%1%\n") % printHash32(info->narHash); + cout << fmt("%s\n", info->narHash.to_string(Base32)); } else if (query == qSize) - cout << format("%1%\n") % info->narSize; + cout << fmt("%d\n", info->narSize); } } break; @@ -734,7 +734,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) if (current.first != info->narHash) { printError( format("path ‘%1%’ was modified! 
expected hash ‘%2%’, got ‘%3%’") - % path % printHash(info->narHash) % printHash(current.first)); + % path % info->narHash.to_string() % current.first.to_string()); status = 1; } } diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 5dd891e8add..98de8897112 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -9,15 +9,16 @@ struct CmdHash : Command { enum Mode { mFile, mPath }; Mode mode; - bool base32 = false; + Base base = Base16; bool truncate = false; HashType ht = htSHA512; Strings paths; CmdHash(Mode mode) : mode(mode) { - mkFlag(0, "base32", "print hash in base-32", &base32); - mkFlag(0, "base16", "print hash in base-16", &base32, false); + mkFlag(0, "base64", "print hash in base-64", &base, Base64); + mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32); + mkFlag(0, "base16", "print hash in base-16", &base, Base16); mkHashTypeFlag("type", &ht); expectArgs("paths", &paths); } @@ -40,7 +41,7 @@ struct CmdHash : Command Hash h = mode == mFile ? hashFile(ht, path) : hashPath(ht, path).first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); std::cout << format("%1%\n") % - (base32 ? printHash32(h) : printHash(h)); + h.to_string(base, false); } } }; @@ -50,11 +51,11 @@ static RegisterCommand r2(make_ref(CmdHash::mPath)); struct CmdToBase : Command { - bool toBase32; + Base base; HashType ht = htSHA512; Strings args; - CmdToBase(bool toBase32) : toBase32(toBase32) + CmdToBase(Base base) : base(base) { mkHashTypeFlag("type", &ht); expectArgs("strings", &args); @@ -62,28 +63,29 @@ struct CmdToBase : Command std::string name() override { - return toBase32 ? "to-base32" : "to-base16"; + return + base == Base16 ? "to-base16" : + base == Base32 ? "to-base32" : + "to-base64"; } std::string description() override { - return toBase32 - ? "convert a hash to base-32 representation" - : "convert a hash to base-16 representation"; + return fmt("convert a hash to base-%d representation", + base == Base16 ? 16 : + base == Base32 ? 32 : 64); } void run() override { - for (auto s : args) { - Hash h = parseHash16or32(ht, s); - std::cout << format("%1%\n") % - (toBase32 ? printHash32(h) : printHash(h)); - } + for (auto s : args) + std::cout << fmt("%s\n", Hash(s, ht).to_string(base, false)); } }; -static RegisterCommand r3(make_ref(false)); -static RegisterCommand r4(make_ref(true)); +static RegisterCommand r3(make_ref(Base16)); +static RegisterCommand r4(make_ref(Base32)); +static RegisterCommand r5(make_ref(Base64)); /* Legacy nix-hash command. */ static int compatNixHash(int argc, char * * argv) @@ -121,14 +123,14 @@ static int compatNixHash(int argc, char * * argv) if (op == opHash) { CmdHash cmd(flat ? CmdHash::mFile : CmdHash::mPath); cmd.ht = ht; - cmd.base32 = base32; + cmd.base = base32 ? Base32 : Base16; cmd.truncate = truncate; cmd.paths = ss; cmd.run(); } else { - CmdToBase cmd(op == opTo32); + CmdToBase cmd(op == opTo32 ? Base32 : Base16); cmd.args = ss; cmd.ht = ht; cmd.run(); diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 18533e6066c..973f60a74ff 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -94,7 +94,7 @@ struct CmdVerify : StorePathsCommand corrupted = 1; printError( format("path ‘%s’ was modified! 
expected hash ‘%s’, got ‘%s’") - % info->path % printHash(info->narHash) % printHash(hash.first)); + % info->path % info->narHash.to_string() % hash.first.to_string()); } } diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index b6fa3a27edd..808f460258d 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -9,6 +9,15 @@ outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh cmp $outPath fetchurl.sh +# Now using a base-64 hash. +clearStore + +hash=$(nix hash-file --type sha512 --base64 ./fetchurl.sh) + +outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link) + +cmp $outPath fetchurl.sh + # Test unpacking a NAR. rm -rf $TEST_ROOT/archive mkdir -p $TEST_ROOT/archive diff --git a/tests/hash.sh b/tests/hash.sh index a95c68683f8..9f234bc635b 100644 --- a/tests/hash.sh +++ b/tests/hash.sh @@ -63,11 +63,15 @@ try2 md5 "f78b733a68f5edbdf9413899339eaa4a" # Conversion. try3() { + h64=$(nix to-base64 --type "$1" "$2") + [ "$h64" = "$4" ] h32=$(nix-hash --type "$1" --to-base32 "$2") [ "$h32" = "$3" ] h16=$(nix-hash --type "$1" --to-base16 "$h32") [ "$h16" = "$2" ] + h16=$(nix to-base16 --type "$1" "$h64") + [ "$h16" = "$2" ] } -try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" -try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" -try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" +try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8=" +try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" +try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ==" From ad8b96f1f2c80bf3f91ceab4d955aa368b7c85d4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 15:38:23 +0200 Subject: [PATCH 0410/2196] Fix handling of expression installables with a / in them --- src/nix/installables.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index f23308b9bc3..9982ff75f4f 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -189,7 +189,10 @@ std::vector> InstallablesCommand::parseInstallables for (auto & s : ss) { - if (s.find("/") != std::string::npos) { + if (s.compare(0, 1, "(") == 0) + result.push_back(std::make_shared(*this, s)); + + else if (s.find("/") != std::string::npos) { auto path = store->toStorePath(store->followLinksToStore(s)); @@ -201,9 +204,6 @@ std::vector> InstallablesCommand::parseInstallables } } - else if (s.compare(0, 1, "(") == 0) - result.push_back(std::make_shared(*this, s)); - else if (s == "" || std::regex_match(s, attrPathRegex)) result.push_back(std::make_shared(*this, s)); From 6cf23c3e8fa12e6bf297ca87d1b47889bf115d52 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 15:43:06 +0200 Subject: [PATCH 0411/2196] Add allow-new-privileges option 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This allows builds to call setuid binaries. This was previously possible until we started using seccomp. Turns out that seccomp by default disallows processes from acquiring new privileges. Generally, any use of setuid binaries (except those created by the builder itself) is by definition impure, but some people were relying on this ability for certain tests. Example: $ nix build '(with import {}; runCommand "foo" {} "/run/wrappers/bin/ping -c 1 8.8.8.8; exit 1")' --no-allow-new-privileges builder for ‘/nix/store/j0nd8kv85hd6r4kxgnwzvr0k65ykf6fv-foo.drv’ failed with exit code 1; last 2 log lines: cannot raise the capability into the Ambient set : Operation not permitted $ nix build '(with import {}; runCommand "foo" {} "/run/wrappers/bin/ping -c 1 8.8.8.8; exit 1")' --allow-new-privileges builder for ‘/nix/store/j0nd8kv85hd6r4kxgnwzvr0k65ykf6fv-foo.drv’ failed with exit code 1; last 6 log lines: PING 8.8.8.8 (8.8.8.8) 56(84) bytes of data. 64 bytes from 8.8.8.8: icmp_seq=1 ttl=46 time=15.2 ms Fixes #1429. --- doc/manual/command-ref/conf-file.xml | 17 +++++++++++++++++ src/libstore/build.cc | 3 +++ src/libstore/globals.hh | 6 ++++++ 3 files changed, 26 insertions(+) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 616983bc7f0..cde32b35f5b 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -643,6 +643,23 @@ password my-password
+ allow-new-privileges + + (Linux-specific.) By default, builders on Linux + cannot acquire new privileges by calling setuid/setgid programs or + programs that have file capabilities. For example, programs such + as sudo or ping will + fail. (Note that in sandbox builds, no such programs are available + unless you bind-mount them into the sandbox via the + option.) You can allow the + use of such programs by enabling this option. This is impure and + usually undesirable, but may be useful in certain scenarios + (e.g. to spin up containers or set up userspace network interfaces + in tests). + + + + diff --git a/src/libstore/build.cc b/src/libstore/build.cc index f40a8c5498e..355fb3b7dfe 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2340,6 +2340,9 @@ void setupSeccomp() seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(fsetxattr), 0) != 0) throw SysError("unable to add seccomp rule"); + if (seccomp_attr_set(ctx, SCMP_FLTATR_CTL_NNP, settings.allowNewPrivileges ? 0 : 1) != 0) + throw SysError("unable to set 'no new privileges' seccomp attribute"); + if (seccomp_load(ctx) != 0) throw SysError("unable to load seccomp BPF program"); #endif diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index af37ec61d7a..c8d67b07110 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -321,6 +321,12 @@ public: Setting userAgentSuffix{this, "", "user-agent-suffix", "String appended to the user agent in HTTP requests."}; + +#if __linux__ + Setting allowNewPrivileges{this, false, "allow-new-privileges", + "Whether builders can acquire new privileges by calling programs with " + "setuid/setgid bits or with file capabilities."}; +#endif }; From b7203e853e3b928e1a7fb081fce379f023e935bb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 16:26:48 +0200 Subject: [PATCH 0412/2196] getDefaultSubstituters(): Simplify initialisation As shlevy pointed out, static variables in C++11 have thread-safe initialisation. --- src/libstore/store-api.cc | 34 ++++++++++++++-------------------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index d7b784cfbc2..db2e023ab0b 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -795,33 +795,27 @@ static RegisterStoreImplementation regStore([]( std::list> getDefaultSubstituters() { - struct State { - bool done = false; + static auto stores([]() { std::list> stores; - }; - static Sync state_; - - auto state(state_.lock()); - if (state->done) return state->stores; + StringSet done; - StringSet done; - - auto addStore = [&](const std::string & uri) { - if (done.count(uri)) return; - done.insert(uri); - state->stores.push_back(openStore(uri)); - }; + auto addStore = [&](const std::string & uri) { + if (done.count(uri)) return; + done.insert(uri); + stores.push_back(openStore(uri)); + }; - for (auto uri : settings.substituters.get()) - addStore(uri); + for (auto uri : settings.substituters.get()) + addStore(uri); - for (auto uri : settings.extraSubstituters.get()) - addStore(uri); + for (auto uri : settings.extraSubstituters.get()) + addStore(uri); - state->done = true; + return stores; + } ()); - return state->stores; + return stores; } From 42c5774e78a9f1422dee9c35adb9c056aa994d3b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 16:34:53 +0200 Subject: [PATCH 0413/2196] Sort substituters by priority Fixes #1438. 
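To make the intended ordering concrete, here is a minimal standalone sketch (an illustrative FakeStore type and made-up URIs, not the real Store/ref machinery; the actual change below sorts a std::list of stores using the getPriority() accessor it adds, where a lower value means higher priority):

    // Illustrative only: sort substituters ascending by priority, lowest first.
    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    struct FakeStore {
        std::string uri;
        int priority;                       // lower value = higher priority
        int getPriority() const { return priority; }
    };

    int main()
    {
        std::vector<FakeStore> stores{
            {"https://remote-cache.invalid", 50},   // hypothetical URIs
            {"file:///tmp/local-cache", 10},
            {"daemon", 0},
        };

        std::stable_sort(stores.begin(), stores.end(),
            [](const FakeStore & a, const FakeStore & b) {
                return a.getPriority() < b.getPriority();
            });

        for (auto & s : stores)
            std::cout << s.priority << "\t" << s.uri << "\n";
        // The local daemon comes first, then the local cache, then the remote cache.
    }
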
--- src/libstore/binary-cache-store.hh | 2 ++ src/libstore/store-api.cc | 4 ++++ src/libstore/store-api.hh | 5 +++++ 3 files changed, 11 insertions(+) diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index bf5a56ab4dc..f9c1c2cbe8a 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -123,6 +123,8 @@ public: std::shared_ptr getBuildLog(const Path & path) override; + int getPriority() override { return priority; } + }; } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index db2e023ab0b..108e2d4ce9b 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -812,6 +812,10 @@ std::list> getDefaultSubstituters() for (auto uri : settings.extraSubstituters.get()) addStore(uri); + stores.sort([](ref & a, ref & b) { + return a->getPriority() < b->getPriority(); + }); + return stores; } ()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index c625a363033..cada37653e6 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -590,6 +590,11 @@ public: a notion of connection. Otherwise this is a no-op. */ virtual void connect() { }; + /* Get the priority of the store, used to order substituters. In + particular, binary caches can specify a priority field in their + "nix-cache-info" file. Lower value means higher priority. */ + virtual int getPriority() { return 0; } + protected: Stats stats; From a3dc1e65abe29f9d0528d3b5ea45812f4dcd63ed Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 4 Jul 2017 19:00:51 +0200 Subject: [PATCH 0414/2196] Add X32 to the seccomp filter Fixes #1432. --- src/libstore/build.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 355fb3b7dfe..60b0a531f42 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2317,6 +2317,10 @@ void setupSeccomp() seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0) throw SysError("unable to add 32-bit seccomp architecture"); + if (settings.thisSystem == "x86_64-linux" && + seccomp_arch_add(ctx, SCMP_ARCH_X32) != 0) + throw SysError("unable to add X32 seccomp architecture"); + /* Prevent builders from creating setuid/setgid binaries. */ for (int perm : { S_ISUID, S_ISGID }) { if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EPERM), SCMP_SYS(chmod), 1, From 60da5d2b8f0f8586520566155172c1a18e4f04cc Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Thu, 6 Jul 2017 16:37:19 +0200 Subject: [PATCH 0415/2196] Fix nix-instantiate manpage indentation The second command variant is now its own cmdsynopsis, which ensures it's not indented as was the case using sbrk. 
--- doc/manual/command-ref/nix-instantiate.xml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/manual/command-ref/nix-instantiate.xml b/doc/manual/command-ref/nix-instantiate.xml index 3d03358bea3..fe077ff6560 100644 --- a/doc/manual/command-ref/nix-instantiate.xml +++ b/doc/manual/command-ref/nix-instantiate.xml @@ -43,7 +43,8 @@ files - + + nix-instantiate files From 01722b3d2c10090f53290e68b585c50d042f0adb Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Thu, 6 Jul 2017 17:56:15 +0200 Subject: [PATCH 0416/2196] Remove unused variable from test script --- tests/common.sh.in | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/common.sh.in b/tests/common.sh.in index 6c3804a257a..a8d56707e27 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -1,7 +1,5 @@ set -e -datadir="@datadir@" - export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test) export NIX_STORE_DIR if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then From 72e80c59b5176eb08986247ec0f1978d32993364 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Thu, 6 Jul 2017 13:42:12 -0700 Subject: [PATCH 0417/2196] =?UTF-8?q?Don=E2=80=99t=20hardlink=20disallowed?= =?UTF-8?q?=20paths=20in=20OS=20X.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #1443 --- src/libstore/optimise-store.cc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 56167c4dfae..9e651ebeaf7 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -9,6 +9,7 @@ #include #include #include +#include namespace nix { @@ -96,6 +97,19 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path, InodeHa if (lstat(path.c_str(), &st)) throw SysError(format("getting attributes of path ‘%1%’") % path); +#if __APPLE__ + /* HFS/OS X has some undocumented security feature disabling hardlinking for + special files within .app dirs. *.app/Contents/PkgInfo and + *.app/Contents/Resources/\*.lproj seem to be the only paths affected. See + https://github.com/NixOS/nix/issues/1443 for more discussion. */ + + if (std::regex_search(path, std::regex("\\.app/Contents/PkgInfo$")) || + std::regex_search(path, std::regex("\\.app/Contents/Resources/.+\\.lproj$"))) { + debug(format("‘%1%’ is not allowed to be linked in OS X") % path); + return; + } +#endif + if (S_ISDIR(st.st_mode)) { Strings names = readDirectoryIgnoringInodes(path, inodeHash); for (auto & i : names) From ce3095e14165ab0079f2c24002ae4c810b1c728d Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 20:38:33 +0200 Subject: [PATCH 0418/2196] glossary: Fix word order --- doc/manual/glossary/glossary.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/glossary/glossary.xml b/doc/manual/glossary/glossary.xml index d74940c90b3..e0636044cc2 100644 --- a/doc/manual/glossary/glossary.xml +++ b/doc/manual/glossary/glossary.xml @@ -54,7 +54,7 @@ A substitute is a command invocation stored in the Nix database that describes how to build a store object, bypassing - normal the build mechanism (i.e., derivations). Typically, the + the normal build mechanism (i.e., derivations). Typically, the substitute builds the store object by downloading a pre-built version of the store object from some server. 
From 68c626c6b07e8676f7235c21efb94a0055f5cf3d Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 20:37:16 +0200 Subject: [PATCH 0419/2196] man pages: Grouping for option alternatives --- doc/manual/command-ref/nix-copy-closure.xml | 6 ++-- doc/manual/command-ref/opt-common-syn.xml | 32 +++++++++++++++------ 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/doc/manual/command-ref/nix-copy-closure.xml b/doc/manual/command-ref/nix-copy-closure.xml index 97e261ae993..800e1db6ab0 100644 --- a/doc/manual/command-ref/nix-copy-closure.xml +++ b/doc/manual/command-ref/nix-copy-closure.xml @@ -27,8 +27,10 @@ --> - - + + + + user@machine diff --git a/doc/manual/command-ref/opt-common-syn.xml b/doc/manual/command-ref/opt-common-syn.xml index 5b793639395..3aff4e1b635 100644 --- a/doc/manual/command-ref/opt-common-syn.xml +++ b/doc/manual/command-ref/opt-common-syn.xml @@ -2,10 +2,18 @@ - - - - + + + + + + + + + + + + @@ -25,10 +33,18 @@ number - - - - + + + + + + + + + + + + From d1643bdaa2f66760b5dce3e42953b433e7eb49bc Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 21:11:39 +0200 Subject: [PATCH 0420/2196] man pages: Argument for --max-jobs --- doc/manual/command-ref/opt-common.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml index 2aa41c4d438..18c664d0973 100644 --- a/doc/manual/command-ref/opt-common.xml +++ b/doc/manual/command-ref/opt-common.xml @@ -89,8 +89,8 @@
- - + / +number Sets the maximum number of build jobs that Nix will perform in parallel to the specified number. Specify From 56a1f8f4998cb6aa4ff2c8b61ca36634af4b3759 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 21:14:45 +0200 Subject: [PATCH 0421/2196] man pages: Consistently separate alternatives by / --- doc/manual/command-ref/nix-env.xml | 6 ++---- doc/manual/command-ref/opt-common.xml | 12 ++++-------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/doc/manual/command-ref/nix-env.xml b/doc/manual/command-ref/nix-env.xml index 8462cf8a027..d4563ac4755 100644 --- a/doc/manual/command-ref/nix-env.xml +++ b/doc/manual/command-ref/nix-env.xml @@ -146,8 +146,7 @@ also . - - + / path Specifies the Nix expression (designated below as the active Nix expression) used by the @@ -166,8 +165,7 @@ also . - - + / path Specifies the profile to be used by those operations that operate on a profile (designated below as the diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml index 18c664d0973..a930b4a0d78 100644 --- a/doc/manual/command-ref/opt-common.xml +++ b/doc/manual/command-ref/opt-common.xml @@ -22,8 +22,7 @@ - - + / @@ -76,8 +75,7 @@ - - + / By default, output written by builders to standard output and standard error is echoed to the Nix command's standard @@ -143,8 +141,7 @@ - - + / Keep going in case of failed builds, to the greatest extent possible. That is, if building an input of some @@ -156,8 +153,7 @@ - - + / Specifies that in case of a build failure, the temporary directory (usually in /tmp) in which From b1f5995a20b5511bce614d1718816088bc7f0ec0 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 20:37:55 +0200 Subject: [PATCH 0422/2196] man page (nix-instantiate): Remove non-existent nix-build argument -r --- doc/manual/command-ref/nix-store.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml index fb017b741da..19c99841a0a 100644 --- a/doc/manual/command-ref/nix-store.xml +++ b/doc/manual/command-ref/nix-store.xml @@ -234,7 +234,7 @@ linkend="sec-nix-build">nix-build does. To test whether a previously-built derivation is deterministic: -$ nix-build -r '<nixpkgs>' -A hello --check -K +$ nix-build '<nixpkgs>' -A hello --check -K From 8ad898b2cd2f5d7bba29da48f67658d3a04888db Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 21:51:22 +0200 Subject: [PATCH 0423/2196] man page (nix-instantiate): Add --json to synopsis, order variables --- doc/manual/command-ref/nix-instantiate.xml | 33 +++++++++++----------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/doc/manual/command-ref/nix-instantiate.xml b/doc/manual/command-ref/nix-instantiate.xml index fe077ff6560..87422895470 100644 --- a/doc/manual/command-ref/nix-instantiate.xml +++ b/doc/manual/command-ref/nix-instantiate.xml @@ -24,6 +24,7 @@ + @@ -116,13 +117,17 @@ input. - + - When used with , print the resulting - value as an XML representation of the abstract syntax tree rather than as - an ATerm. The schema is the same as that used by the toXML built-in. - + When used with , + recursively evaluate list elements and attributes. Normally, such + sub-expressions are left unevaluated (since the Nix expression + language is lazy). + + This option can cause non-termination, because lazy + data structures can be infinitely large. + + @@ -134,17 +139,13 @@ input. 
- - - When used with , - recursively evaluate list elements and attributes. Normally, such - sub-expressions are left unevaluated (since the Nix expression - language is lazy). - - This option can cause non-termination, because lazy - data structures can be infinitely large. + - + When used with , print the resulting + value as an XML representation of the abstract syntax tree rather than as + an ATerm. The schema is the same as that used by the toXML built-in. + From 772ef22c25f6dac969ce7f13586a08d1191f4a44 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 21:51:54 +0200 Subject: [PATCH 0424/2196] man page (nix-instantiate): -E is optional --- doc/manual/command-ref/nix-instantiate.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/command-ref/nix-instantiate.xml b/doc/manual/command-ref/nix-instantiate.xml index 87422895470..39c1282fcc3 100644 --- a/doc/manual/command-ref/nix-instantiate.xml +++ b/doc/manual/command-ref/nix-instantiate.xml @@ -39,7 +39,7 @@ path - + From 89771a882157c22ff90ab6b1f9e93ed464aa3177 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 21:52:31 +0200 Subject: [PATCH 0425/2196] man page (nix-prefetch-url): Add some missing options --- doc/manual/command-ref/nix-prefetch-url.xml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/manual/command-ref/nix-prefetch-url.xml b/doc/manual/command-ref/nix-prefetch-url.xml index 016d8863a94..8ef748c74be 100644 --- a/doc/manual/command-ref/nix-prefetch-url.xml +++ b/doc/manual/command-ref/nix-prefetch-url.xml @@ -19,14 +19,16 @@ nix-prefetch-url + hashAlgo + + name url hash - Description The command nix-prefetch-url downloads the @@ -91,7 +93,7 @@ downloaded file in the Nix store is also printed. - + name Override the name of the file in the Nix store. By default, this is From c85e662004d6f2881dde92991d3a2352e612e8e3 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Fri, 7 Jul 2017 21:52:56 +0200 Subject: [PATCH 0426/2196] man page (nix-shell): Fix grouping of -p option Not sure about the raw ellipsis. --- doc/manual/command-ref/nix-shell.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/manual/command-ref/nix-shell.xml b/doc/manual/command-ref/nix-shell.xml index c64c93ec3ac..078fa38494b 100644 --- a/doc/manual/command-ref/nix-shell.xml +++ b/doc/manual/command-ref/nix-shell.xml @@ -33,12 +33,12 @@ regexp - - + + - packages + packages path @@ -144,7 +144,7 @@ also . - / + / packages Set up an environment in which the specified packages are present. 
The command line arguments are interpreted From 17bb00d378aea9ea7f7f27618bd736f2c47e3de2 Mon Sep 17 00:00:00 2001 From: Rhys Date: Fri, 30 Jun 2017 14:14:50 +1000 Subject: [PATCH 0427/2196] Clearer error message when regex exceeds space limit --- src/libexpr/primops.cc | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 99ffddaeb80..f240e795ee7 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1734,8 +1734,13 @@ static void prim_match(EvalState & state, const Pos & pos, Value * * args, Value mkString(*(v.listElems()[i] = state.allocValue()), match[i + 1].str().c_str()); } - } catch (std::regex_error &) { - throw EvalError("invalid regular expression ‘%s’, at %s", re, pos); + } catch (std::regex_error &e) { + if (e.code() == std::regex_constants::error_space) { + // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ + throw EvalError("memory limit exceeded by regular expression ‘%s’, at %s", re, pos); + } else { + throw EvalError("invalid regular expression ‘%s’, at %s", re, pos); + } } } From 30117fb35bde6ba27d7de5037272ea23c8dcebf9 Mon Sep 17 00:00:00 2001 From: Robert Vollmert Date: Mon, 10 Jul 2017 14:36:55 +0200 Subject: [PATCH 0428/2196] fix buggy nix-shell man page --- doc/manual/command-ref/nix-shell.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/command-ref/nix-shell.xml b/doc/manual/command-ref/nix-shell.xml index 078fa38494b..62d026ac238 100644 --- a/doc/manual/command-ref/nix-shell.xml +++ b/doc/manual/command-ref/nix-shell.xml @@ -39,7 +39,7 @@ packages - + path From 8e8caf7f3e535d4e397f422f6c0effd81f497305 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 11 Jul 2017 23:11:35 +0200 Subject: [PATCH 0429/2196] fetchTarball: Prevent concurrent downloads of the same file Fixes #849. --- src/libstore/download.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 15eb68c69ea..ac9a2b8fede 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -6,6 +6,7 @@ #include "archive.hh" #include "s3.hh" #include "compression.hh" +#include "pathlocks.hh" #ifdef ENABLE_S3 #include @@ -586,6 +587,8 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa Path dataFile = cacheDir + "/" + urlHash + ".info"; Path fileLink = cacheDir + "/" + urlHash + "-file"; + PathLocks lock({fileLink}, fmt("waiting for lock on ‘%1%’...", fileLink)); + Path storePath; string expectedETag; @@ -647,6 +650,7 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa if (unpack) { Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked"; + PathLocks lock2({unpackedLink}, fmt("waiting for lock on ‘%1%’...", unpackedLink)); Path unpackedStorePath; if (pathExists(unpackedLink)) { unpackedStorePath = readLink(unpackedLink); From 2965d40612e16388ef2177d2a13d168848c6ca8a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 11 Jul 2017 23:20:01 +0200 Subject: [PATCH 0430/2196] replaceSymlink(): Handle the case where the temporary file already exists Not really necessary anymore for #849, but still nice to have. 
--- src/libutil/util.cc | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6bf4b3d9180..d9f8011b6fb 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -496,12 +496,21 @@ void createSymlink(const Path & target, const Path & link) void replaceSymlink(const Path & target, const Path & link) { - Path tmp = canonPath(dirOf(link) + "/.new_" + baseNameOf(link)); + for (unsigned int n = 0; true; n++) { + Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link))); - createSymlink(target, tmp); + try { + createSymlink(target, tmp); + } catch (SysError & e) { + if (e.errNo == EEXIST) continue; + throw; + } - if (rename(tmp.c_str(), link.c_str()) != 0) - throw SysError(format("renaming ‘%1%’ to ‘%2%’") % tmp % link); + if (rename(tmp.c_str(), link.c_str()) != 0) + throw SysError(format("renaming ‘%1%’ to ‘%2%’") % tmp % link); + + break; + } } From 0681f8c9070f3a40899d3040370e460c249a74bd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Jul 2017 19:24:20 +0200 Subject: [PATCH 0431/2196] Shut up a memory leak warning --- src/libmain/stack.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libmain/stack.cc b/src/libmain/stack.cc index 57b6a197c0f..ab3baf62c3d 100644 --- a/src/libmain/stack.cc +++ b/src/libmain/stack.cc @@ -52,7 +52,8 @@ void detectStackOverflow() delivered when we're out of stack space. */ stack_t stack; stack.ss_size = 4096 * 4 + MINSIGSTKSZ; - stack.ss_sp = new char[stack.ss_size]; + static auto stackBuf = std::make_unique>(stack.ss_size); + stack.ss_sp = stackBuf->data(); if (!stack.ss_sp) throw Error("cannot allocate alternative stack"); stack.ss_flags = 0; if (sigaltstack(&stack, 0) == -1) throw SysError("cannot set alternative stack"); From 38374a9d35765a1c0b78bfeb02e6f22fc8643e83 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Jul 2017 11:40:09 +0200 Subject: [PATCH 0432/2196] Tarball job: Include libseccomp on Linux only --- release.nix | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/release.nix b/release.nix index af7611fbe60..1348ef7b93f 100644 --- a/release.nix +++ b/release.nix @@ -28,8 +28,7 @@ let docbook5 docbook5_xsl autoconf-archive git - libseccomp - ]; + ] ++ lib.optional stdenv.isLinux libseccomp; configureFlags = "--enable-gc"; From 112ff7833d4f3a233755b2fe856b2eb2b3723254 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 14 Jul 2017 13:44:45 +0200 Subject: [PATCH 0433/2196] nix: Show help when no arguments are given Fixes #1464. --- src/libutil/args.cc | 2 +- src/libutil/args.hh | 5 +++-- src/nix/command.cc | 2 +- src/nix/main.cc | 15 +++++++++------ 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/libutil/args.cc b/src/libutil/args.cc index df7e040875d..0eed4945418 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -146,7 +146,7 @@ bool Args::processArgs(const Strings & args, bool finish) res = true; } - if (finish && !expectedArgs.empty()) + if (finish && !expectedArgs.empty() && !expectedArgs.front().optional) throw UsageError("more arguments are required"); return res; diff --git a/src/libutil/args.hh b/src/libutil/args.hh index aa11373d5f1..ef8a7953e52 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -51,6 +51,7 @@ protected: { std::string label; size_t arity; // 0 = any + bool optional; std::function handler; }; @@ -165,7 +166,7 @@ public: /* Expect a string argument. 
*/ void expectArg(const std::string & label, string * dest) { - expectedArgs.push_back(ExpectedArg{label, 1, [=](Strings ss) { + expectedArgs.push_back(ExpectedArg{label, 1, false, [=](Strings ss) { *dest = ss.front(); }}); } @@ -173,7 +174,7 @@ public: /* Expect 0 or more arguments. */ void expectArgs(const std::string & label, Strings * dest) { - expectedArgs.push_back(ExpectedArg{label, 0, [=](Strings ss) { + expectedArgs.push_back(ExpectedArg{label, 0, false, [=](Strings ss) { *dest = ss; }}); } diff --git a/src/nix/command.cc b/src/nix/command.cc index 3c82e0df57f..96b685a5b2e 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -24,7 +24,7 @@ void Command::printHelp(const string & programName, std::ostream & out) MultiCommand::MultiCommand(const Commands & _commands) : commands(_commands) { - expectedArgs.push_back(ExpectedArg{"command", 1, [=](Strings ss) { + expectedArgs.push_back(ExpectedArg{"command", 1, true, [=](Strings ss) { assert(!command); auto i = commands.find(ss.front()); if (i == commands.end()) diff --git a/src/nix/main.cc b/src/nix/main.cc index 88a602b8481..4b51c5ee14a 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -15,11 +15,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs { NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix") { - mkFlag('h', "help", "show usage information", [=]() { - printHelp(programName, std::cout); - std::cout << "\nNote: this program is EXPERIMENTAL and subject to change.\n"; - throw Exit(); - }); + mkFlag('h', "help", "show usage information", [&]() { showHelpAndExit(); }); mkFlag(0, "help-config", "show configuration options", [=]() { std::cout << "The following configuration options are available:\n\n"; @@ -47,6 +43,13 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs "Boolean settings can be overriden using ‘--’ or ‘--no-’. 
See ‘nix\n" "--help-config’ for a list of configuration settings.\n"; } + + void showHelpAndExit() + { + printHelp(programName, std::cout); + std::cout << "\nNote: this program is EXPERIMENTAL and subject to change.\n"; + throw Exit(); + } }; void mainWrapped(int argc, char * * argv) @@ -68,7 +71,7 @@ void mainWrapped(int argc, char * * argv) args.parseCmdline(argvToStrings(argc, argv)); - assert(args.command); + if (!args.command) args.showHelpAndExit(); StartProgressBar bar; From a0ad8ba12ee26edf0bf5f221b080a61c0d24a874 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Sun, 9 Jul 2017 12:38:32 -0400 Subject: [PATCH 0434/2196] Shellcheck the existing installer --- release.nix | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 1348ef7b93f..86a7350d5ea 100644 --- a/release.nix +++ b/release.nix @@ -137,7 +137,7 @@ let runCommand "nix-binary-tarball-${version}" { exportReferencesGraph = [ "closure1" toplevel "closure2" cacert ]; - buildInputs = [ perl ]; + buildInputs = [ perl shellcheck ]; meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; } '' @@ -146,6 +146,9 @@ let substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \ --subst-var-by nix ${toplevel} \ --subst-var-by cacert ${cacert} + + shellcheck -e SC1090 $TMPDIR/install + chmod +x $TMPDIR/install dir=nix-${version}-${system} fn=$out/$dir.tar.bz2 From fb40d73e23383224fc541911da95a894bbbcc2a4 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Sun, 9 Jul 2017 13:07:28 -0400 Subject: [PATCH 0435/2196] Switch to a fancy multi-user installer on Darwin --- release.nix | 7 +- scripts/install-darwin-multi-user.sh | 808 +++++++++++++++++++++++++++ scripts/install-nix-from-closure.sh | 24 +- 3 files changed, 831 insertions(+), 8 deletions(-) create mode 100644 scripts/install-darwin-multi-user.sh diff --git a/release.nix b/release.nix index 86a7350d5ea..796591cc167 100644 --- a/release.nix +++ b/release.nix @@ -146,10 +146,15 @@ let substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \ --subst-var-by nix ${toplevel} \ --subst-var-by cacert ${cacert} + substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user \ + --subst-var-by nix ${toplevel} \ + --subst-var-by cacert ${cacert} shellcheck -e SC1090 $TMPDIR/install + shellcheck -e SC1091,SC2002 $TMPDIR/install-darwin-multi-user chmod +x $TMPDIR/install + chmod +x $TMPDIR/install-darwin-multi-user dir=nix-${version}-${system} fn=$out/$dir.tar.bz2 mkdir -p $out/nix-support @@ -161,7 +166,7 @@ let --transform "s,$TMPDIR/install,$dir/install," \ --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \ --transform "s,$NIX_STORE,$dir/store,S" \ - $TMPDIR/install $TMPDIR/reginfo $storePaths + $TMPDIR/install $TMPDIR/install-darwin-multi-user $TMPDIR/reginfo $storePaths ''); diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh new file mode 100644 index 00000000000..87d55b2f017 --- /dev/null +++ b/scripts/install-darwin-multi-user.sh @@ -0,0 +1,808 @@ +#!/bin/bash + +set -eu +set -o pipefail + +# Sourced from: +# - https://github.com/LnL7/nix-darwin/blob/8c29d0985d74b4a990238497c47a2542a5616b3c/bootstrap.sh +# - https://gist.github.com/expipiplus1/e571ce88c608a1e83547c918591b149f/ac504c6c1b96e65505fbda437a28ce563408ecb0 +# - https://github.com/NixOS/nixos-org-configurations/blob/a122f418797713d519aadf02e677fce0dc1cb446/delft/scripts/nix-mac-installer.sh +# - 
https://github.com/matthewbauer/macNixOS/blob/f6045394f9153edea417be90c216788e754feaba/install-macNixOS.sh +# - https://gist.github.com/LnL7/9717bd6cdcb30b086fd7f2093e5f8494/86b26f852ce563e973acd30f796a9a416248c34a +# +# however tracking which bits came from which would be impossible. + +readonly ESC='\033[0m' +readonly BOLD='\033[38;1m' +readonly BLUE='\033[38;34m' +readonly BLUE_UL='\033[38;4;34m' +readonly GREEN='\033[38;32m' +readonly GREEN_UL='\033[38;4;32m' +readonly RED='\033[38;31m' +readonly RED_UL='\033[38;4;31m' +readonly YELLOW='\033[38;33m' +readonly YELLOW_UL='\033[38;4;33m' + +readonly CORES=$(sysctl -n hw.ncpu) +readonly NIX_USER_COUNT="$CORES" +readonly NIX_BUILD_GROUP_ID="30000" +readonly NIX_BUILD_GROUP_NAME="nixbld" +readonly NIX_FIRST_BUILD_UID="30001" +# Please don't change this. We don't support it, because the +# default shell profile that comes with Nix doesn't support it. +readonly NIX_ROOT="/nix" +readonly PLIST_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist + +readonly PROFILE_TARGETS=("/etc/profile" "/etc/bashrc" "/etc/zshrc") +readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" +readonly PROFILE_NIX_FILE_DIR="/etc/" +readonly PROFILE_NIX_FILE_NAME="profile-nix.sh" +readonly PROFILE_NIX_FILE="$PROFILE_NIX_FILE_DIR$PROFILE_NIX_FILE_NAME" + + + +readonly NIX_INSTALLED_NIX="@nix@" +readonly NIX_INSTALLED_CACERT="@cacert@" +readonly EXTRACTED_NIX_PATH="$(dirname "$0")" + +readonly ROOT_HOME="/var/root" + +contactme() { + echo "We'd love to help if you need it." + echo "" + echo "If you can, open an issue at https://github.com/nixos/nix/issues" + echo "" + echo "Or feel free to contact the team," + echo " - on IRC #nixos on irc.freenode.net" + echo " - on twitter @nixos_org" +} + +uninstall_directions() { + subheader "Uninstalling nix:" + local step=1 + cat < $1" +} + +bold() { + echo "$BOLD$*$ESC" +} + +ok() { + _textout "$GREEN" "$@" +} + +warning() { + warningheader "warning!" + cat + echo "" +} + +failure() { + header "oh no!" + _textout "$RED" "$@" + echo "" + _textout "$RED" "$(contactme)" + trap finish_cleanup EXIT + exit 1 +} + +ui_confirm() { + _textout "$GREEN$GREEN_UL" "$1" + + local prompt="[y/n] " + echo -n "$prompt" + while read -r y; do + if [ "$y" = "y" ]; then + echo "" + return 0 + elif [ "$y" = "n" ]; then + echo "" + return 1 + else + _textout "$RED" "Sorry, I didn't understand. 
I can only understand answers of y or n" + echo -n "$prompt" + fi + done + echo "" + return 1 +} + +__sudo() { + local expl="$1" + local cmd="$2" + shift + header "sudo execution" + + echo "I am executing:" + echo "" + echo " $ sudo $cmd" + echo "" + echo "$expl" + echo "" + + return 0 +} + +_sudo() { + local expl="$1" + shift + if __sudo "$expl" "$*"; then + sudo "$@" + fi +} + + +readonly SCRATCH=$(mktemp -d -t tmp.XXXXXXXXXX) +function finish_cleanup { + rm -rf "$SCRATCH" +} + +function finish_fail { + finish_cleanup + + failure < /dev/null >&2; then + failure < /dev/null 2>&1; then + _sudo "Create the Nix build group, $NIX_BUILD_GROUP_NAME" \ + /usr/sbin/dseditgroup -o create \ + -r "Nix build group for nix-daemon" \ + -i "$NIX_BUILD_GROUP_ID" \ + "$NIX_BUILD_GROUP_NAME" >&2 + row " Created" "Yes" + else + primary_group_id=$(dsclattr "/Groups/$NIX_BUILD_GROUP_NAME" "PrimaryGroupID") + if [ "$primary_group_id" -ne "$NIX_BUILD_GROUP_ID" ]; then + failure < /dev/null 2>&1; then + _sudo "Creating the Nix build user, $username" \ + /usr/sbin/sysadminctl -addUser -fullName "Nix build user $coreid" \ + -home /var/empty \ + -UID "${uid}" \ + -addUser "${username}" + row " Created" "Yes" + else + actual_uid=$(dsclattr "$dsclpath" "UniqueID") + if [ "$actual_uid" -ne "$uid" ]; then + failure < + hashed-mirrors + + A list of web servers used by + builtins.fetchurl to obtain files by + hash. The default is + http://tarballs.nixos.org/. Given a hash type + ht and a base-16 hash + h, Nix will try to download the file + from + hashed-mirror/ht/h. + This allows files to be downloaded even if they have disappeared + from their original URI. For example, given the default mirror + http://tarballs.nixos.org/, when building the derivation + + +builtins.fetchurl { + url = https://example.org/foo-1.2.3.tar.xz; + sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"; +} + + + Nix will attempt to download this file from + http://tarballs.nixos.org/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae + first. If it is not available there, if will try the original URI. 
+ + + + diff --git a/src/libstore/builtins.cc b/src/libstore/builtins.cc index 8a5cf3327d4..b51b6f85cb0 100644 --- a/src/libstore/builtins.cc +++ b/src/libstore/builtins.cc @@ -38,12 +38,15 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) std::shared_ptr data; - try { - if (getAttr("outputHashMode") == "flat") - data = fetch("http://tarballs.nixos.org/" + getAttr("outputHashAlgo") + "/" + getAttr("outputHash")); - } catch (Error & e) { - debug(e.what()); - } + if (getAttr("outputHashMode") == "flat") + for (auto hashedMirror : settings.hashedMirrors.get()) + try { + if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; + data = fetch(hashedMirror + getAttr("outputHashAlgo") + "/" + getAttr("outputHash")); + break; + } catch (Error & e) { + debug(e.what()); + } if (!data) data = fetch(getAttr("url")); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index c8d67b07110..9ebbf7b477b 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -327,6 +327,9 @@ public: "Whether builders can acquire new privileges by calling programs with " "setuid/setgid bits or with file capabilities."}; #endif + + Setting hashedMirrors{this, {"http://tarballs.nixos.org/"}, "hashed-mirrors", + "A list of servers used by builtins.fetchurl to fetch files by hash."}; }; diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index 808f460258d..02b83525bb3 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -5,7 +5,7 @@ clearStore # Test fetching a flat file. hash=$(nix-hash --flat --type sha256 ./fetchurl.sh) -outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link) +outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link --option hashed-mirrors '') cmp $outPath fetchurl.sh @@ -14,7 +14,7 @@ clearStore hash=$(nix hash-file --type sha512 --base64 ./fetchurl.sh) -outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link) +outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link --option hashed-mirrors '') cmp $outPath fetchurl.sh From bf6792c0df2163a0501beb14feeb5135c4aa71c2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Jul 2017 13:13:18 +0200 Subject: [PATCH 0458/2196] Always use base-16 for hashed mirror lookups In particular, don't use base-64, which we don't support. (We do have base-32 redirects for hysterical reasons.) Also, add a test for the hashed mirror feature. 
--- src/libstore/builtins.cc | 3 ++- tests/fetchurl.sh | 13 +++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/libstore/builtins.cc b/src/libstore/builtins.cc index b51b6f85cb0..a01dbba4ede 100644 --- a/src/libstore/builtins.cc +++ b/src/libstore/builtins.cc @@ -42,7 +42,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - data = fetch(hashedMirror + getAttr("outputHashAlgo") + "/" + getAttr("outputHash")); + auto ht = parseHashType(getAttr("outputHashAlgo")); + data = fetch(hashedMirror + printHashType(ht) + "/" + Hash(getAttr("outputHash"), ht).to_string(Base16, false)); break; } catch (Error & e) { debug(e.what()); diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index 02b83525bb3..7f2de907049 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -18,6 +18,19 @@ outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh cmp $outPath fetchurl.sh +# Test the hashed mirror feature. +clearStore + +hash=$(nix hash-file --type sha512 --base64 ./fetchurl.sh) +hash32=$(nix hash-file --type sha512 --base16 ./fetchurl.sh) + +mirror=$TMPDIR/hashed-mirror +rm -rf $mirror +mkdir -p $mirror/sha512 +ln -s $(pwd)/fetchurl.sh $mirror/sha512/$hash32 + +outPath=$(nix-build '' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha512 $hash --no-out-link --option hashed-mirrors "file://$mirror") + # Test unpacking a NAR. rm -rf $TEST_ROOT/archive mkdir -p $TEST_ROOT/archive From da2ad300545a838b2b6485481271dad9865a5e8f Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Tue, 18 Jul 2017 08:02:53 -0400 Subject: [PATCH 0459/2196] Update the mailing list. --- doc/manual/release-notes/rl-1.2.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/release-notes/rl-1.2.xml b/doc/manual/release-notes/rl-1.2.xml index dc272c420dd..748fd9e6702 100644 --- a/doc/manual/release-notes/rl-1.2.xml +++ b/doc/manual/release-notes/rl-1.2.xml @@ -40,7 +40,7 @@ $ nix-env -i thunderbird --option binary-caches http://cache.nixos.org Binary caches are created using nix-push. For details on the operation and format of binary caches, see the nix-push manpage. More details are provided in - this + this nix-dev posting. From 364f75e03a2169c794846fb12dd1d2fb49dcabd6 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Tue, 18 Jul 2017 08:04:01 -0400 Subject: [PATCH 0460/2196] Update mailing list. --- tests/lang/eval-fail-path-slash.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lang/eval-fail-path-slash.nix b/tests/lang/eval-fail-path-slash.nix index 530105b3210..8c2e104c788 100644 --- a/tests/lang/eval-fail-path-slash.nix +++ b/tests/lang/eval-fail-path-slash.nix @@ -2,5 +2,5 @@ # This restriction could be lifted sometime, # for example if we make '/' a path concatenation operator. # See https://github.com/NixOS/nix/issues/1138 -# and http://lists.science.uu.nl/pipermail/nix-dev/2016-June/020829.html +# and https://nixos.org/nix-dev/2016-June/020829.html /nix/store/ From d5e1bffd2a5b5c39f14944fe3ba25414dcd0d508 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Tue, 18 Jul 2017 17:51:50 -0400 Subject: [PATCH 0461/2196] Do not try to fill fd_set with fd>=FD_SETSIZE This is UB and causes buffer overflow and crash on linux. 
--- src/libstore/build.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 60b0a531f42..d2a270259a8 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3835,6 +3835,9 @@ void Worker::waitForInput() int fdMax = 0; for (auto & i : children) { for (auto & j : i.fds) { + if (j >= FD_SETSIZE) { + throw BuildError("reached FD_SETSIZE limit"); + } FD_SET(j, &fds); if (j >= fdMax) fdMax = j + 1; } From 90825dea518ea078f0783a72cc471a5b3716d198 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Jul 2017 19:02:56 +0200 Subject: [PATCH 0462/2196] Add "nix search" command --- src/libexpr/get-drvs.cc | 44 ++++++++++---- src/libexpr/get-drvs.hh | 39 ++++++------ src/libutil/args.cc | 1 + src/libutil/args.hh | 4 +- src/nix-env/nix-env.cc | 90 ++++++++++++++-------------- src/nix-env/user-env.cc | 7 ++- src/nix/command.hh | 34 +++++++---- src/nix/installables.cc | 16 ++--- src/nix/search.cc | 130 ++++++++++++++++++++++++++++++++++++++++ 9 files changed, 263 insertions(+), 102 deletions(-) create mode 100644 src/nix/search.cc diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 4200e8fd675..b7e16de7fa4 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -9,7 +9,34 @@ namespace nix { -string DrvInfo::queryDrvPath() +DrvInfo::DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs) + : state(&state), attrs(attrs), attrPath(attrPath) +{ +} + + +string DrvInfo::queryName() const +{ + if (name == "" && attrs) { + auto i = attrs->find(state->sName); + if (i == attrs->end()) throw TypeError("derivation name missing"); + name = state->forceStringNoCtx(*i->value); + } + return name; +} + + +string DrvInfo::querySystem() const +{ + if (system == "" && attrs) { + auto i = attrs->find(state->sSystem); + system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, *i->pos); + } + return system; +} + + +string DrvInfo::queryDrvPath() const { if (drvPath == "" && attrs) { Bindings::iterator i = attrs->find(state->sDrvPath); @@ -20,7 +47,7 @@ string DrvInfo::queryDrvPath() } -string DrvInfo::queryOutPath() +string DrvInfo::queryOutPath() const { if (outPath == "" && attrs) { Bindings::iterator i = attrs->find(state->sOutPath); @@ -76,7 +103,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall) } -string DrvInfo::queryOutputName() +string DrvInfo::queryOutputName() const { if (outputName == "" && attrs) { Bindings::iterator i = attrs->find(state->sOutputName); @@ -225,17 +252,12 @@ static bool getDerivation(EvalState & state, Value & v, if (done.find(v.attrs) != done.end()) return false; done.insert(v.attrs); - Bindings::iterator i = v.attrs->find(state.sName); - /* !!! We really would like to have a decent back trace here. */ - if (i == v.attrs->end()) throw TypeError("derivation name missing"); + DrvInfo drv(state, attrPath, v.attrs); - Bindings::iterator i2 = v.attrs->find(state.sSystem); - - DrvInfo drv(state, state.forceStringNoCtx(*i->value), attrPath, - i2 == v.attrs->end() ? 
"unknown" : state.forceStringNoCtx(*i2->value, *i2->pos), - v.attrs); + drv.queryName(); drvs.push_back(drv); + return false; } catch (AssertionError & e) { diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index 37fcbe829d3..82fb8a3ac6a 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -17,31 +17,32 @@ public: private: EvalState * state; - string drvPath; - string outPath; - string outputName; + mutable string name; + mutable string system; + mutable string drvPath; + mutable string outPath; + mutable string outputName; Outputs outputs; - bool failed; // set if we get an AssertionError + bool failed = false; // set if we get an AssertionError - Bindings * attrs, * meta; + Bindings * attrs = nullptr, * meta = nullptr; Bindings * getMeta(); bool checkMeta(Value & v); public: - string name; string attrPath; /* path towards the derivation */ - string system; - DrvInfo(EvalState & state) : state(&state), failed(false), attrs(0), meta(0) { }; - DrvInfo(EvalState & state, const string & name, const string & attrPath, const string & system, Bindings * attrs) - : state(&state), failed(false), attrs(attrs), meta(0), name(name), attrPath(attrPath), system(system) { }; + DrvInfo(EvalState & state) : state(&state) { }; + DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs); - string queryDrvPath(); - string queryOutPath(); - string queryOutputName(); + string queryName() const; + string querySystem() const; + string queryDrvPath() const; + string queryOutPath() const; + string queryOutputName() const; /** Return the list of outputs. The "outputs to install" are determined by `mesa.outputsToInstall`. */ Outputs queryOutputs(bool onlyOutputsToInstall = false); @@ -58,15 +59,9 @@ public: MetaValue queryMetaInfo(EvalState & state, const string & name) const; */ - void setDrvPath(const string & s) - { - drvPath = s; - } - - void setOutPath(const string & s) - { - outPath = s; - } + void setName(const string & s) { name = s; } + void setDrvPath(const string & s) { drvPath = s; } + void setOutPath(const string & s) { outPath = s; } void setFailed() { failed = true; }; bool hasFailed() { return failed; }; diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 0eed4945418..19a45d7e9b3 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -66,6 +66,7 @@ void Args::printHelp(const string & programName, std::ostream & out) std::cout << renderLabels({exp.label}); // FIXME: handle arity > 1 if (exp.arity == 0) std::cout << "..."; + if (exp.optional) std::cout << "?"; } std::cout << "\n"; diff --git a/src/libutil/args.hh b/src/libutil/args.hh index ef8a7953e52..37e780dd174 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -164,9 +164,9 @@ public: } /* Expect a string argument. */ - void expectArg(const std::string & label, string * dest) + void expectArg(const std::string & label, string * dest, bool optional = false) { - expectedArgs.push_back(ExpectedArg{label, 1, false, [=](Strings ss) { + expectedArgs.push_back(ExpectedArg{label, 1, optional, [=](Strings ss) { *dest = ss.front(); }}); } diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 10100d6a601..8620cd25574 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -186,7 +186,7 @@ static void loadDerivations(EvalState & state, Path nixExprPath, system. 
*/ for (DrvInfos::iterator i = elems.begin(), j; i != elems.end(); i = j) { j = i; j++; - if (systemFilter != "*" && i->system != systemFilter) + if (systemFilter != "*" && i->querySystem() != systemFilter) elems.erase(i); } } @@ -247,7 +247,7 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, for (DrvInfos::const_iterator j = allElems.begin(); j != allElems.end(); ++j, ++n) { - DrvName drvName(j->name); + DrvName drvName(j->queryName()); if (i.matches(drvName)) { i.hits++; matches.push_back(std::pair(*j, n)); @@ -269,36 +269,36 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, StringSet multiple; for (auto & j : matches) { - DrvName drvName(j.first.name); + DrvName drvName(j.first.queryName()); int d = 1; Newest::iterator k = newest.find(drvName.name); if (k != newest.end()) { - d = j.first.system == k->second.first.system ? 0 : - j.first.system == settings.thisSystem ? 1 : - k->second.first.system == settings.thisSystem ? -1 : 0; + d = j.first.querySystem() == k->second.first.querySystem() ? 0 : + j.first.querySystem() == settings.thisSystem ? 1 : + k->second.first.querySystem() == settings.thisSystem ? -1 : 0; if (d == 0) d = comparePriorities(state, j.first, k->second.first); if (d == 0) - d = compareVersions(drvName.version, DrvName(k->second.first.name).version); + d = compareVersions(drvName.version, DrvName(k->second.first.queryName()).version); } if (d > 0) { newest.erase(drvName.name); newest.insert(Newest::value_type(drvName.name, j)); - multiple.erase(j.first.name); + multiple.erase(j.first.queryName()); } else if (d == 0) { - multiple.insert(j.first.name); + multiple.insert(j.first.queryName()); } } matches.clear(); for (auto & j : newest) { - if (multiple.find(j.second.first.name) != multiple.end()) + if (multiple.find(j.second.first.queryName()) != multiple.end()) printInfo( - format("warning: there are multiple derivations named ‘%1%’; using the first one") - % j.second.first.name); + "warning: there are multiple derivations named ‘%1%’; using the first one", + j.second.first.queryName()); matches.push_back(j.second); } } @@ -386,7 +386,8 @@ static void queryInstSources(EvalState & state, if (dash != string::npos) name = string(name, dash + 1); - DrvInfo elem(state, name, "", "", 0); + DrvInfo elem(state, "", nullptr); + elem.setName(name); if (isDerivation(path)) { elem.setDrvPath(path); @@ -468,8 +469,8 @@ static void installDerivations(Globals & globals, path is not the one we want (e.g., `java-front' versus `java-front-0.9pre15899'). */ if (globals.forceName != "") - i.name = globals.forceName; - newNames.insert(DrvName(i.name).name); + i.setName(globals.forceName); + newNames.insert(DrvName(i.queryName()).name); } @@ -484,17 +485,17 @@ static void installDerivations(Globals & globals, DrvInfos installedElems = queryInstalled(*globals.state, profile); for (auto & i : installedElems) { - DrvName drvName(i.name); + DrvName drvName(i.queryName()); if (!globals.preserveInstalled && newNames.find(drvName.name) != newNames.end() && !keep(i)) - printInfo(format("replacing old ‘%1%’") % i.name); + printInfo("replacing old ‘%s’", i.queryName()); else allElems.push_back(i); } for (auto & i : newElems) - printInfo(format("installing ‘%1%’") % i.name); + printInfo("installing ‘%s’", i.queryName()); } printMissing(*globals.state, newElems); @@ -548,7 +549,7 @@ static void upgradeDerivations(Globals & globals, /* Go through all installed derivations. 
*/ DrvInfos newElems; for (auto & i : installedElems) { - DrvName drvName(i.name); + DrvName drvName(i.queryName()); try { @@ -569,7 +570,7 @@ static void upgradeDerivations(Globals & globals, for (auto j = availElems.begin(); j != availElems.end(); ++j) { if (comparePriorities(*globals.state, i, *j) > 0) continue; - DrvName newName(j->name); + DrvName newName(j->queryName()); if (newName.name == drvName.name) { int d = compareVersions(drvName.version, newName.version); if ((upgradeType == utLt && d < 0) || @@ -596,14 +597,13 @@ static void upgradeDerivations(Globals & globals, { const char * action = compareVersions(drvName.version, bestVersion) <= 0 ? "upgrading" : "downgrading"; - printInfo( - format("%1% ‘%2%’ to ‘%3%’") - % action % i.name % bestElem->name); + printInfo("%1% ‘%2%’ to ‘%3%’", + action, i.queryName(), bestElem->queryName()); newElems.push_back(*bestElem); } else newElems.push_back(i); } catch (Error & e) { - e.addPrefix(format("while trying to find an upgrade for ‘%1%’:\n") % i.name); + e.addPrefix(fmt("while trying to find an upgrade for ‘%s’:\n", i.queryName())); throw; } } @@ -663,10 +663,10 @@ static void opSetFlag(Globals & globals, Strings opFlags, Strings opArgs) /* Update all matching derivations. */ for (auto & i : installedElems) { - DrvName drvName(i.name); + DrvName drvName(i.queryName()); for (auto & j : selectors) if (j.matches(drvName)) { - printInfo(format("setting flag on ‘%1%’") % i.name); + printInfo("setting flag on ‘%1%’", i.queryName()); j.hits++; setMetaFlag(*globals.state, i, flagName, flagValue); break; @@ -702,7 +702,7 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) DrvInfo & drv(elems.front()); if (globals.forceName != "") - drv.name = globals.forceName; + drv.setName(globals.forceName); if (drv.queryDrvPath() != "") { PathSet paths = {drv.queryDrvPath()}; @@ -732,7 +732,7 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, DrvInfos newElems; for (auto & i : installedElems) { - DrvName drvName(i.name); + DrvName drvName(i.queryName()); bool found = false; for (auto & j : selectors) /* !!! 
the repeated calls to followLinksToStorePath() @@ -740,7 +740,7 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, if ((isPath(j) && i.queryOutPath() == globals.state->store->followLinksToStorePath(j)) || DrvName(j).matches(drvName)) { - printInfo(format("uninstalling ‘%1%’") % i.name); + printInfo("uninstalling ‘%s’", i.queryName()); found = true; break; } @@ -771,9 +771,11 @@ static bool cmpChars(char a, char b) static bool cmpElemByName(const DrvInfo & a, const DrvInfo & b) { + auto a_name = a.queryName(); + auto b_name = b.queryName(); return lexicographical_compare( - a.name.begin(), a.name.end(), - b.name.begin(), b.name.end(), cmpChars); + a_name.begin(), a_name.end(), + b_name.begin(), b_name.end(), cmpChars); } @@ -822,13 +824,13 @@ typedef enum { cvLess, cvEqual, cvGreater, cvUnavail } VersionDiff; static VersionDiff compareVersionAgainstSet( const DrvInfo & elem, const DrvInfos & elems, string & version) { - DrvName name(elem.name); + DrvName name(elem.queryName()); VersionDiff diff = cvUnavail; version = "?"; for (auto & i : elems) { - DrvName name2(i.name); + DrvName name2(i.queryName()); if (name.name == name2.name) { int d = compareVersions(name.version, name2.version); if (d < 0) { @@ -857,8 +859,8 @@ static void queryJSON(Globals & globals, vector & elems) for (auto & i : elems) { JSONObject pkgObj = topObj.object(i.attrPath); - pkgObj.attr("name", i.name); - pkgObj.attr("system", i.system); + pkgObj.attr("name", i.queryName()); + pkgObj.attr("system", i.querySystem()); JSONObject metaObj = pkgObj.object("meta"); StringSet metaNames = i.queryMetaNames(); @@ -866,7 +868,7 @@ static void queryJSON(Globals & globals, vector & elems) auto placeholder = metaObj.placeholder(j); Value * v = i.queryMeta(j); if (!v) { - printError(format("derivation ‘%1%’ has invalid meta attribute ‘%2%’") % i.name % j); + printError("derivation ‘%s’ has invalid meta attribute ‘%s’", i.queryName(), j); placeholder.write(nullptr); } else { PathSet context; @@ -963,7 +965,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) try { paths.insert(i.queryOutPath()); } catch (AssertionError & e) { - printMsg(lvlTalkative, format("skipping derivation named ‘%1%’ which gives an assertion failure") % i.name); + printMsg(lvlTalkative, "skipping derivation named ‘%s’ which gives an assertion failure", i.queryName()); i.setFailed(); } validPaths = globals.state->store->queryValidPaths(paths); @@ -1024,9 +1026,9 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) columns.push_back(i.attrPath); if (xmlOutput) - attrs["name"] = i.name; + attrs["name"] = i.queryName(); else if (printName) - columns.push_back(i.name); + columns.push_back(i.queryName()); if (compareVersions) { /* Compare this element against the versions of the @@ -1059,10 +1061,10 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) } if (xmlOutput) { - if (i.system != "") attrs["system"] = i.system; + if (i.querySystem() != "") attrs["system"] = i.querySystem(); } else if (printSystem) - columns.push_back(i.system); + columns.push_back(i.querySystem()); if (printDrvPath) { string drvPath = i.queryDrvPath(); @@ -1110,7 +1112,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs2["name"] = j; Value * v = i.queryMeta(j); if (!v) - printError(format("derivation ‘%1%’ has invalid meta attribute ‘%2%’") % i.name % j); + printError("derivation ‘%s’ has invalid meta attribute ‘%s’", i.queryName(), j); else { if (v->type == tString) { 
attrs2["type"] = "string"; @@ -1161,9 +1163,9 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) cout.flush(); } catch (AssertionError & e) { - printMsg(lvlTalkative, format("skipping derivation named ‘%1%’ which gives an assertion failure") % i.name); + printMsg(lvlTalkative, "skipping derivation named ‘%1%’ which gives an assertion failure", i.queryName()); } catch (Error & e) { - e.addPrefix(format("while querying the derivation named ‘%1%’:\n") % i.name); + e.addPrefix(fmt("while querying the derivation named ‘%1%’:\n", i.queryName())); throw; } } diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index e9997fae57b..df5105f12c2 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -56,9 +56,10 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, state.mkAttrs(v, 16); mkString(*state.allocAttr(v, state.sType), "derivation"); - mkString(*state.allocAttr(v, state.sName), i.name); - if (!i.system.empty()) - mkString(*state.allocAttr(v, state.sSystem), i.system); + mkString(*state.allocAttr(v, state.sName), i.queryName()); + auto system = i.querySystem(); + if (!system.empty()) + mkString(*state.allocAttr(v, state.sSystem), system); mkString(*state.allocAttr(v, state.sOutPath), i.queryOutPath()); if (drvPath != "") mkString(*state.allocAttr(v, state.sDrvPath), i.queryDrvPath()); diff --git a/src/nix/command.hh b/src/nix/command.hh index ae7709b5dc8..53680265381 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -62,17 +62,13 @@ struct Installable } }; -/* A command that operates on a list of "installables", which can be - store paths, attribute paths, Nix expressions, etc. */ -struct InstallablesCommand : virtual Args, StoreCommand +struct SourceExprCommand : virtual Args, StoreCommand { - std::vector> installables; Path file; - InstallablesCommand() + SourceExprCommand() { mkFlag('f', "file", "file", "evaluate FILE rather than the default", &file); - expectArgs("installables", &_installables); } /* Return a value representing the Nix expression from which we @@ -81,14 +77,32 @@ struct InstallablesCommand : virtual Args, StoreCommand = import ...; bla = import ...; }’. */ Value * getSourceExpr(EvalState & state); + ref getEvalState(); + +private: + + std::shared_ptr evalState; + + Value * vSourceExpr = 0; +}; + +/* A command that operates on a list of "installables", which can be + store paths, attribute paths, Nix expressions, etc. */ +struct InstallablesCommand : virtual Args, SourceExprCommand +{ + std::vector> installables; + + InstallablesCommand() + { + expectArgs("installables", &_installables); + } + std::vector> parseInstallables(ref store, Strings ss); enum ToStorePathsMode { Build, NoBuild, DryRun }; PathSet toStorePaths(ref store, ToStorePathsMode mode); - ref getEvalState(); - void prepare() override; virtual bool useDefaultInstallables() { return true; } @@ -96,10 +110,6 @@ struct InstallablesCommand : virtual Args, StoreCommand private: Strings _installables; - - std::shared_ptr evalState; - - Value * vSourceExpr = 0; }; /* A command that operates on zero or more store paths. 
*/ diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 7fad8fe415c..4da736f4d5c 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -12,7 +12,7 @@ namespace nix { -Value * InstallablesCommand::getSourceExpr(EvalState & state) +Value * SourceExprCommand::getSourceExpr(EvalState & state) { if (vSourceExpr) return vSourceExpr; @@ -59,6 +59,13 @@ Value * InstallablesCommand::getSourceExpr(EvalState & state) return vSourceExpr; } +ref SourceExprCommand::getEvalState() +{ + if (!evalState) + evalState = std::make_shared(Strings{}, getStore()); + return ref(evalState); +} + struct InstallableStoreDrv : Installable { Path storePath; @@ -237,13 +244,6 @@ PathSet InstallablesCommand::toStorePaths(ref store, ToStorePathsMode mod return outPaths; } -ref InstallablesCommand::getEvalState() -{ - if (!evalState) - evalState = std::make_shared(Strings{}, getStore()); - return ref(evalState); -} - void InstallablesCommand::prepare() { installables = parseInstallables(getStore(), _installables); diff --git a/src/nix/search.cc b/src/nix/search.cc new file mode 100644 index 00000000000..813f6d0a622 --- /dev/null +++ b/src/nix/search.cc @@ -0,0 +1,130 @@ +#include "command.hh" +#include "globals.hh" +#include "eval.hh" +#include "eval-inline.hh" +#include "names.hh" +#include "get-drvs.hh" + +#include + +using namespace nix; + +std::string hilite(const std::string & s, const std::smatch & m) +{ + return + m.empty() + ? s + : std::string(m.prefix()) + + ANSI_RED + std::string(m.str()) + ANSI_NORMAL + + std::string(m.suffix()); +} + +struct CmdSearch : SourceExprCommand +{ + std::string re; + + CmdSearch() + { + expectArg("regex", &re, true); + } + + std::string name() override + { + return "search"; + } + + std::string description() override + { + return "query available packages"; + } + + void run(ref store) override + { + settings.readOnlyMode = true; + + std::regex regex(re, std::regex::extended | std::regex::icase); + + auto state = getEvalState(); + + std::function doExpr; + + bool first = true; + + doExpr = [&](Value * v, std::string attrPath, bool toplevel) { + debug("at attribute ‘%s’", attrPath); + + try { + + state->forceValue(*v); + + if (v->type == tLambda && toplevel) { + Value * v2 = state->allocValue(); + state->autoCallFunction(*state->allocBindings(1), *v, *v2); + v = v2; + state->forceValue(*v); + } + + if (state->isDerivation(*v)) { + + DrvInfo drv(*state, attrPath, v->attrs); + + DrvName parsed(drv.queryName()); + + std::smatch attrPathMatch; + std::regex_search(attrPath, attrPathMatch, regex); + + auto name = parsed.name; + std::smatch nameMatch; + std::regex_search(name, nameMatch, regex); + + std::string description = drv.queryMetaString("description"); + std::replace(description.begin(), description.end(), '\n', ' '); + std::smatch descriptionMatch; + std::regex_search(description, descriptionMatch, regex); + + if (!attrPathMatch.empty() + || !nameMatch.empty() + || !descriptionMatch.empty()) + { + if (!first) std::cout << "\n"; + first = false; + + std::cout << fmt( + "Attribute name: %s\n" + "Package name: %s\n" + "Version: %s\n" + "Description: %s\n", + hilite(attrPath, attrPathMatch), + hilite(name, nameMatch), + parsed.version, + hilite(description, descriptionMatch)); + } + } + + else if (v->type == tAttrs) { + + if (!toplevel) { + auto attrs = v->attrs; + Bindings::iterator j = attrs->find(state->symbols.create("recurseForDerivations")); + if (j == attrs->end() || !state->forceBool(*j->value, *j->pos)) return; + } + + Bindings::iterator j = 
v->attrs->find(state->symbols.create("_toplevel")); + bool toplevel2 = j != v->attrs->end() && state->forceBool(*j->value, *j->pos); + + for (auto & i : *v->attrs) { + doExpr(i.value, + attrPath == "" ? (std::string) i.name : attrPath + "." + (std::string) i.name, + toplevel2); + } + } + + } catch (AssertionError & e) { + } + }; + + doExpr(getSourceExpr(*state), "", true); + } +}; + +static RegisterCommand r1(make_ref()); From b144c4d61738b9d9e3d741e77066a2be115f0ab5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 18 Jul 2017 17:30:09 +0200 Subject: [PATCH 0463/2196] nix search: Add --json flag --- src/nix/search.cc | 40 +++++++++++++++++++++++++++------------- 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/src/nix/search.cc b/src/nix/search.cc index 813f6d0a622..8aac06ad2cd 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -4,6 +4,8 @@ #include "eval-inline.hh" #include "names.hh" #include "get-drvs.hh" +#include "common-args.hh" +#include "json.hh" #include @@ -19,7 +21,7 @@ std::string hilite(const std::string & s, const std::smatch & m) + std::string(m.suffix()); } -struct CmdSearch : SourceExprCommand +struct CmdSearch : SourceExprCommand, MixJSON { std::string re; @@ -50,6 +52,8 @@ struct CmdSearch : SourceExprCommand bool first = true; + auto jsonOut = json ? std::make_unique(std::cout, true) : nullptr; + doExpr = [&](Value * v, std::string attrPath, bool toplevel) { debug("at attribute ‘%s’", attrPath); @@ -86,18 +90,28 @@ struct CmdSearch : SourceExprCommand || !nameMatch.empty() || !descriptionMatch.empty()) { - if (!first) std::cout << "\n"; - first = false; - - std::cout << fmt( - "Attribute name: %s\n" - "Package name: %s\n" - "Version: %s\n" - "Description: %s\n", - hilite(attrPath, attrPathMatch), - hilite(name, nameMatch), - parsed.version, - hilite(description, descriptionMatch)); + if (json) { + + auto jsonElem = jsonOut->object(attrPath); + + jsonElem.attr("pkgName", parsed.name); + jsonElem.attr("version", parsed.version); + jsonElem.attr("description", description); + + } else { + if (!first) std::cout << "\n"; + first = false; + + std::cout << fmt( + "Attribute name: %s\n" + "Package name: %s\n" + "Version: %s\n" + "Description: %s\n", + hilite(attrPath, attrPathMatch), + hilite(name, nameMatch), + parsed.version, + hilite(description, descriptionMatch)); + } } } From fc3568e2633bf3faa234509cfc6034be95f10b6e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Jul 2017 11:18:06 +0200 Subject: [PATCH 0464/2196] FD_SETSIZE check: BuildError -> Error BuildError denotes a permanent build failure, which is not the case here. --- src/libstore/build.cc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d2a270259a8..8f483a90daa 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3835,9 +3835,8 @@ void Worker::waitForInput() int fdMax = 0; for (auto & i : children) { for (auto & j : i.fds) { - if (j >= FD_SETSIZE) { - throw BuildError("reached FD_SETSIZE limit"); - } + if (j >= FD_SETSIZE) + throw Error("reached FD_SETSIZE limit"); FD_SET(j, &fds); if (j >= fdMax) fdMax = j + 1; } From 57a30e101b36a064f09619bf4a3f0b8a3fdcdcad Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 19 Jul 2017 16:06:10 +0200 Subject: [PATCH 0465/2196] nix search: Ignore top-level eval errors $NIX_PATH may contain elements that don't evaluate to an attrset (like "nixos-config"), so ignore those. 
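Only the error handler in the attribute walk changes: evaluation errors at the top level (a $NIX_PATH entry that is not an attribute set) are skipped, while errors deeper inside a package set still abort the search, now with an attribute-path prefix. Condensed from the diff below, the handler in doExpr becomes roughly:

    } catch (AssertionError & e) {
        // as before: packages with failing assertions are skipped silently
    } catch (Error & e) {
        if (!toplevel) {
            e.addPrefix(fmt("While evaluating the attribute ‘%s’:\n", attrPath));
            throw;   // a real error inside a package set: propagate it
        }
        // at top level: ignore this entry and keep searching the others
    }
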
--- src/nix/installables.cc | 6 +++++- src/nix/search.cc | 9 ++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 4da736f4d5c..59162fdc116 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -16,6 +16,8 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state) { if (vSourceExpr) return vSourceExpr; + auto sToplevel = state.symbols.create("_toplevel"); + vSourceExpr = state.allocValue(); if (file != "") { @@ -29,7 +31,9 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state) auto searchPath = state.getSearchPath(); - state.mkAttrs(*vSourceExpr, searchPath.size()); + state.mkAttrs(*vSourceExpr, searchPath.size() + 1); + + mkBool(*state.allocAttr(*vSourceExpr, sToplevel), true); std::unordered_set seen; diff --git a/src/nix/search.cc b/src/nix/search.cc index 8aac06ad2cd..970dcb9834b 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -54,6 +54,8 @@ struct CmdSearch : SourceExprCommand, MixJSON auto jsonOut = json ? std::make_unique(std::cout, true) : nullptr; + auto sToplevel = state->symbols.create("_toplevel"); + doExpr = [&](Value * v, std::string attrPath, bool toplevel) { debug("at attribute ‘%s’", attrPath); @@ -123,7 +125,7 @@ struct CmdSearch : SourceExprCommand, MixJSON if (j == attrs->end() || !state->forceBool(*j->value, *j->pos)) return; } - Bindings::iterator j = v->attrs->find(state->symbols.create("_toplevel")); + Bindings::iterator j = v->attrs->find(sToplevel); bool toplevel2 = j != v->attrs->end() && state->forceBool(*j->value, *j->pos); for (auto & i : *v->attrs) { @@ -134,6 +136,11 @@ struct CmdSearch : SourceExprCommand, MixJSON } } catch (AssertionError & e) { + } catch (Error & e) { + if (!toplevel) { + e.addPrefix(fmt("While evaluating the attribute ‘%s’:\n", attrPath)); + throw; + } } }; From c94f3d5575d7af5403274d1e9e2f3c9d72989751 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 20 Jul 2017 13:32:01 +0200 Subject: [PATCH 0466/2196] nix-shell: Use bashInteractive from This adds about 0.1s to nix-shell runtime in the case where bashInteractive already exists. See discussion at https://github.com/NixOS/nixpkgs/issues/27493. --- src/libexpr/get-drvs.cc | 7 +++--- src/libexpr/get-drvs.hh | 8 +++---- src/nix-build/local.mk | 2 +- src/nix-build/nix-build.cc | 44 +++++++++++++++++++++++++++++++++++++- src/nix/repl.cc | 6 +++--- 5 files changed, 54 insertions(+), 13 deletions(-) diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index b7e16de7fa4..66689e3e889 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -267,15 +267,14 @@ static bool getDerivation(EvalState & state, Value & v, } -bool getDerivation(EvalState & state, Value & v, DrvInfo & drv, +std::experimental::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { Done done; DrvInfos drvs; getDerivation(state, v, "", drvs, done, ignoreAssertionFailures); - if (drvs.size() != 1) return false; - drv = drvs.front(); - return true; + if (drvs.size() != 1) return {}; + return std::move(drvs.front()); } diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index 82fb8a3ac6a..32294e45875 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -75,10 +75,10 @@ typedef list DrvInfos; #endif -/* If value `v' denotes a derivation, store information about the - derivation in `drv' and return true. Otherwise, return false. 
*/ -bool getDerivation(EvalState & state, Value & v, DrvInfo & drv, - bool ignoreAssertionFailures); +/* If value `v' denotes a derivation, return a DrvInfo object + describing it. Otherwise return nothing. */ +std::experimental::optional getDerivation(EvalState & state, + Value & v, bool ignoreAssertionFailures); void getDerivations(EvalState & state, Value & v, const string & pathPrefix, Bindings & autoArgs, DrvInfos & drvs, diff --git a/src/nix-build/local.mk b/src/nix-build/local.mk index 91532411a50..a2d1c91dfd9 100644 --- a/src/nix-build/local.mk +++ b/src/nix-build/local.mk @@ -4,6 +4,6 @@ nix-build_DIR := $(d) nix-build_SOURCES := $(d)/nix-build.cc -nix-build_LIBS = libmain libstore libutil libformat +nix-build_LIBS = libmain libexpr libstore libutil libformat $(eval $(call install-symlink, nix-build, $(bindir)/nix-shell)) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index dc80dd6a583..72f89003d0b 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -13,6 +13,8 @@ #include "affinity.hh" #include "util.hh" #include "shared.hh" +#include "eval.hh" +#include "get-drvs.hh" using namespace nix; using namespace std::string_literals; @@ -75,6 +77,8 @@ int main(int argc, char ** argv) { return handleExceptions(argv[0], [&]() { initNix(); + initGC(); + auto store = openStore(); auto dryRun = false; auto verbose = false; @@ -88,6 +92,7 @@ int main(int argc, char ** argv) Strings instArgs; Strings buildArgs; Strings exprs; + Strings searchPath; auto shell = getEnv("SHELL", "/bin/sh"); std::string envCommand; // interactive shell @@ -320,6 +325,8 @@ int main(int argc, char ** argv) } } + EvalState state(searchPath, store); + if (packages && fromArgs) { throw UsageError("‘-p’ and ‘-E’ are mutually exclusive"); } @@ -465,7 +472,42 @@ int main(int argc, char ** argv) auto envPtrs = stringsToCharPtrs(envStrs); - auto shell = getEnv("NIX_BUILD_SHELL", "bash"); + auto shell = getEnv("NIX_BUILD_SHELL", ""); + + if (shell == "") { + + try { + + auto expr = state.parseExprFromString("(import {}).bashInteractive", absPath(".")); + + Value v; + state.eval(expr, v); + + auto drv = getDerivation(state, v, false); + if (!drv) + throw Error("the ‘bashInteractive’ attribute in did not evaluate to a derivation"); + + auto drvPath = drv->queryDrvPath(); + + unsigned long long downloadSize, narSize; + PathSet willBuild, willSubstitute, unknown; + store->queryMissing({drvPath}, + willBuild, willSubstitute, unknown, downloadSize, narSize); + + if (settings.printMissing) + printMissing(ref(store), willBuild, willSubstitute, unknown, downloadSize, narSize); + + store->buildPaths({drvPath}); + + shell = drv->queryOutPath() + "/bin/bash"; + if (!pathExists(shell)) + throw Error("expected shell ‘%s’ to exist, but it doesn't", shell); + + } catch (Error & e) { + printError("warning: %s; will use bash from your environment", e.what()); + shell = "bash"; + } + } environ = envPtrs.data(); diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 437c7903ed4..7d5b8f4668a 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -292,10 +292,10 @@ bool isVarName(const string & s) Path NixRepl::getDerivationPath(Value & v) { - DrvInfo drvInfo(state); - if (!getDerivation(state, v, drvInfo, false)) + auto drvInfo = getDerivation(state, v, false); + if (!drvInfo) throw Error("expression does not evaluate to a derivation, so I can't build it"); - Path drvPath = drvInfo.queryDrvPath(); + Path drvPath = drvInfo->queryDrvPath(); if (drvPath == "" || !state.store->isValidPath(drvPath)) throw 
Error("expression did not evaluate to a valid derivation"); return drvPath; From 4c9ff89c261d84dcc4f88a79654daff2f4790e66 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 25 Jul 2017 15:09:06 +0200 Subject: [PATCH 0467/2196] nix-build/nix-shell: Eliminate call to nix-instantiate / nix-store Note that this removes the need for a derivation symlink, so the --drv-path and --add-drv-link flags now do nothing. --- src/libexpr/eval.hh | 2 + src/libexpr/parser.y | 7 + src/libmain/shared.cc | 9 +- src/libmain/shared.hh | 3 + src/nix-build/nix-build.cc | 830 +++++++++++-------------- src/nix-instantiate/nix-instantiate.cc | 11 +- 6 files changed, 400 insertions(+), 462 deletions(-) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 1e32db1e86b..04a36b14cef 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -118,6 +118,8 @@ public: Expr * parseExprFromString(const string & s, const Path & basePath, StaticEnv & staticEnv); Expr * parseExprFromString(const string & s, const Path & basePath); + Expr * parseStdin(); + /* Evaluate an expression read from the given file to normal form. */ void evalFile(const Path & path, Value & v); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 62982650a22..ca3d057451a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -601,6 +601,13 @@ Expr * EvalState::parseExprFromString(const string & s, const Path & basePath) } +Expr * EvalState::parseStdin() +{ + //Activity act(*logger, lvlTalkative, format("parsing standard input")); + return parseExprFromString(drainFD(0), absPath(".")); +} + + void EvalState::addToSearchPath(const string & s) { size_t pos = s.find('='); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 915e7955014..48b00e1a399 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -222,7 +222,14 @@ struct LegacyArgs : public MixCommonArgs void parseCmdLine(int argc, char * * argv, std::function parseArg) { - LegacyArgs(baseNameOf(argv[0]), parseArg).parseCmdline(argvToStrings(argc, argv)); + parseCmdLine(baseNameOf(argv[0]), argvToStrings(argc, argv), parseArg); +} + + +void parseCmdLine(const string & programName, const Strings & args, + std::function parseArg) +{ + LegacyArgs(programName, parseArg).parseCmdline(args); } diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 6d94a22f788..14e436cc8bb 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -25,6 +25,9 @@ void initNix(); void parseCmdLine(int argc, char * * argv, std::function parseArg); +void parseCmdLine(const string & programName, const Strings & args, + std::function parseArg); + void printVersion(const string & programName); /* Ugh. No better place to put this. 
*/ diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 72f89003d0b..8a3c8a02506 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -5,8 +5,6 @@ #include #include -#include - #include "store-api.hh" #include "globals.hh" #include "derivations.hh" @@ -14,7 +12,10 @@ #include "util.hh" #include "shared.hh" #include "eval.hh" +#include "eval-inline.hh" #include "get-drvs.hh" +#include "common-opts.hh" +#include "attr-path.hh" using namespace nix; using namespace std::string_literals; @@ -65,510 +66,435 @@ std::vector shellwords(const string & s) return res; } -static void maybePrintExecError(ExecError & e) +void mainWrapped(int argc, char * * argv) { - if (WIFEXITED(e.status)) - throw Exit(WEXITSTATUS(e.status)); - else - throw e; -} - -int main(int argc, char ** argv) -{ - return handleExceptions(argv[0], [&]() { - initNix(); - initGC(); - - auto store = openStore(); - auto dryRun = false; - auto verbose = false; - auto runEnv = std::regex_search(argv[0], std::regex("nix-shell$")); - auto pure = false; - auto fromArgs = false; - auto packages = false; - // Same condition as bash uses for interactive shells - auto interactive = isatty(STDIN_FILENO) && isatty(STDERR_FILENO); - - Strings instArgs; - Strings buildArgs; - Strings exprs; - Strings searchPath; - - auto shell = getEnv("SHELL", "/bin/sh"); - std::string envCommand; // interactive shell - Strings envExclude; - - auto myName = runEnv ? "nix-shell" : "nix-build"; - - auto inShebang = false; - std::string script; - std::vector savedArgs; - - AutoDelete tmpDir(createTempDir("", myName)); - - std::string outLink = "./result"; - auto drvLink = (Path) tmpDir + "/derivation"; - - std::vector args; - for (int i = 1; i < argc; ++i) - args.push_back(argv[i]); - - // Heuristic to see if we're invoked as a shebang script, namely, if we - // have a single argument, it's the name of an executable file, and it - // starts with "#!". - if (runEnv && argc > 1 && !std::regex_search(argv[1], std::regex("nix-shell"))) { - script = argv[1]; - if (access(script.c_str(), F_OK) == 0 && access(script.c_str(), X_OK) == 0) { - auto lines = tokenizeString(readFile(script), "\n"); - if (std::regex_search(lines.front(), std::regex("^#!"))) { - lines.pop_front(); - inShebang = true; - for (int i = 2; i < argc; ++i) - savedArgs.push_back(argv[i]); - args.clear(); - for (auto line : lines) { - line = chomp(line); - std::smatch match; - if (std::regex_match(line, match, std::regex("^#!\\s*nix-shell (.*)$"))) - for (const auto & word : shellwords(match[1].str())) - args.push_back(word); - } + initNix(); + initGC(); + + auto dryRun = false; + auto runEnv = std::regex_search(argv[0], std::regex("nix-shell$")); + auto pure = false; + auto fromArgs = false; + auto packages = false; + // Same condition as bash uses for interactive shells + auto interactive = isatty(STDIN_FILENO) && isatty(STDERR_FILENO); + Strings attrPaths; + Strings left; + Strings searchPath; + std::map autoArgs_; + RepairFlag repair = NoRepair; + Path gcRoot; + BuildMode buildMode = bmNormal; + bool readStdin = false; + + auto shell = getEnv("SHELL", "/bin/sh"); + std::string envCommand; // interactive shell + Strings envExclude; + + auto myName = runEnv ? 
"nix-shell" : "nix-build"; + + auto inShebang = false; + std::string script; + std::vector savedArgs; + + AutoDelete tmpDir(createTempDir("", myName)); + + std::string outLink = "./result"; + + Strings args; + for (int i = 1; i < argc; ++i) + args.push_back(argv[i]); + + // Heuristic to see if we're invoked as a shebang script, namely, if we + // have a single argument, it's the name of an executable file, and it + // starts with "#!". + if (runEnv && argc > 1 && !std::regex_search(argv[1], std::regex("nix-shell"))) { + script = argv[1]; + if (access(script.c_str(), F_OK) == 0 && access(script.c_str(), X_OK) == 0) { + auto lines = tokenizeString(readFile(script), "\n"); + if (std::regex_search(lines.front(), std::regex("^#!"))) { + lines.pop_front(); + inShebang = true; + for (int i = 2; i < argc; ++i) + savedArgs.push_back(argv[i]); + args.clear(); + for (auto line : lines) { + line = chomp(line); + std::smatch match; + if (std::regex_match(line, match, std::regex("^#!\\s*nix-shell (.*)$"))) + for (const auto & word : shellwords(match[1].str())) + args.push_back(word); } } } + } - for (size_t n = 0; n < args.size(); ++n) { - auto arg = args[n]; + parseCmdLine(myName, args, [&](Strings::iterator & arg, const Strings::iterator & end) { + if (*arg == "--help") { + deletePath(tmpDir); + showManPage(myName); + } - if (arg == "--help") { - deletePath(tmpDir); - showManPage(myName); - } + else if (*arg == "--version") + printVersion(myName); - else if (arg == "--version") - printVersion(myName); + else if (*arg == "--add-drv-link") + ; // obsolete - else if (arg == "--add-drv-link") { - drvLink = "./derivation"; - } + else if (*arg == "--no-out-link" || *arg == "--no-link") + outLink = (Path) tmpDir + "/result"; - else if (arg == "--no-out-link" || arg == "--no-link") { - outLink = (Path) tmpDir + "/result"; - } + else if (*arg == "--attr" || *arg == "-A") + attrPaths.push_back(getArg(*arg, arg, end)); - else if (arg == "--drv-link") { - n++; - if (n >= args.size()) { - throw UsageError("--drv-link requires an argument"); - } - drvLink = args[n]; - } + else if (*arg == "--drv-link") + getArg(*arg, arg, end); // obsolete - else if (arg == "--out-link" || arg == "-o") { - n++; - if (n >= args.size()) { - throw UsageError(format("%1% requires an argument") % arg); - } - outLink = args[n]; - } + else if (*arg == "--out-link" || *arg == "-o") + outLink = getArg(*arg, arg, end); - else if (arg == "--attr" || arg == "-A" || arg == "-I") { - n++; - if (n >= args.size()) { - throw UsageError(format("%1% requires an argument") % arg); - } - instArgs.push_back(arg); - instArgs.push_back(args[n]); - } + else if (parseAutoArgs(arg, end, autoArgs_)) + ; - else if (arg == "--arg" || arg == "--argstr") { - if (n + 2 >= args.size()) { - throw UsageError(format("%1% requires two arguments") % arg); - } - instArgs.push_back(arg); - instArgs.push_back(args[n + 1]); - instArgs.push_back(args[n + 2]); - n += 2; - } + else if (parseSearchPathArg(arg, end, searchPath)) + ; - else if (arg == "--option") { - if (n + 2 >= args.size()) { - throw UsageError(format("%1% requires two arguments") % arg); - } - instArgs.push_back(arg); - instArgs.push_back(args[n + 1]); - instArgs.push_back(args[n + 2]); - buildArgs.push_back(arg); - buildArgs.push_back(args[n + 1]); - buildArgs.push_back(args[n + 2]); - settings.set(args[n + 1], args[n + 2]); - n += 2; - } + else if (*arg == "--add-root") + gcRoot = getArg(*arg, arg, end); - else if (arg == "--max-jobs" || arg == "-j" || arg == "--max-silent-time" || arg == "--cores" || arg == 
"--timeout" || arg == "--add-root") { - n++; - if (n >= args.size()) { - throw UsageError(format("%1% requires an argument") % arg); - } - buildArgs.push_back(arg); - buildArgs.push_back(args[n]); - } + else if (*arg == "--dry-run") + dryRun = true; - else if (arg == "--dry-run") { - buildArgs.push_back("--dry-run"); - dryRun = true; - } + else if (*arg == "--repair") { + repair = Repair; + buildMode = bmRepair; + } - else if (arg == "--show-trace") { - instArgs.push_back(arg); - } + else if (*arg == "--run-env") // obsolete + runEnv = true; - else if (arg == "-") { - exprs = Strings{"-"}; - } + else if (*arg == "--command" || *arg == "--run") { + if (*arg == "--run") + interactive = false; + envCommand = getArg(*arg, arg, end) + "\nexit"; + } - else if (arg == "--verbose" || (arg.size() >= 2 && arg.substr(0, 2) == "-v")) { - buildArgs.push_back(arg); - instArgs.push_back(arg); - verbose = true; - } + else if (*arg == "--check") + buildMode = bmCheck; - else if (arg == "--quiet" || arg == "--repair") { - buildArgs.push_back(arg); - instArgs.push_back(arg); - } + else if (*arg == "--exclude") + envExclude.push_back(getArg(*arg, arg, end)); - else if (arg == "--check") { - buildArgs.push_back(arg); - } + else if (*arg == "--expr" || *arg == "-E") + fromArgs = true; - else if (arg == "--run-env") { // obsolete - runEnv = true; - } + else if (*arg == "--pure") pure = true; + else if (*arg == "--impure") pure = false; - else if (arg == "--command" || arg == "--run") { - n++; - if (n >= args.size()) { - throw UsageError(format("%1% requires an argument") % arg); - } - envCommand = args[n] + "\nexit"; - if (arg == "--run") - interactive = false; - } + else if (*arg == "--packages" || *arg == "-p") + packages = true; - else if (arg == "--exclude") { - n++; - if (n >= args.size()) { - throw UsageError(format("%1% requires an argument") % arg); - } - envExclude.push_back(args[n]); - } + else if (inShebang && *arg == "-i") { + auto interpreter = getArg(*arg, arg, end); + interactive = false; + auto execArgs = ""; - else if (arg == "--pure") { pure = true; } - else if (arg == "--impure") { pure = false; } + auto shellEscape = [](const string & s) { + return "'" + std::regex_replace(s, std::regex("'"), "'\\''") + "'"; + }; - else if (arg == "--expr" || arg == "-E") { - fromArgs = true; - instArgs.push_back("--expr"); - } + // Überhack to support Perl. Perl examines the shebang and + // executes it unless it contains the string "perl" or "indir", + // or (undocumented) argv[0] does not contain "perl". Exploit + // the latter by doing "exec -a". + if (std::regex_search(interpreter, std::regex("perl"))) + execArgs = "-a PERL"; - else if (arg == "--packages" || arg == "-p") { - packages = true; + std::ostringstream joined; + for (const auto & i : savedArgs) + joined << shellEscape(i) << ' '; + + if (std::regex_search(interpreter, std::regex("ruby"))) { + // Hack for Ruby. Ruby also examines the shebang. It tries to + // read the shebang to understand which packages to read from. Since + // this is handled via nix-shell -p, we wrap our ruby script execution + // in ruby -e 'load' which ignores the shebangs. 
+ envCommand = (format("exec %1% %2% -e 'load(\"%3%\") -- %4%") % execArgs % interpreter % script % joined.str()).str(); + } else { + envCommand = (format("exec %1% %2% %3% %4%") % execArgs % interpreter % script % joined.str()).str(); } + } - else if (inShebang && arg == "-i") { - n++; - if (n >= args.size()) { - throw UsageError(format("%1% requires an argument") % arg); - } - interactive = false; - auto interpreter = args[n]; - auto execArgs = ""; - - auto shellEscape = [](const string & s) { - return "'" + std::regex_replace(s, std::regex("'"), "'\\''") + "'"; - }; - - // Überhack to support Perl. Perl examines the shebang and - // executes it unless it contains the string "perl" or "indir", - // or (undocumented) argv[0] does not contain "perl". Exploit - // the latter by doing "exec -a". - if (std::regex_search(interpreter, std::regex("perl"))) - execArgs = "-a PERL"; - - std::ostringstream joined; - for (const auto & i : savedArgs) - joined << shellEscape(i) << ' '; - - if (std::regex_search(interpreter, std::regex("ruby"))) { - // Hack for Ruby. Ruby also examines the shebang. It tries to - // read the shebang to understand which packages to read from. Since - // this is handled via nix-shell -p, we wrap our ruby script execution - // in ruby -e 'load' which ignores the shebangs. - envCommand = (format("exec %1% %2% -e 'load(\"%3%\") -- %4%") % execArgs % interpreter % script % joined.str()).str(); - } else { - envCommand = (format("exec %1% %2% %3% %4%") % execArgs % interpreter % script % joined.str()).str(); - } - } + else if (*arg == "-") + readStdin = true; - else if (!arg.empty() && arg[0] == '-') { - buildArgs.push_back(arg); - } + else if (*arg != "" && arg->at(0) == '-') + return false; - else if (arg == "-Q" || arg == "--no-build-output") { - buildArgs.push_back(arg); - instArgs.push_back(arg); - } + else + left.push_back(*arg); - else { - exprs.push_back(arg); - } - } + return true; + }); - EvalState state(searchPath, store); + if (packages && fromArgs) + throw UsageError("‘-p’ and ‘-E’ are mutually exclusive"); + + auto store = openStore(); + + EvalState state(searchPath, store); + state.repair = repair; + + Bindings & autoArgs(*evalAutoArgs(state, autoArgs_)); + + if (packages) { + std::ostringstream joined; + joined << "with import { }; (pkgs.runCommandCC or pkgs.runCommand) \"shell\" { buildInputs = [ "; + for (const auto & i : left) + joined << '(' << i << ") "; + joined << "]; } \"\""; + fromArgs = true; + left = {joined.str()}; + } else if (!fromArgs) { + if (left.empty() && runEnv && pathExists("shell.nix")) + left = {"shell.nix"}; + if (left.empty()) + left = {"default.nix"}; + } - if (packages && fromArgs) { - throw UsageError("‘-p’ and ‘-E’ are mutually exclusive"); - } + if (runEnv) + setenv("IN_NIX_SHELL", pure ? "pure" : "impure", 1); - if (packages) { - instArgs.push_back("--expr"); - std::ostringstream joined; - joined << "with import { }; (pkgs.runCommandCC or pkgs.runCommand) \"shell\" { buildInputs = [ "; - for (const auto & i : exprs) - joined << '(' << i << ") "; - joined << "]; } \"\""; - exprs = Strings{joined.str()}; - } else if (!fromArgs) { - if (exprs.empty() && runEnv && access("shell.nix", F_OK) == 0) - exprs.push_back("shell.nix"); - if (exprs.empty()) - exprs.push_back("default.nix"); + /* Parse the expressions. */ + std::vector exprs; + + if (readStdin) + exprs = {state.parseStdin()}; + else + for (auto i : left) { + if (fromArgs) + exprs.push_back(state.parseExprFromString(i, absPath("."))); + else + /* If we're in a #! 
script, interpret filenames + relative to the script. */ + exprs.push_back(state.parseExprFromFile(resolveExprPath(lookupFileArg(state, + inShebang && !packages ? absPath(i, dirOf(script)) : i)))); } - if (runEnv) - setenv("IN_NIX_SHELL", pure ? "pure" : "impure", 1); - - for (auto & expr : exprs) { - // Instantiate. - std::vector drvPaths; - if (!std::regex_match(expr, std::regex("^/.*\\.drv$"))) { - // If we're in a #! script, interpret filenames relative to the - // script. - if (inShebang && !packages) - expr = absPath(expr, dirOf(script)); - - Strings instantiateArgs{"--add-root", drvLink, "--indirect"}; - for (const auto & arg : instArgs) - instantiateArgs.push_back(arg); - instantiateArgs.push_back(expr); - try { - auto instOutput = runProgram(settings.nixBinDir + "/nix-instantiate", false, instantiateArgs); - drvPaths = tokenizeString>(instOutput); - } catch (ExecError & e) { - maybePrintExecError(e); - } - } else { - drvPaths.push_back(expr); - } + /* Evaluate them into derivations. */ + DrvInfos drvs; - if (runEnv) { - if (drvPaths.size() != 1) - throw UsageError("a single derivation is required"); - auto drvPath = drvPaths[0]; - drvPath = drvPath.substr(0, drvPath.find_first_of('!')); - if (isLink(drvPath)) - drvPath = readLink(drvPath); - auto drv = store->derivationFromPath(drvPath); - - // Build or fetch all dependencies of the derivation. - Strings nixStoreArgs{"-r", "--no-output", "--no-gc-warning"}; - for (const auto & arg : buildArgs) - nixStoreArgs.push_back(arg); - for (const auto & input : drv.inputDrvs) - if (std::all_of(envExclude.cbegin(), envExclude.cend(), [&](const string & exclude) { return !std::regex_search(input.first, std::regex(exclude)); })) - nixStoreArgs.push_back(input.first); - for (const auto & src : drv.inputSrcs) - nixStoreArgs.push_back(src); - - try { - runProgram(settings.nixBinDir + "/nix-store", false, nixStoreArgs); - } catch (ExecError & e) { - maybePrintExecError(e); - } + if (attrPaths.empty()) attrPaths = {""}; - if (dryRun) return; + for (auto e : exprs) { + Value vRoot; + state.eval(e, vRoot); - // Set the environment. - auto env = getEnv(); + for (auto & i : attrPaths) { + Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot)); + state.forceValue(v); + getDerivations(state, v, "", autoArgs, drvs, false); + } + } - auto tmp = getEnv("TMPDIR", getEnv("XDG_RUNTIME_DIR", "/tmp")); + auto buildPaths = [&](const PathSet & paths) { + /* Note: we do this even when !printMissing to efficiently + fetch binary cache data. */ + unsigned long long downloadSize, narSize; + PathSet willBuild, willSubstitute, unknown; + store->queryMissing(paths, + willBuild, willSubstitute, unknown, downloadSize, narSize); - if (pure) { - std::set keepVars{"HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL"}; - decltype(env) newEnv; - for (auto & i : env) - if (keepVars.count(i.first)) - newEnv.emplace(i); - env = newEnv; - // NixOS hack: prevent /etc/bashrc from sourcing /etc/profile. 
- env["__ETC_PROFILE_SOURCED"] = "1"; - } + if (settings.printMissing) + printMissing(ref(store), willBuild, willSubstitute, unknown, downloadSize, narSize); - env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; - env["NIX_STORE"] = store->storeDir; - env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); - - auto passAsFile = tokenizeString(get(drv.env, "passAsFile", "")); - - bool keepTmp = false; - int fileNr = 0; - - for (auto & var : drv.env) - if (passAsFile.count(var.first)) { - keepTmp = true; - string fn = ".attr-" + std::to_string(fileNr++); - Path p = (Path) tmpDir + "/" + fn; - writeFile(p, var.second); - env[var.first + "Path"] = p; - } else - env[var.first] = var.second; - - restoreAffinity(); - - // Run a shell using the derivation's environment. For - // convenience, source $stdenv/setup to setup additional - // environment variables and shell functions. Also don't lose - // the current $PATH directories. - auto rcfile = (Path) tmpDir + "/rc"; - writeFile(rcfile, fmt( - (keepTmp ? "" : "rm -rf '%1%'; "s) + - "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc; " - "%2%" - "dontAddDisableDepTrack=1; " - "[ -e $stdenv/setup ] && source $stdenv/setup; " - "%3%" - "set +e; " - R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s" - "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; " - "unset NIX_ENFORCE_PURITY; " - "unset NIX_INDENT_MAKE; " - "shopt -u nullglob; " - "unset TZ; %4%" - "%5%", - (Path) tmpDir, - (pure ? "" : "p=$PATH; "), - (pure ? "" : "PATH=$PATH:$p; unset p; "), - (getenv("TZ") ? (string("export TZ='") + getenv("TZ") + "'; ") : ""), - envCommand)); - - Strings envStrs; - for (auto & i : env) - envStrs.push_back(i.first + "=" + i.second); - - auto args = interactive - ? Strings{"bash", "--rcfile", rcfile} - : Strings{"bash", rcfile}; - - auto envPtrs = stringsToCharPtrs(envStrs); - - auto shell = getEnv("NIX_BUILD_SHELL", ""); - - if (shell == "") { - - try { - - auto expr = state.parseExprFromString("(import {}).bashInteractive", absPath(".")); - - Value v; - state.eval(expr, v); - - auto drv = getDerivation(state, v, false); - if (!drv) - throw Error("the ‘bashInteractive’ attribute in did not evaluate to a derivation"); - - auto drvPath = drv->queryDrvPath(); - - unsigned long long downloadSize, narSize; - PathSet willBuild, willSubstitute, unknown; - store->queryMissing({drvPath}, - willBuild, willSubstitute, unknown, downloadSize, narSize); - - if (settings.printMissing) - printMissing(ref(store), willBuild, willSubstitute, unknown, downloadSize, narSize); - - store->buildPaths({drvPath}); - - shell = drv->queryOutPath() + "/bin/bash"; - if (!pathExists(shell)) - throw Error("expected shell ‘%s’ to exist, but it doesn't", shell); - - } catch (Error & e) { - printError("warning: %s; will use bash from your environment", e.what()); - shell = "bash"; - } - } + if (!dryRun) + store->buildPaths(paths, buildMode); + }; - environ = envPtrs.data(); + if (runEnv) { + if (drvs.size() != 1) + throw UsageError("nix-shell requires a single derivation"); - auto argPtrs = stringsToCharPtrs(args); + auto & drvInfo = drvs.front(); + auto drv = store->derivationFromPath(drvInfo.queryDrvPath()); - restoreSignals(); + PathSet pathsToBuild; - execvp(shell.c_str(), argPtrs.data()); + /* Figure out what bash shell to use. If $NIX_BUILD_SHELL + is not set, then build bashInteractive from + . 
*/ + auto shell = getEnv("NIX_BUILD_SHELL", ""); - throw SysError("executing shell ‘%s’", shell); - } + if (shell == "") { - // Ugly hackery to make "nix-build -A foo.all" produce symlinks - // ./result, ./result-dev, and so on, rather than ./result, - // ./result-2-dev, and so on. This combines multiple derivation - // paths into one "/nix/store/drv-path!out1,out2,..." argument. - std::string prevDrvPath; - Strings drvPaths2; - for (const auto & drvPath : drvPaths) { - auto p = drvPath; - std::string output = "out"; - std::smatch match; - if (std::regex_match(drvPath, match, std::regex("(.*)!(.*)"))) { - p = match[1].str(); - output = match[2].str(); - } - auto target = readLink(p); - if (verbose) - std::cerr << "derivation is " << target << '\n'; - if (target == prevDrvPath) { - auto last = drvPaths2.back(); - drvPaths2.pop_back(); - drvPaths2.push_back(last + "," + output); - } else { - drvPaths2.push_back(target + "!" + output); - prevDrvPath = target; - } - } - // Build. - Strings outPaths; - Strings nixStoreArgs{"--add-root", outLink, "--indirect", "-r"}; - for (const auto & arg : buildArgs) - nixStoreArgs.push_back(arg); - for (const auto & path : drvPaths2) - nixStoreArgs.push_back(path); - - std::string nixStoreRes; try { - nixStoreRes = runProgram(settings.nixBinDir + "/nix-store", false, nixStoreArgs); - } catch (ExecError & e) { - maybePrintExecError(e); + auto expr = state.parseExprFromString("(import {}).bashInteractive", absPath(".")); + + Value v; + state.eval(expr, v); + + auto drv = getDerivation(state, v, false); + if (!drv) + throw Error("the ‘bashInteractive’ attribute in did not evaluate to a derivation"); + + pathsToBuild.insert(drv->queryDrvPath()); + + shell = drv->queryOutPath() + "/bin/bash"; + + } catch (Error & e) { + printError("warning: %s; will use bash from your environment", e.what()); + shell = "bash"; } + } + + // Build or fetch all dependencies of the derivation. + for (const auto & input : drv.inputDrvs) + if (std::all_of(envExclude.cbegin(), envExclude.cend(), [&](const string & exclude) { return !std::regex_search(input.first, std::regex(exclude)); })) + pathsToBuild.insert(input.first); + for (const auto & src : drv.inputSrcs) + pathsToBuild.insert(src); - for (const auto & outpath : tokenizeString>(nixStoreRes)) - outPaths.push_back(chomp(outpath)); + buildPaths(pathsToBuild); - if (dryRun) - continue; + if (dryRun) return; - for (const auto & outPath : outPaths) - std::cout << readLink(outPath) << '\n'; + // Set the environment. + auto env = getEnv(); + + auto tmp = getEnv("TMPDIR", getEnv("XDG_RUNTIME_DIR", "/tmp")); + + if (pure) { + std::set keepVars{"HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL"}; + decltype(env) newEnv; + for (auto & i : env) + if (keepVars.count(i.first)) + newEnv.emplace(i); + env = newEnv; + // NixOS hack: prevent /etc/bashrc from sourcing /etc/profile. 
+ env["__ETC_PROFILE_SOURCED"] = "1"; } + + env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; + env["NIX_STORE"] = store->storeDir; + env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); + + auto passAsFile = tokenizeString(get(drv.env, "passAsFile", "")); + + bool keepTmp = false; + int fileNr = 0; + + for (auto & var : drv.env) + if (passAsFile.count(var.first)) { + keepTmp = true; + string fn = ".attr-" + std::to_string(fileNr++); + Path p = (Path) tmpDir + "/" + fn; + writeFile(p, var.second); + env[var.first + "Path"] = p; + } else + env[var.first] = var.second; + + restoreAffinity(); + + /* Run a shell using the derivation's environment. For + convenience, source $stdenv/setup to setup additional + environment variables and shell functions. Also don't + lose the current $PATH directories. */ + auto rcfile = (Path) tmpDir + "/rc"; + writeFile(rcfile, fmt( + (keepTmp ? "" : "rm -rf '%1%'; "s) + + "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc; " + "%2%" + "dontAddDisableDepTrack=1; " + "[ -e $stdenv/setup ] && source $stdenv/setup; " + "%3%" + "set +e; " + R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s" + "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; " + "unset NIX_ENFORCE_PURITY; " + "unset NIX_INDENT_MAKE; " + "shopt -u nullglob; " + "unset TZ; %4%" + "%5%", + (Path) tmpDir, + (pure ? "" : "p=$PATH; "), + (pure ? "" : "PATH=$PATH:$p; unset p; "), + (getenv("TZ") ? (string("export TZ='") + getenv("TZ") + "'; ") : ""), + envCommand)); + + Strings envStrs; + for (auto & i : env) + envStrs.push_back(i.first + "=" + i.second); + + auto args = interactive + ? Strings{"bash", "--rcfile", rcfile} + : Strings{"bash", rcfile}; + + auto envPtrs = stringsToCharPtrs(envStrs); + + environ = envPtrs.data(); + + auto argPtrs = stringsToCharPtrs(args); + + restoreSignals(); + + execvp(shell.c_str(), argPtrs.data()); + + throw SysError("executing shell ‘%s’", shell); + } + + else { + + PathSet pathsToBuild; + + std::map drvPrefixes; + std::map resultSymlinks; + std::vector outPaths; + + for (auto & drvInfo : drvs) { + auto drvPath = drvInfo.queryDrvPath(); + pathsToBuild.insert(drvPath); + + auto outputName = drvInfo.queryOutputName(); + if (outputName == "") + throw Error("derivation ‘%s’ lacks an ‘outputName’ attribute", drvPath); + + pathsToBuild.insert(drvPath + (outputName != "out" ? "!" 
+ outputName : "")); + + std::string drvPrefix; + auto i = drvPrefixes.find(drvPath); + if (i != drvPrefixes.end()) + drvPrefix = i->second; + else { + drvPrefix = outLink; + if (drvPrefixes.size()) + drvPrefix += fmt("-%d", drvPrefixes.size() + 1); + drvPrefixes[drvPath] = drvPrefix; + } + + std::string symlink = drvPrefix; + if (outputName != "out") symlink += "-" + outputName; + + resultSymlinks[symlink] = drvInfo.queryOutPath(); + outPaths.push_back(drvInfo.queryOutPath()); + } + + buildPaths(pathsToBuild); + + if (dryRun) return; + + for (auto & symlink : resultSymlinks) + if (auto store2 = store.dynamic_pointer_cast()) + store2->addPermRoot(symlink.second, absPath(symlink.first), true); + + for (auto & path : outPaths) + std::cout << path << '\n'; + } +} + +int main(int argc, char * * argv) +{ + return handleExceptions(argv[0], [&]() { + return mainWrapped(argc, argv); }); } diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index a5d12c1466f..49a14cb6445 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -17,13 +17,6 @@ using namespace nix; -static Expr * parseStdin(EvalState & state) -{ - //Activity act(*logger, lvlTalkative, format("parsing standard input")); - return state.parseExprFromString(drainFD(0), absPath(".")); -} - - static Path gcRoot; static int rootNr = 0; static bool indirectRoot = false; @@ -166,7 +159,7 @@ int main(int argc, char * * argv) Bindings & autoArgs(*evalAutoArgs(state, autoArgs_)); - if (attrPaths.empty()) attrPaths.push_back(""); + if (attrPaths.empty()) attrPaths = {""}; if (findFile) { for (auto & i : files) { @@ -178,7 +171,7 @@ int main(int argc, char * * argv) } if (readStdin) { - Expr * e = parseStdin(state); + Expr * e = state.parseStdin(); processExpr(state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } else if (files.empty() && !fromArgs) From 57b95057311d4dafb948c78889693a98ec349460 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Jul 2017 17:21:46 +0200 Subject: [PATCH 0468/2196] nix search: Add a cache The package list is now cached in ~/.cache/nix/package-search.json. This gives a substantial speedup to "nix search" queries. 
For example (on an SSD): First run: (no package search cache, cold page cache) $ time nix search blender Attribute name: nixpkgs.blender Package name: blender Version: 2.78c Description: 3D Creation/Animation/Publishing System real 0m6.516s Second run: (package search cache populated) $ time nix search blender Attribute name: nixpkgs.blender Package name: blender Version: 2.78c Description: 3D Creation/Animation/Publishing System real 0m0.143s --- src/libutil/json.cc | 20 +++++++----- src/libutil/json.hh | 14 ++++++-- src/nix/search.cc | 78 +++++++++++++++++++++++++++++++++++++++------ 3 files changed, 92 insertions(+), 20 deletions(-) diff --git a/src/libutil/json.cc b/src/libutil/json.cc index b8b8ef9c8cc..813b257016e 100644 --- a/src/libutil/json.cc +++ b/src/libutil/json.cc @@ -50,20 +50,22 @@ template<> void toJSON(std::ostream & str, const std::nullptr_t JSONWriter::JSONWriter(std::ostream & str, bool indent) : state(new JSONState(str, indent)) { - state->stack.push_back(this); + state->stack++; } JSONWriter::JSONWriter(JSONState * state) : state(state) { - state->stack.push_back(this); + state->stack++; } JSONWriter::~JSONWriter() { - assertActive(); - state->stack.pop_back(); - if (state->stack.empty()) delete state; + if (state) { + assertActive(); + state->stack--; + if (state->stack == 0) delete state; + } } void JSONWriter::comma() @@ -121,9 +123,11 @@ void JSONObject::open() JSONObject::~JSONObject() { - state->depth--; - if (state->indent && !first) indent(); - state->str << "}"; + if (state) { + state->depth--; + if (state->indent && !first) indent(); + state->str << "}"; + } } void JSONObject::attr(const std::string & s) diff --git a/src/libutil/json.hh b/src/libutil/json.hh index 595e9bbe349..02a39917fb5 100644 --- a/src/libutil/json.hh +++ b/src/libutil/json.hh @@ -21,11 +21,11 @@ protected: std::ostream & str; bool indent; size_t depth = 0; - std::vector stack; + size_t stack = 0; JSONState(std::ostream & str, bool indent) : str(str), indent(indent) { } ~JSONState() { - assert(stack.empty()); + assert(stack == 0); } }; @@ -41,7 +41,7 @@ protected: void assertActive() { - assert(!state->stack.empty() && state->stack.back() == this); + assert(state->stack != 0); } void comma(); @@ -117,6 +117,14 @@ public: open(); } + JSONObject(const JSONObject & obj) = delete; + + JSONObject(JSONObject && obj) + : JSONWriter(obj.state) + { + obj.state = 0; + } + ~JSONObject(); template diff --git a/src/nix/search.cc b/src/nix/search.cc index 970dcb9834b..d3e018876a0 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -6,8 +6,10 @@ #include "get-drvs.hh" #include "common-args.hh" #include "json.hh" +#include "json-to-value.hh" #include +#include using namespace nix; @@ -25,9 +27,23 @@ struct CmdSearch : SourceExprCommand, MixJSON { std::string re; + bool writeCache = true; + bool useCache = true; + CmdSearch() { expectArg("regex", &re, true); + + mkFlag() + .longName("update-cache") + .shortName('u') + .description("update the package search cache") + .handler([&](Strings ss) { writeCache = true; useCache = false; }); + + mkFlag() + .longName("no-cache") + .description("do not use or update the package search cache") + .handler([&](Strings ss) { writeCache = false; useCache = false; }); } std::string name() override @@ -48,15 +64,18 @@ struct CmdSearch : SourceExprCommand, MixJSON auto state = getEvalState(); - std::function doExpr; - bool first = true; auto jsonOut = json ? 
std::make_unique(std::cout, true) : nullptr; auto sToplevel = state->symbols.create("_toplevel"); + auto sRecurse = state->symbols.create("recurseForDerivations"); + + bool fromCache = false; - doExpr = [&](Value * v, std::string attrPath, bool toplevel) { + std::function doExpr; + + doExpr = [&](Value * v, std::string attrPath, bool toplevel, JSONObject * cache) { debug("at attribute ‘%s’", attrPath); try { @@ -115,23 +134,41 @@ struct CmdSearch : SourceExprCommand, MixJSON hilite(description, descriptionMatch)); } } + + if (cache) { + cache->attr("type", "derivation"); + cache->attr("name", drv.queryName()); + cache->attr("system", drv.querySystem()); + if (description != "") { + auto meta(cache->object("meta")); + meta.attr("description", description); + } + } } else if (v->type == tAttrs) { if (!toplevel) { auto attrs = v->attrs; - Bindings::iterator j = attrs->find(state->symbols.create("recurseForDerivations")); - if (j == attrs->end() || !state->forceBool(*j->value, *j->pos)) return; + Bindings::iterator j = attrs->find(sRecurse); + if (j == attrs->end() || !state->forceBool(*j->value, *j->pos)) { + debug("skip attribute ‘%s’", attrPath); + return; + } } - Bindings::iterator j = v->attrs->find(sToplevel); - bool toplevel2 = j != v->attrs->end() && state->forceBool(*j->value, *j->pos); + bool toplevel2 = false; + if (!fromCache) { + Bindings::iterator j = v->attrs->find(sToplevel); + toplevel2 = j != v->attrs->end() && state->forceBool(*j->value, *j->pos); + } for (auto & i : *v->attrs) { + auto cache2 = + cache ? std::make_unique(cache->object(i.name)) : nullptr; doExpr(i.value, attrPath == "" ? (std::string) i.name : attrPath + "." + (std::string) i.name, - toplevel2); + toplevel2 || fromCache, cache2 ? cache2.get() : nullptr); } } @@ -144,7 +181,30 @@ struct CmdSearch : SourceExprCommand, MixJSON } }; - doExpr(getSourceExpr(*state), "", true); + Path jsonCacheFileName = getCacheDir() + "/nix/package-search.json"; + + if (useCache && pathExists(jsonCacheFileName)) { + + Value vRoot; + parseJSON(*state, readFile(jsonCacheFileName), vRoot); + + fromCache = true; + + doExpr(&vRoot, "", true, nullptr); + } + + else { + Path tmpFile = fmt("%s.tmp.%d", jsonCacheFileName, getpid()); + + std::ofstream jsonCacheFile(tmpFile); + + auto cache = writeCache ? std::make_unique(jsonCacheFile, false) : nullptr; + + doExpr(getSourceExpr(*state), "", true, cache.get()); + + if (rename(tmpFile.c_str(), jsonCacheFileName.c_str()) == -1) + throw SysError("cannot rename ‘%s’ to ‘%s’", tmpFile, jsonCacheFileName); + } } }; From 6d7de7f3ded6254ea3a284b2c9b6a51fc8b041bb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Jul 2017 16:16:08 +0200 Subject: [PATCH 0469/2196] builtins.fetchgit: Cache hash -> store path mappings This prevents an expensive call to addToStore() in the cached case. 
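A rough sketch of the resulting cache layout (the home directory and the store path shown are made-up placeholders; the cache keeps one per-commit ".link" symlink under ~/.cache/nix/git, as the code below does):

  $ ls ~/.cache/nix/git/*.link
  /home/alice/.cache/nix/git/c1c0484041ab6f9c6858c8ade80a8477c9ae4442.link

  $ readlink ~/.cache/nix/git/c1c0484041ab6f9c6858c8ade80a8477c9ae4442.link
  /nix/store/...-git-export

On a later evaluation of the same revision, the symlink target is registered as a
temporary GC root and returned directly if it is still a valid store path, so the
tar export and the addToStore() call are skipped.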
--- src/libexpr/primops/fetchgit.cc | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index 3e4ece2cffd..1ac9b364f52 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -2,6 +2,7 @@ #include "eval-inline.hh" #include "download.hh" #include "store-api.hh" +#include "pathlocks.hh" namespace nix { @@ -28,7 +29,18 @@ Path exportGit(ref store, const std::string & uri, const std::string & re unlink(localRefFile.c_str()); - debug(format("got revision ‘%s’") % commitHash); + printTalkative("using revision %s of repo ‘%s’", uri, commitHash); + + Path storeLink = cacheDir + "/" + commitHash + ".link"; + PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on ‘%1%’...", storeLink)); + + if (pathExists(storeLink)) { + auto storePath = readLink(storeLink); + store->addTempRoot(storePath); + if (store->isValidPath(storePath)) { + return storePath; + } + } // FIXME: should pipe this, or find some better way to extract a // revision. @@ -39,7 +51,11 @@ Path exportGit(ref store, const std::string & uri, const std::string & re runProgram("tar", true, { "x", "-C", tmpDir }, tar); - return store->addToStore("git-export", tmpDir); + auto storePath = store->addToStore("git-export", tmpDir); + + replaceSymlink(storePath, storeLink); + + return storePath; } static void prim_fetchgit(EvalState & state, const Pos & pos, Value * * args, Value & v) From 69deca194ec789fa63d222bbd6549dab73328022 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Jul 2017 17:02:25 +0200 Subject: [PATCH 0470/2196] builtins.fetchgit: Use proper refs locally --- src/libexpr/primops/fetchgit.cc | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index 1ac9b364f52..b64a00b6146 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -20,15 +20,14 @@ Path exportGit(ref store, const std::string & uri, const std::string & re //Activity act(*logger, lvlInfo, format("fetching Git repository ‘%s’") % uri); - std::string localRef = "pid-" + std::to_string(getpid()); + std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, rev)).to_string(Base32, false); + Path localRefFile = cacheDir + "/refs/heads/" + localRef; - runProgram("git", true, { "-C", cacheDir, "fetch", uri, rev + ":" + localRef }); + runProgram("git", true, { "-C", cacheDir, "fetch", "--force", uri, rev + ":" + localRef }); std::string commitHash = chomp(readFile(localRefFile)); - unlink(localRefFile.c_str()); - printTalkative("using revision %s of repo ‘%s’", uri, commitHash); Path storeLink = cacheDir + "/" + commitHash + ".link"; From 9f64cb89cbbd0cd0540ad99e3578b6cecd385a81 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Jul 2017 17:15:09 +0200 Subject: [PATCH 0471/2196] builtins.fetchgit: Respect tarball-ttl I.e. if the local ref is more recent than tarball-ttl seconds, then don't check the remote. 
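For example (illustrative only; the flag spelling matches the one used by the
fetchGit tests later in this series, and the URL is a placeholder):

  # force the remote to be checked on this evaluation
  $ nix eval --tarball-ttl 0 "(builtins.fetchgit { url = https://example.org/repo.git; })"

  # accept a local ref that was last updated within the past hour
  $ nix eval --tarball-ttl 3600 "(builtins.fetchgit { url = https://example.org/repo.git; })"

Within the TTL window, repeated evaluations skip the 'git fetch' entirely and reuse
the cached local ref.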
--- src/libexpr/primops/fetchgit.cc | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index b64a00b6146..3ab2644c8b0 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -4,6 +4,8 @@ #include "store-api.hh" #include "pathlocks.hh" +#include + namespace nix { Path exportGit(ref store, const std::string & uri, const std::string & rev) @@ -24,7 +26,23 @@ Path exportGit(ref store, const std::string & uri, const std::string & re Path localRefFile = cacheDir + "/refs/heads/" + localRef; - runProgram("git", true, { "-C", cacheDir, "fetch", "--force", uri, rev + ":" + localRef }); + /* If the local ref is older than ‘tarball-ttl’ seconds, do a git + fetch to update the local ref to the remote ref. */ + time_t now = time(0); + struct stat st; + if (stat(localRefFile.c_str(), &st) != 0 || + st.st_mtime < now - settings.tarballTtl) + { + runProgram("git", true, { "-C", cacheDir, "fetch", "--force", uri, rev + ":" + localRef }); + + struct timeval times[2]; + times[0].tv_sec = now; + times[0].tv_usec = 0; + times[1].tv_sec = now; + times[1].tv_usec = 0; + + utimes(localRefFile.c_str(), times); + } std::string commitHash = chomp(readFile(localRefFile)); From 7480f4f9a478d294eaa095c101df7c0543c1fd71 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Jul 2017 18:08:23 +0200 Subject: [PATCH 0472/2196] builtins.fetchgit: Support specifying commit hashes This adds an argument "rev" specififying the Git commit hash. The existing argument "rev" is renamed to "ref". The default value for "ref" is "master". When specifying a hash, it's necessary to specify a ref since we're not cloning the entire repository but only fetching a specific ref. Example usage: builtins.fetchgit { url = https://github.com/NixOS/nixpkgs.git; ref = "release-16.03"; rev = "c1c0484041ab6f9c6858c8ade80a8477c9ae4442"; }; --- src/libexpr/primops/fetchgit.cc | 34 ++++++++++++++++++++++++++------- src/libexpr/primops/fetchgit.hh | 4 ++-- 2 files changed, 29 insertions(+), 9 deletions(-) diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchgit.cc index 3ab2644c8b0..1e01a452b7e 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchgit.cc @@ -6,13 +6,27 @@ #include +#include + namespace nix { -Path exportGit(ref store, const std::string & uri, const std::string & rev) +Path exportGit(ref store, const std::string & uri, + const std::string & ref, const std::string & rev) { if (!isUri(uri)) throw EvalError(format("‘%s’ is not a valid URI") % uri); + if (rev != "") { + std::regex revRegex("^[0-9a-fA-F]{40}$"); + if (!std::regex_match(rev, revRegex)) + throw Error("invalid Git revision ‘%s’", rev); + } + + // FIXME: too restrictive, but better safe than sorry. 
+ std::regex refRegex("^[0-9a-zA-Z][0-9a-zA-Z.-]+$"); + if (!std::regex_match(ref, refRegex)) + throw Error("invalid Git ref ‘%s’", ref); + Path cacheDir = getCacheDir() + "/nix/git"; if (!pathExists(cacheDir)) { @@ -22,7 +36,7 @@ Path exportGit(ref store, const std::string & uri, const std::string & re //Activity act(*logger, lvlInfo, format("fetching Git repository ‘%s’") % uri); - std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, rev)).to_string(Base32, false); + std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, ref)).to_string(Base32, false); Path localRefFile = cacheDir + "/refs/heads/" + localRef; @@ -33,7 +47,7 @@ Path exportGit(ref store, const std::string & uri, const std::string & re if (stat(localRefFile.c_str(), &st) != 0 || st.st_mtime < now - settings.tarballTtl) { - runProgram("git", true, { "-C", cacheDir, "fetch", "--force", uri, rev + ":" + localRef }); + runProgram("git", true, { "-C", cacheDir, "fetch", "--force", uri, ref + ":" + localRef }); struct timeval times[2]; times[0].tv_sec = now; @@ -44,7 +58,9 @@ Path exportGit(ref store, const std::string & uri, const std::string & re utimes(localRefFile.c_str(), times); } - std::string commitHash = chomp(readFile(localRefFile)); + // FIXME: check whether rev is an ancestor of ref. + std::string commitHash = + rev != "" ? rev : chomp(readFile(localRefFile)); printTalkative("using revision %s of repo ‘%s’", uri, commitHash); @@ -81,7 +97,8 @@ static void prim_fetchgit(EvalState & state, const Pos & pos, Value * * args, Va if (state.restricted) throw Error("‘fetchgit’ is not allowed in restricted mode"); std::string url; - std::string rev = "master"; + std::string ref = "master"; + std::string rev; state.forceValue(*args[0]); @@ -95,7 +112,10 @@ static void prim_fetchgit(EvalState & state, const Pos & pos, Value * * args, Va PathSet context; url = state.coerceToString(*attr.pos, *attr.value, context, false, false); if (hasPrefix(url, "/")) url = "file://" + url; - } else if (name == "rev") + } + else if (name == "ref") + ref = state.forceStringNoCtx(*attr.value, *attr.pos); + else if (name == "rev") rev = state.forceStringNoCtx(*attr.value, *attr.pos); else throw EvalError("unsupported argument ‘%s’ to ‘fetchgit’, at %s", attr.name, *attr.pos); @@ -107,7 +127,7 @@ static void prim_fetchgit(EvalState & state, const Pos & pos, Value * * args, Va } else url = state.forceStringNoCtx(*args[0], pos); - Path storePath = exportGit(state.store, url, rev); + Path storePath = exportGit(state.store, url, ref, rev); mkString(v, storePath, PathSet({storePath})); } diff --git a/src/libexpr/primops/fetchgit.hh b/src/libexpr/primops/fetchgit.hh index 6ffb21a96da..ff228f3b3c6 100644 --- a/src/libexpr/primops/fetchgit.hh +++ b/src/libexpr/primops/fetchgit.hh @@ -8,7 +8,7 @@ namespace nix { class Store; -Path exportGit(ref store, - const std::string & uri, const std::string & rev); +Path exportGit(ref store, const std::string & uri, + const std::string & ref, const std::string & rev = ""); } From af4689f9e9c846f8b8d902b05cabad88427d46b6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Jul 2017 14:56:39 +0200 Subject: [PATCH 0473/2196] nix-prefetch-url: Fix regression in hash printing --- src/libutil/hash.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 817ddc0b8bc..e16c3b6ea93 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -98,7 +98,7 @@ static string printHash32(const Hash & hash) string printHash16or32(const Hash & hash) { - 
return hash.to_string(hash.type == htMD5 ? Base16 : Base32); + return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false); } From c7654bc491d9ce7c1fbadecd7769418fa79a2060 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Jul 2017 15:03:59 +0200 Subject: [PATCH 0474/2196] nix-build: Fix regression causing all outputs to be built --- src/nix-build/nix-build.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 8a3c8a02506..769ca8cd03a 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -453,13 +453,13 @@ void mainWrapped(int argc, char * * argv) for (auto & drvInfo : drvs) { auto drvPath = drvInfo.queryDrvPath(); - pathsToBuild.insert(drvPath); + auto outPath = drvInfo.queryOutPath(); auto outputName = drvInfo.queryOutputName(); if (outputName == "") throw Error("derivation ‘%s’ lacks an ‘outputName’ attribute", drvPath); - pathsToBuild.insert(drvPath + (outputName != "out" ? "!" + outputName : "")); + pathsToBuild.insert(drvPath + "!" + outputName); std::string drvPrefix; auto i = drvPrefixes.find(drvPath); @@ -475,8 +475,8 @@ void mainWrapped(int argc, char * * argv) std::string symlink = drvPrefix; if (outputName != "out") symlink += "-" + outputName; - resultSymlinks[symlink] = drvInfo.queryOutPath(); - outPaths.push_back(drvInfo.queryOutPath()); + resultSymlinks[symlink] = outPath; + outPaths.push_back(outPath); } buildPaths(pathsToBuild); From 92bcb61127abd9da6ee1c11a53bf7b8da72f0c57 Mon Sep 17 00:00:00 2001 From: davidak Date: Sun, 30 Jul 2017 12:26:17 +0200 Subject: [PATCH 0475/2196] replace "Mac OS X" with "macOS" except in older release notes where the name was actually Mac OS X. --- doc/manual/command-ref/conf-file.xml | 2 +- doc/manual/installation/installing-binary.xml | 4 ++-- doc/manual/installation/multi-user.xml | 2 +- doc/manual/installation/supported-platforms.xml | 2 +- doc/manual/introduction/about-nix.xml | 2 +- src/libutil/archive.cc | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 3512777dd71..47ceff2625e 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -262,7 +262,7 @@ false. system (except that fixed-output derivations do not run in private network namespace to ensure they can access the network). - Currently, sandboxing only work on Linux and Mac OS X. The use + Currently, sandboxing only work on Linux and macOS. The use of a sandbox requires that Nix is run as root (so you should use the “build users” feature to perform the actual builds under different users diff --git a/doc/manual/installation/installing-binary.xml b/doc/manual/installation/installing-binary.xml index 2a9beec98c9..24e76eafeb1 100644 --- a/doc/manual/installation/installing-binary.xml +++ b/doc/manual/installation/installing-binary.xml @@ -6,7 +6,7 @@ Installing a Binary Distribution -If you are using Linux or Mac OS X, the easiest way to install +If you are using Linux or macOS, the easiest way to install Nix is to run the following command: @@ -39,7 +39,7 @@ behaviour. \ No newline at end of file From 71987b18d472c1c32214b8e4b36c09a972c82c0c Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Tue, 19 Dec 2017 12:48:34 -0600 Subject: [PATCH 0727/2196] linenoise.cpp: allow completions from empty input Fixes #1742. 
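An illustrative transcript (the candidate list depends on what is in scope):

  $ nix repl
  nix-repl> [Tab]

Pressing Tab on an empty line now invokes the completion callback and lists
candidates; previously the keypress was ignored there (a workaround for pasting
tab-indented text).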
--- src/linenoise/linenoise.cpp | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/linenoise/linenoise.cpp b/src/linenoise/linenoise.cpp index 8ee8984d694..c57505d2fa9 100644 --- a/src/linenoise/linenoise.cpp +++ b/src/linenoise/linenoise.cpp @@ -2587,13 +2587,6 @@ int InputBuffer::getInputLine(PromptBase& pi) { // ctrl-I/tab, command completion, needs to be before switch statement if (c == ctrlChar('I') && completionCallback) { - if (pos == 0) // SERVER-4967 -- in earlier versions, you could paste - // previous output - continue; // back into the shell ... this output may have leading - // tabs. - // This hack (i.e. what the old code did) prevents command completion - // on an empty line but lets users paste text with leading tabs. - killRing.lastAction = KillRing::actionOther; historyRecallMostRecent = false; From af1e2ffca11c311333f6be13add8a93e7d5428db Mon Sep 17 00:00:00 2001 From: Frederik Rietdijk Date: Wed, 20 Dec 2017 13:05:56 +0100 Subject: [PATCH 0728/2196] Fix escaping, fixes build --- scripts/install-darwin-multi-user.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index b6b3305bac7..7bd00e42b97 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -153,7 +153,7 @@ subheader() { } row() { - printf "$BOLD%s$ESC:\t%s\n" "$1" "$2" + printf "$BOLD%s$ESC:\\t%s\\n" "$1" "$2" } task() { @@ -218,7 +218,7 @@ __sudo() { echo "I am executing:" echo "" - printf " $ sudo %s\n" "$cmd" + printf " $ sudo %s\\n" "$cmd" echo "" echo "$expl" echo "" From 6d8087083278b078d0db6238fb16929163388acd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2017 11:33:34 +0100 Subject: [PATCH 0729/2196] release.nix: Use fetchTarball and fetchGit In particular, using fetchGit means we don't need hackery to clean the source tree when building from an unclean tree. --- local.mk | 3 +-- release.nix | 33 +++++++++++++++++---------------- tests/nix-copy-closure.nix | 4 ++-- tests/remote-builds.nix | 4 ++-- tests/setuid.nix | 4 ++-- 5 files changed, 24 insertions(+), 24 deletions(-) diff --git a/local.mk b/local.mk index 0a225423741..40a910991a4 100644 --- a/local.mk +++ b/local.mk @@ -1,6 +1,5 @@ ifeq ($(MAKECMDGOALS), dist) - # Make sure we are in repo root with `--git-dir` - dist-files += $(shell git --git-dir=.git ls-files || find * -type f) + dist-files += $(shell cat .dist-files) endif dist-files += configure config.h.in nix.spec perl/configure diff --git a/release.nix b/release.nix index c5c2170f78d..04fbcd563b8 100644 --- a/release.nix +++ b/release.nix @@ -1,12 +1,12 @@ -{ nix ? { outPath = ./.; revCount = 1234; shortRev = "abcdef"; } -, nixpkgs ? { outPath = ; revCount = 1234; shortRev = "abcdef"; } +{ nix ? fetchGit ./. +, nixpkgs ? fetchTarball channel:nixos-17.09 , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] }: let - pkgs = import {}; + pkgs = import nixpkgs {}; jobs = rec { @@ -27,16 +27,14 @@ let pkgconfig sqlite libsodium boehmgc docbook5 docbook5_xsl autoconf-archive - git ] ++ lib.optional stdenv.isLinux libseccomp; configureFlags = "--enable-gc"; postUnpack = '' - # Clean up when building from a working tree. - if [[ -d $sourceRoot/.git ]]; then - git -C $sourceRoot clean -fd - fi + ls -la source + (cd source && find . 
-type f) | cut -c3- > source/.dist-files + cat source/.dist-files ''; preConfigure = '' @@ -62,7 +60,7 @@ let build = pkgs.lib.genAttrs systems (system: - with import { inherit system; }; + with import nixpkgs { inherit system; }; with import ./release-common.nix { inherit pkgs; }; @@ -105,7 +103,7 @@ let perlBindings = pkgs.lib.genAttrs systems (system: - let pkgs = import { inherit system; }; in with pkgs; + let pkgs = import nixpkgs { inherit system; }; in with pkgs; releaseTools.nixBuild { name = "nix-perl"; @@ -131,7 +129,7 @@ let binaryTarball = pkgs.lib.genAttrs systems (system: # FIXME: temporarily use a different branch for the Darwin build. - with import { inherit system; }; + with import nixpkgs { inherit system; }; let toplevel = builtins.getAttr system jobs.build; @@ -174,7 +172,7 @@ let coverage = - with import { system = "x86_64-linux"; }; + with import nixpkgs { system = "x86_64-linux"; }; releaseTools.coverageAnalysis { name = "nix-build"; @@ -218,20 +216,23 @@ let # System tests. tests.remoteBuilds = (import ./tests/remote-builds.nix rec { + inherit nixpkgs; nix = build.x86_64-linux; system = "x86_64-linux"; }); tests.nix-copy-closure = (import ./tests/nix-copy-closure.nix rec { + inherit nixpkgs; nix = build.x86_64-linux; system = "x86_64-linux"; }); tests.setuid = pkgs.lib.genAttrs (pkgs.lib.filter (pkgs.lib.hasSuffix "-linux") systems) (system: import ./tests/setuid.nix rec { + inherit nixpkgs; nix = build.${system}; inherit system; }); tests.binaryTarball = - with import { system = "x86_64-linux"; }; + with import nixpkgs { system = "x86_64-linux"; }; vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test" { diskImage = vmTools.diskImages.ubuntu1204x86_64; } @@ -250,7 +251,7 @@ let ''); # */ tests.evalNixpkgs = - import { + import (nixpkgs + "/pkgs/top-level/make-tarball.nix") { inherit nixpkgs; inherit pkgs; nix = build.x86_64-linux; @@ -304,7 +305,7 @@ let makeRPM = system: diskImageFun: extraPackages: - with import { inherit system; }; + with import nixpkgs { inherit system; }; releaseTools.rpmBuild rec { name = "nix-rpm"; @@ -326,7 +327,7 @@ let makeDeb = system: diskImageFun: extraPackages: extraDebPackages: - with import { inherit system; }; + with import nixpkgs { inherit system; }; releaseTools.debBuild { name = "nix-deb"; diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix index 44126dd64e4..0bf5b42d84a 100644 --- a/tests/nix-copy-closure.nix +++ b/tests/nix-copy-closure.nix @@ -1,8 +1,8 @@ # Test ‘nix-copy-closure’. -{ system, nix }: +{ nixpkgs, system, nix }: -with import { inherit system; }; +with import (nixpkgs + /nixos/lib/testing.nix) { inherit system; }; makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in { diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index 58a26d8b618..75704ace2db 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -1,8 +1,8 @@ # Test Nix's remote build feature. -{ system, nix }: +{ nixpkgs, system, nix }: -with import { inherit system; }; +with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; }; makeTest ( diff --git a/tests/setuid.nix b/tests/setuid.nix index c982d9cf036..77e83c8d6c2 100644 --- a/tests/setuid.nix +++ b/tests/setuid.nix @@ -1,8 +1,8 @@ # Verify that Linux builds cannot create setuid or setgid binaries. 
-{ system, nix }: +{ nixpkgs, system, nix }: -with import { inherit system; }; +with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; }; makeTest { From aa43cbb7646e880f871df4280f8a1909520136f0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2017 12:05:13 +0100 Subject: [PATCH 0730/2196] Check aws-sdk-cpp version --- configure.ac | 8 +++++++- src/libstore/s3-binary-cache-store.cc | 10 +++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/configure.ac b/configure.ac index 9d8a81d0427..c395b8713f2 100644 --- a/configure.ac +++ b/configure.ac @@ -186,11 +186,17 @@ fi # Look for aws-cpp-sdk-s3. AC_LANG_PUSH(C++) AC_CHECK_HEADERS([aws/s3/S3Client.h], - [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-cpp-sdk-s3.]) + [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=1], [enable_s3=]) AC_SUBST(ENABLE_S3, [$enable_s3]) AC_LANG_POP(C++) +if test -n "$enable_s3"; then + declare -a aws_version_tokens=($(printf '#include \nAWS_SDK_VERSION_STRING' | cpp -E | grep -v '^#.*' | sed 's/"//g' | tr '.' ' ')) + AC_DEFINE_UNQUOTED([AWS_VERSION_MAJOR], ${aws_version_tokens@<:@0@:>@}, [Major version of aws-sdk-cpp.]) + AC_DEFINE_UNQUOTED([AWS_VERSION_MINOR], ${aws_version_tokens@<:@1@:>@}, [Minor version of aws-sdk-cpp.]) +fi + # Whether to use the Boehm garbage collector. AC_ARG_ENABLE(gc, AC_HELP_STRING([--enable-gc], diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index f5b6a9125ce..23af452094c 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -10,6 +10,7 @@ #include "istringstream_nocopy.hh" #include +#include #include #include #include @@ -87,7 +88,14 @@ S3Helper::S3Helper(const std::string & profile, const std::string & region) std::make_shared()) : std::dynamic_pointer_cast( std::make_shared(profile.c_str())), - *config, Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, false)) + *config, + // FIXME: https://github.com/aws/aws-sdk-cpp/issues/759 +#if AWS_VERSION_MAJOR == 1 && AWS_VERSION_MINOR < 3 + false, +#else + Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, +#endif + false)) { } From 2e6f06c37e26a5ac5be35fe18f283a1b26de64bf Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Fri, 22 Dec 2017 14:18:29 -0600 Subject: [PATCH 0731/2196] fetchGit: Fix handling of local repo when not using 'master' branch Add tests checking this behavior. --- src/libexpr/primops/fetchGit.cc | 6 +++++- tests/fetchGit.sh | 26 ++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index e92e0638031..0d0b11958a4 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -23,7 +23,7 @@ struct GitInfo }; GitInfo exportGit(ref store, const std::string & uri, - std::experimental::optional ref, const std::string & rev, + std::experimental::optional ref, std::string rev, const std::string & name) { if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) { @@ -68,6 +68,10 @@ GitInfo exportGit(ref store, const std::string & uri, return gitInfo; } + + // clean working tree, but no ref or rev specified. Use 'HEAD'. 
+ rev = chomp(runProgram("git", true, { "-C", uri, "rev-parse", "HEAD" })); + ref = "HEAD"s; } if (!ref) ref = "master"s; diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index 09e4f742668..65d673c0885 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -93,3 +93,29 @@ git -C $repo add hello git -C $repo commit -m 'Bla4' rev3=$(git -C $repo rev-parse HEAD) nix eval --tarball-ttl 3600 "(builtins.fetchGit { url = $repo; rev = \"$rev3\"; })" >/dev/null + +# Update 'path' to reflect latest master +path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") + +# Check behavior when non-master branch is used +git -C $repo checkout $rev2 -b dev +echo dev > $repo/hello + +# File URI uses 'master' unless specified otherwise +path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") +[[ $path = $path2 ]] + +# Using local path with branch other than 'master' should work when clean or dirty +path3=$(nix eval --raw "(builtins.fetchGit $repo).outPath") +# (check dirty-tree handling was used) +[[ $(nix eval --raw "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]] + +# Committing shouldn't change store path, or switch to using 'master' +git -C $repo commit -m 'Bla5' -a +path4=$(nix eval --raw "(builtins.fetchGit $repo).outPath") +[[ $(cat $path4/hello) = dev ]] +[[ $path3 = $path4 ]] + +# Confirm same as 'dev' branch +path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") +[[ $path3 = $path5 ]] From 4801420893766a3845f6ce6dc8c07bab1a2f15e0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 25 Dec 2017 14:53:15 +0100 Subject: [PATCH 0732/2196] Remove debug line --- release.nix | 1 - 1 file changed, 1 deletion(-) diff --git a/release.nix b/release.nix index 04fbcd563b8..68b58623289 100644 --- a/release.nix +++ b/release.nix @@ -32,7 +32,6 @@ let configureFlags = "--enable-gc"; postUnpack = '' - ls -la source (cd source && find . -type f) | cut -c3- > source/.dist-files cat source/.dist-files ''; From bd17ccf1d822ba76cdd58e9547bc18db35189c55 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Tue, 26 Dec 2017 19:22:28 -0600 Subject: [PATCH 0733/2196] nix repl: use linenoiseKeyType to differentiate ^C and ^D Fixes #1757. --- src/nix/repl.cc | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 1adb816c5bf..9216209173d 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -186,7 +186,16 @@ bool NixRepl::getLine(string & input, const std::string &prompt) { char * s = linenoise(prompt.c_str()); Finally doFree([&]() { free(s); }); - if (!s) return false; + if (!s) { + switch (auto type = linenoiseKeyType()) { + case 1: // ctrl-C + return true; + case 2: // ctrl-D + return false; + default: + throw Error(format("Unexpected linenoise keytype: %1%") % type); + } + } input += s; return true; } From ab8ba712054e170dfbef30f53b4a5a88b93aad6f Mon Sep 17 00:00:00 2001 From: Frederik Rietdijk Date: Fri, 29 Dec 2017 13:45:54 +0100 Subject: [PATCH 0734/2196] Do not export ASPELL_CONF This does not belong in Nix. Setting this env var is already done by the aspell derivation found in Nixpkgs. 
--- scripts/nix-profile.sh.in | 6 ------ 1 file changed, 6 deletions(-) diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index ab95c09c830..450d683c7f4 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -60,12 +60,6 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_USER_PROFILE_DIR" - for i in $NIX_PROFILES; do - if [ -d "$i/lib/aspell" ]; then - export ASPELL_CONF="dict-dir $i/lib/aspell" - fi - done - # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt From 9dd2b8ac7b8d82df8c1f3f36efb683175fd6ecee Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Fri, 29 Dec 2017 14:42:14 -0600 Subject: [PATCH 0735/2196] use libbrotli directly when available * Look for both 'brotli' and 'bro' as external command, since upstream has renamed it in newer versions. If neither are found, current runtime behavior is preserved: try to find 'bro' on PATH. * Limit amount handed to BrotliEncoderCompressStream to ensure interrupts are processed in a timely manner. Testing shows negligible performance impact. (Other compression sinks don't seem to require this) --- Makefile.config.in | 4 +- configure.ac | 9 +- src/libutil/compression.cc | 171 +++++++++++++++++++++++++++++++++++-- src/libutil/local.mk | 4 +- tests/brotli.sh | 28 ++++++ tests/common.sh.in | 1 + tests/local.mk | 3 +- 7 files changed, 207 insertions(+), 13 deletions(-) create mode 100644 tests/brotli.sh diff --git a/Makefile.config.in b/Makefile.config.in index 45a70cd6dd1..fab82194656 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -6,6 +6,7 @@ CXXFLAGS = @CXXFLAGS@ ENABLE_S3 = @ENABLE_S3@ HAVE_SODIUM = @HAVE_SODIUM@ HAVE_READLINE = @HAVE_READLINE@ +HAVE_BROTLI = @HAVE_BROTLI@ LIBCURL_LIBS = @LIBCURL_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ @@ -13,9 +14,10 @@ PACKAGE_VERSION = @PACKAGE_VERSION@ SODIUM_LIBS = @SODIUM_LIBS@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ +LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ bash = @bash@ bindir = @bindir@ -bro = @bro@ +brotli = @brotli@ lsof = @lsof@ datadir = @datadir@ datarootdir = @datarootdir@ diff --git a/configure.ac b/configure.ac index c395b8713f2..9db92ce9140 100644 --- a/configure.ac +++ b/configure.ac @@ -127,7 +127,7 @@ NEED_PROG(gzip, gzip) NEED_PROG(xz, xz) AC_PATH_PROG(dot, dot) AC_PATH_PROG(pv, pv, pv) -AC_PATH_PROG(bro, bro, bro) +AC_PATH_PROGS(brotli, brotli bro, bro) AC_PATH_PROG(lsof, lsof, lsof) @@ -176,6 +176,13 @@ AC_SUBST(HAVE_SODIUM, [$have_sodium]) PKG_CHECK_MODULES([LIBLZMA], [liblzma], [CXXFLAGS="$LIBLZMA_CFLAGS $CXXFLAGS"]) +# Look for libbrotli{enc,dec}, optional dependencies +PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], + [AC_DEFINE([HAVE_BROTLI], [1], [Whether to use libbrotli.]) + CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"] + have_brotli=1], [have_brotli=]) +AC_SUBST(HAVE_BROTLI, [$have_brotli]) + # Look for libseccomp, required for Linux sandboxing. 
if test "$sys_name" = linux; then PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp], diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 2b3dff3a5ea..5e2631ba340 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -7,6 +7,11 @@ #include #include +#if HAVE_BROTLI +#include +#include +#endif // HAVE_BROTLI + #include namespace nix { @@ -94,8 +99,56 @@ static ref decompressBzip2(const std::string & in) static ref decompressBrotli(const std::string & in) { - // FIXME: use libbrotli - return make_ref(runProgram(BRO, true, {"-d"}, {in})); +#if !HAVE_BROTLI + return make_ref(runProgram(BROTLI, true, {"-d"}, {in})); +#else + auto *s = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr); + if (!s) + throw CompressionError("unable to initialize brotli decoder"); + + Finally free([s]() { BrotliDecoderDestroyInstance(s); }); + + uint8_t outbuf[BUFSIZ]; + ref res = make_ref(); + const uint8_t *next_in = (uint8_t *)in.c_str(); + size_t avail_in = in.size(); + uint8_t *next_out = outbuf; + size_t avail_out = sizeof(outbuf); + + while (true) { + checkInterrupt(); + + auto ret = BrotliDecoderDecompressStream(s, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr); + + switch (ret) { + case BROTLI_DECODER_RESULT_ERROR: + throw CompressionError("error while decompressing brotli file"); + case BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: + throw CompressionError("incomplete or corrupt brotli file"); + case BROTLI_DECODER_RESULT_SUCCESS: + if (avail_in != 0) + throw CompressionError("unexpected input after brotli decompression"); + break; + case BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: + // I'm not sure if this can happen, but abort if this happens with empty buffer + if (avail_out == sizeof(outbuf)) + throw CompressionError("brotli decompression requires larger buffer"); + break; + } + + // Always ensure we have full buffer for next invocation + if (avail_out < sizeof(outbuf)) { + res->append((char*)outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + + if (ret == BROTLI_DECODER_RESULT_SUCCESS) return res; + } +#endif // HAVE_BROTLI } ref compress(const std::string & method, const std::string & in) @@ -270,33 +323,131 @@ struct BzipSink : CompressionSink } }; -struct BrotliSink : CompressionSink +struct LambdaCompressionSink : CompressionSink { Sink & nextSink; std::string data; + using CompressFnTy = std::function; + CompressFnTy compressFn; + LambdaCompressionSink(Sink& nextSink, CompressFnTy compressFn) + : nextSink(nextSink) + , compressFn(std::move(compressFn)) + { + }; + + void finish() override + { + flush(); + nextSink(compressFn(data)); + } + + void write(const unsigned char * data, size_t len) override + { + checkInterrupt(); + this->data.append((const char *) data, len); + } +}; + +struct BrotliCmdSink : LambdaCompressionSink +{ + BrotliCmdSink(Sink& nextSink) + : LambdaCompressionSink(nextSink, [](const std::string& data) { + return runProgram(BROTLI, true, {}, data); + }) + { + } +}; + +#if HAVE_BROTLI +struct BrotliSink : CompressionSink +{ + Sink & nextSink; + uint8_t outbuf[BUFSIZ]; + BrotliEncoderState *state; + bool finished = false; BrotliSink(Sink & nextSink) : nextSink(nextSink) { + state = BrotliEncoderCreateInstance(nullptr, nullptr, nullptr); + if (!state) + throw CompressionError("unable to initialise brotli encoder"); } ~BrotliSink() { + BrotliEncoderDestroyInstance(state); } - // FIXME: use libbrotli - void finish() override { flush(); - nextSink(runProgram(BRO, true, {}, data)); + 
assert(!finished); + + const uint8_t *next_in = nullptr; + size_t avail_in = 0; + uint8_t *next_out = outbuf; + size_t avail_out = sizeof(outbuf); + while (!finished) { + checkInterrupt(); + + if (!BrotliEncoderCompressStream(state, + BROTLI_OPERATION_FINISH, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr)) + throw CompressionError("error while finishing brotli file"); + + finished = BrotliEncoderIsFinished(state); + if (avail_out == 0 || finished) { + nextSink(outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + } } void write(const unsigned char * data, size_t len) override { - checkInterrupt(); - this->data.append((const char *) data, len); + assert(!finished); + + // Don't feed brotli too much at once + const size_t CHUNK_SIZE = sizeof(outbuf) << 2; + while (len) { + size_t n = std::min(CHUNK_SIZE, len); + writeInternal(data, n); + data += n; + len -= n; + } + } + private: + void writeInternal(const unsigned char * data, size_t len) + { + assert(!finished); + + const uint8_t *next_in = data; + size_t avail_in = len; + uint8_t *next_out = outbuf; + size_t avail_out = sizeof(outbuf); + + while (avail_in > 0) { + checkInterrupt(); + + if (!BrotliEncoderCompressStream(state, + BROTLI_OPERATION_PROCESS, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr)) + throw CompressionError("error while compressing brotli file"); + + if (avail_out < sizeof(outbuf) || avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + } } }; +#endif // HAVE_BROTLI ref makeCompressionSink(const std::string & method, Sink & nextSink) { @@ -307,7 +458,11 @@ ref makeCompressionSink(const std::string & method, Sink & next else if (method == "bzip2") return make_ref(nextSink); else if (method == "br") +#if HAVE_BROTLI return make_ref(nextSink); +#else + return make_ref(nextSink); +#endif else throw UnknownCompressionMethod(format("unknown compression method '%s'") % method); } diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 0721b21c208..5fc2aab569d 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -6,8 +6,8 @@ libutil_DIR := $(d) libutil_SOURCES := $(wildcard $(d)/*.cc) -libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) +libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) libutil_LIBS = libformat -libutil_CXXFLAGS = -DBRO=\"$(bro)\" +libutil_CXXFLAGS = -DBROTLI=\"$(brotli)\" diff --git a/tests/brotli.sh b/tests/brotli.sh new file mode 100644 index 00000000000..645dd4214ec --- /dev/null +++ b/tests/brotli.sh @@ -0,0 +1,28 @@ +source common.sh + + +# Only test if we found brotli libraries +# (CLI tool is likely unavailable if libraries are missing) +if [ -n "$HAVE_BROTLI" ]; then + +clearStore +clearCache + +cacheURI="file://$cacheDir?compression=br" + +outPath=$(nix-build dependencies.nix --no-out-link) + +nix copy --to $cacheURI $outPath + +HASH=$(nix hash-path $outPath) + +clearStore +clearCacheCache + +nix copy --from $cacheURI $outPath --no-check-sigs + +HASH2=$(nix hash-path $outPath) + +[[ $HASH = $HASH2 ]] + +fi # HAVE_BROTLI diff --git a/tests/common.sh.in b/tests/common.sh.in index 09f2949141a..83643d8b06b 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -32,6 +32,7 @@ export xmllint="@xmllint@" export SHELL="@bash@" export PAGER=cat export HAVE_SODIUM="@HAVE_SODIUM@" +export HAVE_BROTLI="@HAVE_BROTLI@" export version=@PACKAGE_VERSION@ export system=@system@ diff --git a/tests/local.mk b/tests/local.mk 
index baf74224bb1..83154228e99 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -19,7 +19,8 @@ nix_tests = \ fetchGit.sh \ fetchMercurial.sh \ signing.sh \ - run.sh + run.sh \ + brotli.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) From 5afee18726ce8241d56ea9690f228923441a8ea8 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Sat, 30 Dec 2017 21:29:23 -0600 Subject: [PATCH 0736/2196] run.sh: include lib64 in sandbox-paths to fix on ubuntu 16.XX (cc #1769) --- tests/run.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/run.sh b/tests/run.sh index 784d29183cf..194e767dd05 100644 --- a/tests/run.sh +++ b/tests/run.sh @@ -18,10 +18,10 @@ if [[ $(uname) = Linux ]]; then # Note: we need the sandbox paths to ensure that the shell is # visible in the sandbox. nix run --sandbox-build-dir /build-tmp \ - --sandbox-paths '/nix? /bin? /lib? /usr?' \ + --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \ --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World' - path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello') + path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello') [[ $path/bin/hello = $path2 ]] From 689b2783fcae0cd0559ebd1d37ccfc3a09c4b182 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Tue, 2 Jan 2018 12:22:52 -0500 Subject: [PATCH 0737/2196] Add hasContext primop --- src/libexpr/primops.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index e3b5dfb420b..6029714273a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1653,6 +1653,14 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, } +static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + PathSet context; + state.forceString(*args[0], context, pos); + mkBool(v, !context.empty()); +} + + /* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a builder without causing the derivation to be built (for instance, in the derivation that builds NARs in nix-push, when doing @@ -2083,6 +2091,7 @@ void EvalState::createBaseEnv() addPrimOp("toString", 1, prim_toString); addPrimOp("__substring", 3, prim_substring); addPrimOp("__stringLength", 1, prim_stringLength); + addPrimOp("__hasContext", 1, prim_hasContext); addPrimOp("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext); addPrimOp("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency); addPrimOp("__hashString", 2, prim_hashString); From 4cb5c513754ae709cd6dd2db746c43f7fafcbb0e Mon Sep 17 00:00:00 2001 From: Benjamin Hipple Date: Tue, 2 Jan 2018 23:39:42 -0500 Subject: [PATCH 0738/2196] Fix RPM builds by increasing VM memory size The VM was running out of RAM while handling debug symbols, which caused the eu-strip to fail while separating debug symbols. 
--- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 68b58623289..ec1bebf61e9 100644 --- a/release.nix +++ b/release.nix @@ -313,7 +313,7 @@ let { extraPackages = [ "sqlite" "sqlite-devel" "bzip2-devel" "libcurl-devel" "openssl-devel" "xz-devel" "libseccomp-devel" ] ++ extraPackages; }; - memSize = 1024; + memSize = 2048; meta.schedulingPriority = 50; postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; #enableParallelBuilding = true; From 27788f406078489b2fc04749ebb86e267a19a6a5 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Wed, 3 Jan 2018 22:29:54 +0100 Subject: [PATCH 0739/2196] installer: don't touch /etc/profile The default profile already loads /etc/bashrc. --- scripts/install-darwin-multi-user.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 7bd00e42b97..91194a299a9 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -33,7 +33,7 @@ readonly NIX_FIRST_BUILD_UID="30001" readonly NIX_ROOT="/nix" readonly PLIST_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist -readonly PROFILE_TARGETS=("/etc/profile" "/etc/bashrc" "/etc/zshrc") +readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/zshrc") readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" From d15826164c34b71994c66ae8bf1203492b0fb44a Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Wed, 3 Jan 2018 22:34:34 +0100 Subject: [PATCH 0740/2196] installer: create 'enough' build users --- scripts/install-darwin-multi-user.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 7bd00e42b97..515b4a87ed8 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -24,7 +24,7 @@ readonly YELLOW='\033[38;33m' readonly YELLOW_UL='\033[38;4;33m' readonly CORES=$(sysctl -n hw.ncpu) -readonly NIX_USER_COUNT="$CORES" +readonly NIX_USER_COUNT="32" readonly NIX_BUILD_GROUP_ID="30000" readonly NIX_BUILD_GROUP_NAME="nixbld" readonly NIX_FIRST_BUILD_UID="30001" From 44272d87193e6ab17d15501ebdcd317cdd39f616 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 4 Jan 2018 16:57:25 +0100 Subject: [PATCH 0741/2196] Rename "use-substitutes" to "substitute" Commit c2154d4c8422ddc1c201d503bb52edff854af2ad renamed "build-use-substitutes" to "use-substitutes", but that broke "nix-copy-closure --use-substitutes". --- doc/manual/command-ref/conf-file.xml | 2 +- src/libstore/globals.hh | 2 +- tests/shell.shebang.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index e52cbcd535e..87f05fb1c70 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -312,7 +312,7 @@ false. - use-substitutes + substitute If set to true (default), Nix will use binary substitutes if available. 
This option can be diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 5c857cbb6a9..ae4b78a018e 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -150,7 +150,7 @@ public: Setting syncBeforeRegistering{this, false, "sync-before-registering", "Whether to call sync() before registering a path as valid."}; - Setting useSubstitutes{this, true, "use-substitutes", + Setting useSubstitutes{this, true, "substitute", "Whether to use substitutes.", {"build-use-substitutes"}}; diff --git a/tests/shell.shebang.sh b/tests/shell.shebang.sh index c8e55ca9b90..f7132043de4 100755 --- a/tests/shell.shebang.sh +++ b/tests/shell.shebang.sh @@ -1,4 +1,4 @@ #! @ENV_PROG@ nix-shell -#! nix-shell -I nixpkgs=shell.nix --no-use-substitutes +#! nix-shell -I nixpkgs=shell.nix --no-substitute #! nix-shell --pure -i bash -p foo bar echo "$(foo) $(bar) $@" From 1882e802e760e61669c9cff6b4e933d87cf977ba Mon Sep 17 00:00:00 2001 From: Benjamin Hipple Date: Thu, 4 Jan 2018 19:44:32 -0500 Subject: [PATCH 0742/2196] Fix Fedora 25 i386 RPM build --- release.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/release.nix b/release.nix index ec1bebf61e9..2dd7264d912 100644 --- a/release.nix +++ b/release.nix @@ -313,7 +313,8 @@ let { extraPackages = [ "sqlite" "sqlite-devel" "bzip2-devel" "libcurl-devel" "openssl-devel" "xz-devel" "libseccomp-devel" ] ++ extraPackages; }; - memSize = 2048; + # At most 2047MB can be simulated in qemu-system-i386 + memSize = 2047; meta.schedulingPriority = 50; postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck"; #enableParallelBuilding = true; From 7b9583680e9e1be1df2a5cd0d71ed8f9a7d45fad Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 Jan 2018 19:13:48 +0100 Subject: [PATCH 0743/2196] Improve error message with --repair for untrusted users --- src/nix-daemon/nix-daemon.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 5629cc64b96..b5d49b6428a 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -411,7 +411,7 @@ static void performOp(TunnelLogger * logger, ref store, /* Repairing is not atomic, so disallowed for "untrusted" clients. */ if (mode == bmRepair && !trusted) - throw Error("repairing is not supported when building through the Nix daemon"); + throw Error("repairing is not allowed because you are not in 'trusted-users'"); } logger->startWork(); store->buildPaths(drvs, mode); From 84d9e213d2dc8b09705b0668184f33ddd0a004b1 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Tue, 9 Jan 2018 08:58:19 -0600 Subject: [PATCH 0744/2196] fetchGit.sh: Test we don't "corrupt" cache if invoke w/o git avail --- tests/fetchGit.sh | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index 65d673c0885..b556fe594ce 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -119,3 +119,16 @@ path4=$(nix eval --raw "(builtins.fetchGit $repo).outPath") # Confirm same as 'dev' branch path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") [[ $path3 = $path5 ]] + + +# Nuke the cache +rm -rf $TEST_HOME/.cache/nix/git + +# Try again, but without 'git' on PATH +NIX=$(command -v nix) +# This should fail +(! PATH= $NIX eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" ) + +# Try again, with 'git' available. This should work. 
+path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") +[[ $path3 = $path5 ]] From 428680b3076a01f278ed629aa3b1744d11a2c231 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Tue, 9 Jan 2018 09:05:18 -0600 Subject: [PATCH 0745/2196] fetchGit: fix creation of uninitialized cache dir, let git create it fetchGit test (as modified in previous commit) now passes. --- src/libexpr/primops/fetchGit.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index 0d0b11958a4..fb664cffb5b 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -85,7 +85,6 @@ GitInfo exportGit(ref store, const std::string & uri, Path cacheDir = getCacheDir() + "/nix/git"; if (!pathExists(cacheDir)) { - createDirs(cacheDir); runProgram("git", true, { "init", "--bare", cacheDir }); } From b0328c244dd15da9e45f4e95d900c81fe0c47dda Mon Sep 17 00:00:00 2001 From: Renzo Carbonara Date: Tue, 9 Jan 2018 22:40:07 +0100 Subject: [PATCH 0746/2196] nix.conf: builders-use-substitutes Fixes #937 --- doc/manual/command-ref/conf-file.xml | 14 ++++++++++++++ src/build-remote/build-remote.cc | 6 ++++-- src/libstore/globals.hh | 5 +++++ 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 87f05fb1c70..2ddca991f19 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -321,6 +321,20 @@ false. + builders-use-substitutes + + If set to true, Nix will instruct + remote build machines to use their own binary substitutes if available. In + practical terms, this means that remote hosts will fetch as many build + dependencies as possible from their own substitutes (e.g, from + cache.nixos.org), instead of waiting for this host to + upload them all. This can drastically reduce build times if the network + connection between this computer and the remote build host is slow. Defaults + to false. + + + + fallback If set to true, Nix will fall diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 445006b327f..df579729af2 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -218,9 +218,11 @@ int main (int argc, char * * argv) signal(SIGALRM, old); } + auto substitute = settings.buildersUseSubstitutes ? 
Substitute : NoSubstitute; + { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri)); - copyPaths(store, ref(sshStore), inputs, NoRepair, NoCheckSigs); + copyPaths(store, ref(sshStore), inputs, NoRepair, NoCheckSigs, substitute); } uploadLock = -1; @@ -240,7 +242,7 @@ int main (int argc, char * * argv) if (!missing.empty()) { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri)); setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */ - copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs); + copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs, substitute); } return; diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index ae4b78a018e..af72f7b1e35 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -138,6 +138,11 @@ public: Setting builders{this, "@" + nixConfDir + "/machines", "builders", "A semicolon-separated list of build machines, in the format of nix.machines."}; + Setting buildersUseSubstitutes{this, false, "builders-use-substitutes", + "Whether build machines should use their own substitutes for obtaining " + "build dependencies if possible, rather than waiting for this host to " + "upload them."}; + Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", "Amount of reserved disk space for the garbage collector."}; From 435ccc798077e6291fd34fd1720c6abcf3521557 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 10 Jan 2018 14:16:49 -0600 Subject: [PATCH 0747/2196] release: access fetchGit from builtins to fix eval w/1.11 (<1.12) --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 2dd7264d912..30bec2a6e3e 100644 --- a/release.nix +++ b/release.nix @@ -1,4 +1,4 @@ -{ nix ? fetchGit ./. +{ nix ? builtins.fetchGit ./. , nixpkgs ? fetchTarball channel:nixos-17.09 , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] From 74f75c855837bce7f48491e9ce8ac03794e5b40d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2018 17:31:08 +0100 Subject: [PATCH 0748/2196] import, builtins.readFile: Handle diverted stores Fixes #1791 --- src/libexpr/eval.cc | 10 ++++++++++ src/libexpr/eval.hh | 9 +++++++++ src/libexpr/primops.cc | 12 ++++++------ src/libstore/store-api.hh | 10 ++++++++-- 4 files changed, 33 insertions(+), 8 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 63de2d60a14..087a95ddef8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -375,6 +375,16 @@ void EvalState::checkURI(const std::string & uri) } +Path EvalState::toRealPath(const Path & path, const PathSet & context) +{ + // FIXME: check whether 'path' is in 'context'. + return + !context.empty() && store->isInStore(path) + ? store->toRealPath(path) + : path; +}; + + void EvalState::addConstant(const string & name, Value & v) { Value * v2 = allocValue(); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f0ab1435bff..cc971ae80a4 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -112,6 +112,15 @@ public: void checkURI(const std::string & uri); + /* When using a diverted store and 'path' is in the Nix store, map + 'path' to the diverted location (e.g. /nix/store/foo is mapped + to /home/alice/my-nix/nix/store/foo). However, this is only + done if the context is not empty, since otherwise we're + probably trying to read from the actual /nix/store. 
This is + intended to distinguish between import-from-derivation and + sources stored in the actual /nix/store. */ + Path toRealPath(const Path & path, const PathSet & context); + /* Parse a Nix expression from the specified file. */ Expr * parseExprFromFile(const Path & path); Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 6029714273a..98fe2199e9f 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -84,10 +84,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args % path % e.path % pos); } - path = state.checkSourcePath(path); + Path realPath = state.checkSourcePath(state.toRealPath(path, context)); if (state.store->isStorePath(path) && state.store->isValidPath(path) && isDerivation(path)) { - Derivation drv = readDerivation(path); + Derivation drv = readDerivation(realPath); Value & w = *state.allocValue(); state.mkAttrs(w, 3 + drv.outputs.size()); Value * v2 = state.allocAttr(w, state.sDrvPath); @@ -114,7 +114,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args } else { state.forceAttrs(*args[0]); if (args[0]->attrs->empty()) - state.evalFile(path, v); + state.evalFile(realPath, v); else { Env * env = &state.allocEnv(args[0]->attrs->size()); env->up = &state.baseEnv; @@ -127,8 +127,8 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args env->values[displ++] = attr.value; } - printTalkative("evaluating file '%1%'", path); - Expr * e = state.parseExprFromFile(resolveExprPath(path), staticEnv); + printTalkative("evaluating file '%1%'", realPath); + Expr * e = state.parseExprFromFile(resolveExprPath(realPath), staticEnv); e->eval(state, *env, v); } @@ -863,7 +863,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va throw EvalError(format("cannot read '%1%', since path '%2%' is not valid, at %3%") % path % e.path % pos); } - string s = readFile(state.checkSourcePath(path)); + string s = readFile(state.checkSourcePath(state.toRealPath(path, context))); if (s.find((char) 0) != string::npos) throw Error(format("the contents of the file '%1%' cannot be represented as a Nix string") % path); mkString(v, s.c_str()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index d1e1b5d6f45..c0e735cd314 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -597,6 +597,11 @@ public: "nix-cache-info" file. Lower value means higher priority. */ virtual int getPriority() { return 0; } + virtual Path toRealPath(const Path & storePath) + { + return storePath; + } + protected: Stats stats; @@ -639,9 +644,10 @@ public: virtual Path getRealStoreDir() { return storeDir; } - Path toRealPath(const Path & storePath) + Path toRealPath(const Path & storePath) override { - return getRealStoreDir() + "/" + baseNameOf(storePath); + assert(isInStore(storePath)); + return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1); } std::shared_ptr getBuildLog(const Path & path) override; From a65376b01da1f5e612ecbc7232f23c0bebcabe22 Mon Sep 17 00:00:00 2001 From: Peter Stuart Date: Fri, 12 Jan 2018 14:27:29 -0500 Subject: [PATCH 0749/2196] Remove extra space. 
--- scripts/install-darwin-multi-user.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 91194a299a9..2e9d368c020 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -647,7 +647,7 @@ chat_about_sudo() { cat < Date: Fri, 12 Jan 2018 14:45:05 -0600 Subject: [PATCH 0750/2196] nix log: use pager --- src/nix/log.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix/log.cc b/src/nix/log.cc index 966ad8b6508..f07ec4e93a1 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -50,6 +50,7 @@ struct CmdLog : InstallableCommand auto b = installable->toBuildable(); + RunPager pager; for (auto & sub : subs) { auto log = b.drvPath != "" ? sub->getBuildLog(b.drvPath) : nullptr; for (auto & output : b.outputs) { From 59086e459c2650b7dc42bd2fc4a9a98b23aaf6e7 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Sat, 13 Jan 2018 15:18:35 +0200 Subject: [PATCH 0751/2196] Fix tests using user namespaces on kernels that don't have it Disable various tests if the kernel doesn't support unprivileged user namespaces (e.g. Arch Linux disables them) or disable them via a sysctl (Debian, Ubuntu). Fixes #1521 Fixes #1625 --- tests/build-remote.sh | 2 +- tests/common.sh.in | 18 ++++++++++++++++++ tests/linux-sandbox.sh | 2 +- tests/run.sh | 27 +++++++++++++-------------- 4 files changed, 33 insertions(+), 16 deletions(-) diff --git a/tests/build-remote.sh b/tests/build-remote.sh index cf3bb463318..9bca0f4a385 100644 --- a/tests/build-remote.sh +++ b/tests/build-remote.sh @@ -2,7 +2,7 @@ source common.sh clearStore -if [[ $(uname) != Linux ]]; then exit; fi +if ! canUseSandbox; then exit; fi if [[ ! $SHELL =~ /nix/store ]]; then exit; fi chmod -R u+w $TEST_ROOT/store0 || true diff --git a/tests/common.sh.in b/tests/common.sh.in index 83643d8b06b..186f9d6b955 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -87,6 +87,24 @@ killDaemon() { trap "" EXIT } +canUseSandbox() { + if [[ $(uname) != Linux ]]; then return 1; fi + + if [ ! -L /proc/self/ns/user ]; then + echo "Kernel doesn't support user namespaces, skipping this test..." + return 1 + fi + + if [ -e /proc/sys/kernel/unprivileged_userns_clone ]; then + if [ "$(cat /proc/sys/kernel/unprivileged_userns_clone)" != 1 ]; then + echo "Unprivileged user namespaces disabled by sysctl, skipping this test..." + return 1 + fi + fi + + return 0 +} + fail() { echo "$1" exit 1 diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh index 4a686bb59a3..acfd46c5417 100644 --- a/tests/linux-sandbox.sh +++ b/tests/linux-sandbox.sh @@ -2,7 +2,7 @@ source common.sh clearStore -if [[ $(uname) != Linux ]]; then exit; fi +if ! canUseSandbox; then exit; fi # Note: we need to bind-mount $SHELL into the chroot. Currently we # only support the case where $SHELL is in the Nix store, because diff --git a/tests/run.sh b/tests/run.sh index 194e767dd05..d1dbfd6bd4a 100644 --- a/tests/run.sh +++ b/tests/run.sh @@ -6,24 +6,23 @@ clearCache nix run -f run.nix hello -c hello | grep 'Hello World' nix run -f run.nix hello -c hello NixOS | grep 'Hello NixOS' -if [[ $(uname) = Linux ]]; then +if ! 
canUseSandbox; then exit; fi - chmod -R u+w $TEST_ROOT/store0 || true - rm -rf $TEST_ROOT/store0 +chmod -R u+w $TEST_ROOT/store0 || true +rm -rf $TEST_ROOT/store0 - clearStore +clearStore - path=$(nix eval --raw -f run.nix hello) +path=$(nix eval --raw -f run.nix hello) - # Note: we need the sandbox paths to ensure that the shell is - # visible in the sandbox. - nix run --sandbox-build-dir /build-tmp \ - --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \ - --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World' +# Note: we need the sandbox paths to ensure that the shell is +# visible in the sandbox. +nix run --sandbox-build-dir /build-tmp \ + --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \ + --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World' - path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello') +path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello') - [[ $path/bin/hello = $path2 ]] +[[ $path/bin/hello = $path2 ]] - [[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]] -fi +[[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]] From ebc42f8b5944e6669362e7d13856f126b8e075d7 Mon Sep 17 00:00:00 2001 From: Iavael Date: Mon, 15 Jan 2018 00:43:39 +0300 Subject: [PATCH 0752/2196] Fix manpath detection Checking for MANPATH without quotes always returns true, so that it breaks bash-completion for man pages on modern systems without MANPATH environment variable. --- scripts/nix-profile.sh.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index 450d683c7f4..a5f52274fc7 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -75,7 +75,7 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt" fi - if [ -n ${MANPATH} ]; then + if [ -n "${MANPATH}" ]; then export MANPATH="$NIX_LINK/share/man:$MANPATH" fi From ba75c69e0014178cf524cc07427bb4e9eac333f7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 15 Jan 2018 12:14:43 +0100 Subject: [PATCH 0753/2196] Barf when using a diverted store on macOS Fixes #1792. --- src/libstore/build.cc | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d4bd650baf2..523d737d9bf 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1810,8 +1810,13 @@ void DerivationGoal::startBuilder() useChroot = !fixedOutput && get(drv->env, "__noChroot") != "1"; } - if (worker.store.storeDir != worker.store.realStoreDir) - useChroot = true; + if (worker.store.storeDir != worker.store.realStoreDir) { + #if __linux__ + useChroot = true; + #else + throw Error("building using a diverted store is not supported on this platform"); + #endif + } /* If `build-users-group' is not empty, then we have to build as one of the members of that group. */ From 23fa7e3606a2bee6e3622a61f07e66bdda9b5304 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2018 17:11:58 +0100 Subject: [PATCH 0754/2196] parseExprFromFile -> evalFile parseExprFromFile() should be avoided since it doesn't cache anything. 
--- src/nix/installables.cc | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index ae93c4ef649..c3b06c22eba 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -30,10 +30,8 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state) vSourceExpr = state.allocValue(); - if (file != "") { - Expr * e = state.parseExprFromFile(resolveExprPath(lookupFileArg(state, file))); - state.eval(e, *vSourceExpr); - } + if (file != "") + state.evalFile(lookupFileArg(state, file), *vSourceExpr); else { From d4dcffd64349bb52ad5f1b184bee5cc7c2be73b4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2018 18:50:38 +0100 Subject: [PATCH 0755/2196] Add pure evaluation mode In this mode, the following restrictions apply: * The builtins currentTime, currentSystem and storePath throw an error. * $NIX_PATH and -I are ignored. * fetchGit and fetchMercurial require a revision hash. * fetchurl and fetchTarball require a sha256 attribute. * No file system access is allowed outside of the paths returned by fetch{Git,Mercurial,url,Tarball}. Thus 'nix build -f ./foo.nix' is not allowed. Thus, the evaluation result is completely reproducible from the command line arguments. E.g. nix build --pure-eval '( let nix = fetchGit { url = https://github.com/NixOS/nixpkgs.git; rev = "9c927de4b179a6dd210dd88d34bda8af4b575680"; }; nixpkgs = fetchGit { url = https://github.com/NixOS/nixpkgs.git; ref = "release-17.09"; rev = "66b4de79e3841530e6d9c6baf98702aa1f7124e4"; }; in (import (nix + "/release.nix") { inherit nix nixpkgs; }).build.x86_64-linux )' The goal is to enable completely reproducible and traceable evaluation. For example, a NixOS configuration could be fully described by a single Git commit hash. 'nixos-rebuild' would do something like nix build --pure-eval '( (import (fetchGit { url = file:///my-nixos-config; rev = "..."; })).system ') where the Git repository /my-nixos-config would use further fetchGit calls or Git externals to fetch Nixpkgs and whatever other dependencies it has. Either way, the commit hash would uniquely identify the NixOS configuration and allow it to reproduced. 
--- mk/tests.mk | 2 +- release.nix | 2 +- src/libexpr/eval.cc | 65 +++++++++++++++----------- src/libexpr/eval.hh | 8 ++-- src/libexpr/primops.cc | 44 +++++++++++++---- src/libexpr/primops/fetchGit.cc | 15 ++++-- src/libexpr/primops/fetchMercurial.cc | 6 +++ src/libstore/globals.hh | 3 ++ src/libutil/util.cc | 6 +++ src/libutil/util.hh | 6 ++- src/nix-build/nix-build.cc | 4 +- src/nix-instantiate/nix-instantiate.cc | 2 +- tests/fetchGit.sh | 7 +++ tests/fetchMercurial.sh | 7 +++ tests/local.mk | 3 +- tests/pure-eval.nix | 3 ++ tests/pure-eval.sh | 18 +++++++ tests/restricted.nix | 1 + tests/restricted.sh | 10 +++- 19 files changed, 159 insertions(+), 53 deletions(-) create mode 100644 tests/pure-eval.nix create mode 100644 tests/pure-eval.sh create mode 100644 tests/restricted.nix diff --git a/mk/tests.mk b/mk/tests.mk index e353d46a0d0..70c30661b95 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -39,7 +39,7 @@ installcheck: echo "$${red}$$failed out of $$total tests failed $$normal"; \ exit 1; \ else \ - echo "$${green}All tests succeeded"; \ + echo "$${green}All tests succeeded$$normal"; \ fi .PHONY: check installcheck diff --git a/release.nix b/release.nix index 30bec2a6e3e..04f1f836743 100644 --- a/release.nix +++ b/release.nix @@ -6,7 +6,7 @@ let - pkgs = import nixpkgs {}; + pkgs = import nixpkgs { system = "x86_64-linux"; }; jobs = rec { diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 087a95ddef8..f8685e010e1 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -300,16 +300,25 @@ EvalState::EvalState(const Strings & _searchPath, ref store) { countCalls = getEnv("NIX_COUNT_CALLS", "0") != "0"; - restricted = settings.restrictEval; - assert(gcInitialised); /* Initialise the Nix expression search path. */ - Strings paths = parseNixPath(getEnv("NIX_PATH", "")); - for (auto & i : _searchPath) addToSearchPath(i); - for (auto & i : paths) addToSearchPath(i); + if (!settings.pureEval) { + Strings paths = parseNixPath(getEnv("NIX_PATH", "")); + for (auto & i : _searchPath) addToSearchPath(i); + for (auto & i : paths) addToSearchPath(i); + } addToSearchPath("nix=" + settings.nixDataDir + "/nix/corepkgs"); + if (settings.restrictEval || settings.pureEval) { + allowedPaths = PathSet(); + for (auto & i : searchPath) { + auto r = resolveSearchPathElem(i); + if (!r.first) continue; + allowedPaths->insert(r.second); + } + } + clearValue(vEmptySet); vEmptySet.type = tAttrs; vEmptySet.attrs = allocBindings(0); @@ -326,38 +335,39 @@ EvalState::~EvalState() Path EvalState::checkSourcePath(const Path & path_) { - if (!restricted) return path_; + if (!allowedPaths) return path_; + + auto doThrow = [&]() [[noreturn]] { + throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path_); + }; + + bool found = false; + + for (auto & i : *allowedPaths) { + if (isDirOrInDir(path_, i)) { + found = true; + break; + } + } + + if (!found) doThrow(); /* Resolve symlinks. */ debug(format("checking access to '%s'") % path_); Path path = canonPath(path_, true); - for (auto & i : searchPath) { - auto r = resolveSearchPathElem(i); - if (!r.first) continue; - if (path == r.second || isInDir(path, r.second)) + for (auto & i : *allowedPaths) { + if (isDirOrInDir(path, i)) return path; } - /* To support import-from-derivation, allow access to anything in - the store. FIXME: only allow access to paths that have been - constructed by this evaluation. 
*/ - if (store->isInStore(path)) return path; - -#if 0 - /* Hack to support the chroot dependencies of corepkgs (see - corepkgs/config.nix.in). */ - if (path == settings.nixPrefix && isStorePath(settings.nixPrefix)) - return path; -#endif - - throw RestrictedPathError(format("access to path '%1%' is forbidden in restricted mode") % path_); + doThrow(); } void EvalState::checkURI(const std::string & uri) { - if (!restricted) return; + if (!settings.restrictEval) return; /* 'uri' should be equal to a prefix, or in a subdirectory of a prefix. Thus, the prefix https://github.co does not permit @@ -396,7 +406,7 @@ void EvalState::addConstant(const string & name, Value & v) } -void EvalState::addPrimOp(const string & name, +Value * EvalState::addPrimOp(const string & name, unsigned int arity, PrimOpFun primOp) { Value * v = allocValue(); @@ -407,6 +417,7 @@ void EvalState::addPrimOp(const string & name, staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl; baseEnv.values[baseEnvDispl++] = v; baseEnv.values[0]->attrs->push_back(Attr(sym, v)); + return v; } @@ -659,8 +670,10 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) } -void EvalState::evalFile(const Path & path, Value & v) +void EvalState::evalFile(const Path & path_, Value & v) { + auto path = checkSourcePath(path_); + FileEvalCache::iterator i; if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) { v = i->second; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index cc971ae80a4..9e3d30d95f4 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -76,9 +76,9 @@ public: already exist there. */ RepairFlag repair; - /* If set, don't allow access to files outside of the Nix search - path or to environment variables. */ - bool restricted; + /* The allowed filesystem paths in restricted or pure evaluation + mode. */ + std::experimental::optional allowedPaths; Value vEmptySet; @@ -212,7 +212,7 @@ private: void addConstant(const string & name, Value & v); - void addPrimOp(const string & name, + Value * addPrimOp(const string & name, unsigned int arity, PrimOpFun primOp); public: diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 98fe2199e9f..0ec035b8624 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -439,7 +439,7 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v) { string name = state.forceStringNoCtx(*args[0], pos); - mkString(v, state.restricted ? "" : getEnv(name)); + mkString(v, settings.restrictEval || settings.pureEval ? 
"" : getEnv(name)); } @@ -1929,7 +1929,14 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, state.checkURI(url); + if (settings.pureEval && !expectedHash) + throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who); + Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash); + + if (state.allowedPaths) + state.allowedPaths->insert(res); + mkString(v, res, PathSet({res})); } @@ -1981,11 +1988,28 @@ void EvalState::createBaseEnv() mkNull(v); addConstant("null", v); - mkInt(v, time(0)); - addConstant("__currentTime", v); + auto vThrow = addPrimOp("throw", 1, prim_throw); - mkString(v, settings.thisSystem); - addConstant("__currentSystem", v); + auto addPurityError = [&](const std::string & name) { + Value * v2 = allocValue(); + mkString(*v2, fmt("'%s' is not allowed in pure evaluation mode", name)); + mkApp(v, *vThrow, *v2); + addConstant(name, v); + }; + + if (settings.pureEval) + addPurityError("__currentTime"); + else { + mkInt(v, time(0)); + addConstant("__currentTime", v); + } + + if (settings.pureEval) + addPurityError("__currentSystem"); + else { + mkString(v, settings.thisSystem); + addConstant("__currentSystem", v); + } mkString(v, nixVersion); addConstant("__nixVersion", v); @@ -2001,10 +2025,10 @@ void EvalState::createBaseEnv() addConstant("__langVersion", v); // Miscellaneous - addPrimOp("scopedImport", 2, prim_scopedImport); + auto vScopedImport = addPrimOp("scopedImport", 2, prim_scopedImport); Value * v2 = allocValue(); mkAttrs(*v2, 0); - mkApp(v, *baseEnv.values[baseEnvDispl - 1], *v2); + mkApp(v, *vScopedImport, *v2); forceValue(v); addConstant("import", v); if (settings.enableNativeCode) { @@ -2020,7 +2044,6 @@ void EvalState::createBaseEnv() addPrimOp("__isBool", 1, prim_isBool); addPrimOp("__genericClosure", 1, prim_genericClosure); addPrimOp("abort", 1, prim_abort); - addPrimOp("throw", 1, prim_throw); addPrimOp("__addErrorContext", 2, prim_addErrorContext); addPrimOp("__tryEval", 1, prim_tryEval); addPrimOp("__getEnv", 1, prim_getEnv); @@ -2035,7 +2058,10 @@ void EvalState::createBaseEnv() // Paths addPrimOp("__toPath", 1, prim_toPath); - addPrimOp("__storePath", 1, prim_storePath); + if (settings.pureEval) + addPurityError("__storePath"); + else + addPrimOp("__storePath", 1, prim_storePath); addPrimOp("__pathExists", 1, prim_pathExists); addPrimOp("baseNameOf", 1, prim_baseNameOf); addPrimOp("dirOf", 1, prim_dirOf); diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index fb664cffb5b..2e3e2634db8 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -22,10 +22,15 @@ struct GitInfo uint64_t revCount = 0; }; +std::regex revRegex("^[0-9a-fA-F]{40}$"); + GitInfo exportGit(ref store, const std::string & uri, std::experimental::optional ref, std::string rev, const std::string & name) { + if (settings.pureEval && rev == "") + throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision"); + if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) { bool clean = true; @@ -76,11 +81,8 @@ GitInfo exportGit(ref store, const std::string & uri, if (!ref) ref = "master"s; - if (rev != "") { - std::regex revRegex("^[0-9a-fA-F]{40}$"); - if (!std::regex_match(rev, revRegex)) - throw Error("invalid Git revision '%s'", rev); - } + if (rev != "" && !std::regex_match(rev, revRegex)) + throw Error("invalid Git revision '%s'", rev); Path cacheDir = getCacheDir() + "/nix/git"; @@ -231,6 +233,9 @@ static void 
prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va mkString(*state.allocAttr(v, state.symbols.create("shortRev")), gitInfo.shortRev); mkInt(*state.allocAttr(v, state.symbols.create("revCount")), gitInfo.revCount); v.attrs->sort(); + + if (state.allowedPaths) + state.allowedPaths->insert(gitInfo.storePath); } static RegisterPrimOp r("fetchGit", 1, prim_fetchGit); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index a317476c582..5517d83df82 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -27,6 +27,9 @@ std::regex commitHashRegex("^[0-9a-fA-F]{40}$"); HgInfo exportMercurial(ref store, const std::string & uri, std::string rev, const std::string & name) { + if (settings.pureEval && rev == "") + throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision"); + if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) { bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == ""; @@ -196,6 +199,9 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12)); mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount); v.attrs->sort(); + + if (state.allowedPaths) + state.allowedPaths->insert(hgInfo.storePath); } static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index af72f7b1e35..81bb24a4eb3 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -232,6 +232,9 @@ public: "Whether to restrict file system access to paths in $NIX_PATH, " "and network access to the URI prefixes listed in 'allowed-uris'."}; + Setting pureEval{this, false, "pure-eval", + "Whether to restrict file system and network access to files specified by cryptographic hash."}; + Setting buildRepeat{this, 0, "repeat", "The number of times to repeat a build in order to verify determinism.", {"build-repeat"}}; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 197df0c44aa..27299739779 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -192,6 +192,12 @@ bool isInDir(const Path & path, const Path & dir) } +bool isDirOrInDir(const Path & path, const Path & dir) +{ + return path == dir or isInDir(path, dir); +} + + struct stat lstat(const Path & path) { struct stat st; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index a3494e09b09..75eb9751524 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -53,10 +53,12 @@ Path dirOf(const Path & path); following the final `/'. */ string baseNameOf(const Path & path); -/* Check whether a given path is a descendant of the given - directory. */ +/* Check whether 'path' is a descendant of 'dir'. */ bool isInDir(const Path & path, const Path & dir); +/* Check whether 'path' is equal to 'dir' or a descendant of 'dir'. */ +bool isDirOrInDir(const Path & path, const Path & dir); + /* Get status of `path'. */ struct stat lstat(const Path & path); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 58366daa6e8..1b249427537 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -279,8 +279,8 @@ void mainWrapped(int argc, char * * argv) else /* If we're in a #! script, interpret filenames relative to the script. 
*/ - exprs.push_back(state.parseExprFromFile(resolveExprPath(lookupFileArg(state, - inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i)))); + exprs.push_back(state.parseExprFromFile(resolveExprPath(state.checkSourcePath(lookupFileArg(state, + inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))))); } /* Evaluate them into derivations. */ diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 55ac007e868..e05040a42de 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -182,7 +182,7 @@ int main(int argc, char * * argv) for (auto & i : files) { Expr * e = fromArgs ? state.parseExprFromString(i, absPath(".")) - : state.parseExprFromFile(resolveExprPath(lookupFileArg(state, i))); + : state.parseExprFromFile(resolveExprPath(state.checkSourcePath(lookupFileArg(state, i)))); processExpr(state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index b556fe594ce..530ac7bb813 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -29,10 +29,17 @@ rev2=$(git -C $repo rev-parse HEAD) path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") [[ $(cat $path/hello) = world ]] +# In pure eval mode, fetchGit without a revision should fail. +[[ $(nix eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))") = world ]] +(! nix eval --pure-eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))") + # Fetch using an explicit revision hash. path2=$(nix eval --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath") [[ $path = $path2 ]] +# In pure eval mode, fetchGit with a revision should succeed. +[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]] + # Fetch again. This should be cached. mv $repo ${repo}-tmp path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath") diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh index 271350ecd17..4088dbd3979 100644 --- a/tests/fetchMercurial.sh +++ b/tests/fetchMercurial.sh @@ -29,10 +29,17 @@ rev2=$(hg log --cwd $repo -r tip --template '{node}') path=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath") [[ $(cat $path/hello) = world ]] +# In pure eval mode, fetchGit without a revision should fail. +[[ $(nix eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") = world ]] +(! nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") + # Fetch using an explicit revision hash. path2=$(nix eval --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath") [[ $path = $path2 ]] +# In pure eval mode, fetchGit with a revision should succeed. +[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]] + # Fetch again. This should be cached. 
mv $repo ${repo}-tmp path2=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath") diff --git a/tests/local.mk b/tests/local.mk index 83154228e99..82502a8e5f0 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -20,7 +20,8 @@ nix_tests = \ fetchMercurial.sh \ signing.sh \ run.sh \ - brotli.sh + brotli.sh \ + pure-eval.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) diff --git a/tests/pure-eval.nix b/tests/pure-eval.nix new file mode 100644 index 00000000000..ed25b3d4563 --- /dev/null +++ b/tests/pure-eval.nix @@ -0,0 +1,3 @@ +{ + x = 123; +} diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh new file mode 100644 index 00000000000..49c8564487c --- /dev/null +++ b/tests/pure-eval.sh @@ -0,0 +1,18 @@ +source common.sh + +clearStore + +nix eval --pure-eval '(assert 1 + 2 == 3; true)' + +[[ $(nix eval '(builtins.readFile ./pure-eval.sh)') =~ clearStore ]] + +(! nix eval --pure-eval '(builtins.readFile ./pure-eval.sh)') + +(! nix eval --pure-eval '(builtins.currentTime)') +(! nix eval --pure-eval '(builtins.currentSystem)') + +(! nix-instantiate --pure-eval ./simple.nix) + +[[ $(nix eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; })).x)") == 123 ]] +(! nix eval --pure-eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; })).x)") +nix eval --pure-eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; sha256 = \"$(nix hash-file pure-eval.nix --type sha256)\"; })).x)" diff --git a/tests/restricted.nix b/tests/restricted.nix new file mode 100644 index 00000000000..e0ef5840209 --- /dev/null +++ b/tests/restricted.nix @@ -0,0 +1 @@ +1 + 2 diff --git a/tests/restricted.sh b/tests/restricted.sh index c063c8693d5..6c0392facf3 100644 --- a/tests/restricted.sh +++ b/tests/restricted.sh @@ -3,7 +3,8 @@ source common.sh clearStore nix-instantiate --restrict-eval --eval -E '1 + 2' -(! nix-instantiate --restrict-eval ./simple.nix) +(! nix-instantiate --restrict-eval ./restricted.nix) +(! nix-instantiate --eval --restrict-eval <(echo '1 + 2')) nix-instantiate --restrict-eval ./simple.nix -I src=. nix-instantiate --restrict-eval ./simple.nix -I src1=simple.nix -I src2=config.nix -I src3=./simple.builder.sh @@ -28,3 +29,10 @@ nix eval --raw "(builtins.fetchurl file://$(pwd)/restricted.sh)" --restrict-eval (! nix eval --raw "(builtins.fetchurl https://github.com/NixOS/patchelf/archive/master.tar.gz)" --restrict-eval) (! nix eval --raw "(builtins.fetchTarball https://github.com/NixOS/patchelf/archive/master.tar.gz)" --restrict-eval) (! nix eval --raw "(fetchGit git://github.com/NixOS/patchelf.git)" --restrict-eval) + +ln -sfn $(pwd)/restricted.nix $TEST_ROOT/restricted.nix +[[ $(nix-instantiate --eval $TEST_ROOT/restricted.nix) == 3 ]] +(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix) +(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT) +(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I .) +nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT -I . 
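
To illustrate the effect of the restrictions this patch introduces — a sketch distilled from the tests/pure-eval.sh and tests/fetchGit.sh additions above, where $repo stands for a local Git checkout and $rev for one of its commit hashes (both placeholders, set up as in those tests):

    # Unpinned inputs and impure builtins are rejected in pure evaluation mode:
    nix eval --pure-eval '(builtins.currentTime)'                                            # fails: purity error
    nix eval --pure-eval '(builtins.readFile ./pure-eval.sh)'                                # fails: path was not fetched
    nix eval --pure-eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))"    # fails: no 'rev' given

    # Once the input is pinned by a revision hash, the same read succeeds and is reproducible:
    nix eval --pure-eval --raw "(builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev\"; } + \"/hello\"))"

The pinned case works because fetchGit inserts its resulting store path into EvalState::allowedPaths, while the other invocations hit the new purity checks added in eval.cc and primops.cc above.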
From 75b9670df61b3e82e6d60a0572316fdacc9cbd91 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2018 19:03:31 +0100 Subject: [PATCH 0756/2196] Make show-trace a config setting --- src/libmain/shared.cc | 3 --- src/libstore/globals.hh | 3 ++- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 85d3c077ba5..90a4867163d 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -193,9 +193,6 @@ LegacyArgs::LegacyArgs(const std::string & programName, mkFlag(0, "readonly-mode", "do not write to the Nix store", &settings.readOnlyMode); - mkFlag(0, "show-trace", "show Nix expression stack trace in evaluation errors", - &settings.showTrace); - mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'", &gcWarning, false); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 81bb24a4eb3..1e50e2d13e9 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -211,7 +211,8 @@ public: bool lockCPU; /* Whether to show a stack trace if Nix evaluation fails. */ - bool showTrace = false; + Setting showTrace{this, false, "show-trace", + "Whether to show a stack trace on evaluation errors."}; Setting enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation", "Whether builtin functions that allow executing native code should be enabled."}; From 6ddfe9a999027f9867bbf61ae92c19c591c89a86 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2018 19:07:27 +0100 Subject: [PATCH 0757/2196] : Don't access builtins.currentSystem This doesn't work in pure evaluation mode. --- corepkgs/fetchurl.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix index e135b947fdb..0ce1bab112f 100644 --- a/corepkgs/fetchurl.nix +++ b/corepkgs/fetchurl.nix @@ -1,4 +1,4 @@ -{ system ? builtins.currentSystem +{ system ? "" # obsolete , url , md5 ? "", sha1 ? "", sha256 ? "", sha512 ? "" , outputHash ? @@ -17,7 +17,9 @@ derivation { inherit outputHashAlgo outputHash; outputHashMode = if unpack || executable then "recursive" else "flat"; - inherit name system url executable unpack; + inherit name url executable unpack; + + system = "builtin"; # No need to double the amount of network traffic preferLocalBuild = true; From d8b4cfad823524338f87a681b8790b8dbd3a7a1b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2018 11:53:08 +0100 Subject: [PATCH 0758/2196] Typo --- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0ec035b8624..e90a1da25e0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -39,7 +39,7 @@ std::pair decodeContext(const string & s) size_t index = s.find("!", 1); return std::pair(string(s, index + 1), string(s, 1, index - 1)); } else - return std::pair(s.at(0) == '/' ? s: string(s, 1), ""); + return std::pair(s.at(0) == '/' ? 
s : string(s, 1), ""); } From cfeff3b2731fd7e6dd030d4e4d56ff3a3aeac903 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2018 11:53:16 +0100 Subject: [PATCH 0759/2196] Move show-trace docs --- doc/manual/command-ref/conf-file.xml | 8 ++++++++ doc/manual/command-ref/opt-common-syn.xml | 1 - doc/manual/command-ref/opt-common.xml | 7 ------- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 2ddca991f19..fff7994f28d 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -734,6 +734,14 @@ builtins.fetchurl { + show-trace + + Causes Nix to print out a stack trace in case of Nix + expression evaluation errors. + + + + diff --git a/doc/manual/command-ref/opt-common-syn.xml b/doc/manual/command-ref/opt-common-syn.xml index 3aff4e1b635..168bef080f4 100644 --- a/doc/manual/command-ref/opt-common-syn.xml +++ b/doc/manual/command-ref/opt-common-syn.xml @@ -47,7 +47,6 @@ - path diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml index 32d53c753a2..bcb60b30125 100644 --- a/doc/manual/command-ref/opt-common.xml +++ b/doc/manual/command-ref/opt-common.xml @@ -301,13 +301,6 @@ - - - Causes Nix to print out a stack trace in case of Nix - expression evaluation errors. - - - path From 16e0287556cbf04d2642c1091b4fe00fa6e352af Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2018 12:03:06 +0100 Subject: [PATCH 0760/2196] nix eval: Take only one argument Thus --json no longer produces a list. --- src/nix/eval.cc | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 0fbeca1c121..2bc58b7dd4b 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -8,7 +8,7 @@ using namespace nix; -struct CmdEval : MixJSON, InstallablesCommand +struct CmdEval : MixJSON, InstallableCommand { bool raw = false; @@ -56,20 +56,16 @@ struct CmdEval : MixJSON, InstallablesCommand auto state = getEvalState(); - auto jsonOut = json ? 
std::make_unique(std::cout) : nullptr; - - for (auto & i : installables) { - auto v = i->toValue(*state); - PathSet context; - if (raw) { - std::cout << state->coerceToString(noPos, *v, context); - } else if (json) { - auto jsonElem = jsonOut->placeholder(); - printValueAsJSON(*state, true, *v, jsonElem, context); - } else { - state->forceValueDeep(*v); - std::cout << *v << "\n"; - } + auto v = installable->toValue(*state); + PathSet context; + if (raw) { + std::cout << state->coerceToString(noPos, *v, context); + } else if (json) { + JSONPlaceholder jsonOut(std::cout); + printValueAsJSON(*state, true, *v, jsonOut, context); + } else { + state->forceValueDeep(*v); + std::cout << *v << "\n"; } } }; From 27b510af5c3f8ad562dbc90dd39f82236fdf57db Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2018 12:04:44 +0100 Subject: [PATCH 0761/2196] nix eval: Stop progress bar before printing the result --- src/nix/eval.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 2bc58b7dd4b..b7058361cbe 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -5,6 +5,7 @@ #include "eval.hh" #include "json.hh" #include "value-to-json.hh" +#include "progress-bar.hh" using namespace nix; @@ -58,6 +59,9 @@ struct CmdEval : MixJSON, InstallableCommand auto v = installable->toValue(*state); PathSet context; + + stopProgressBar(); + if (raw) { std::cout << state->coerceToString(noPos, *v, context); } else if (json) { From 0c95776c3efdc63c4b957823d4e51d851a64ed84 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2018 16:37:39 +0100 Subject: [PATCH 0762/2196] Don't define builtins.{currentSystem,currentTime} in pure mode This makes it easier to provide a default, e.g. system = builtins.currentSystem or "x86_64-linux"; --- release.nix | 2 +- src/libexpr/primops.cc | 8 ++------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/release.nix b/release.nix index 04f1f836743..bdac283cfe3 100644 --- a/release.nix +++ b/release.nix @@ -6,7 +6,7 @@ let - pkgs = import nixpkgs { system = "x86_64-linux"; }; + pkgs = import nixpkgs { system = builtins.currentSystem or "x86_64-linux"; }; jobs = rec { diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index e90a1da25e0..975f0e8309e 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1997,16 +1997,12 @@ void EvalState::createBaseEnv() addConstant(name, v); }; - if (settings.pureEval) - addPurityError("__currentTime"); - else { + if (!settings.pureEval) { mkInt(v, time(0)); addConstant("__currentTime", v); } - if (settings.pureEval) - addPurityError("__currentSystem"); - else { + if (!settings.pureEval) { mkString(v, settings.thisSystem); addConstant("__currentSystem", v); } From 87e3d142cce442417b52610652b9d1a4fd7a6b4f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 19 Jan 2018 13:58:28 +0100 Subject: [PATCH 0763/2196] Add a test for --check / --repeat --- tests/check.nix | 12 ++++++++++++ tests/check.sh | 18 ++++++++++++++++++ tests/local.mk | 3 ++- 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 tests/check.nix create mode 100644 tests/check.sh diff --git a/tests/check.nix b/tests/check.nix new file mode 100644 index 00000000000..b330ab9c984 --- /dev/null +++ b/tests/check.nix @@ -0,0 +1,12 @@ +with import ./config.nix; + +{ + nondeterministic = mkDerivation { + name = "nondeterministic"; + buildCommand = + '' + mkdir $out + date +%s.%N > $out/date + ''; + }; +} diff --git a/tests/check.sh b/tests/check.sh new file mode 100644 index 
00000000000..3efcef04492 --- /dev/null +++ b/tests/check.sh @@ -0,0 +1,18 @@ +source common.sh + +clearStore + +nix-build dependencies.nix --no-out-link +nix-build dependencies.nix --no-out-link --check + +nix-build check.nix -A nondeterministic --no-out-link +(! nix-build check.nix -A nondeterministic --no-out-link --check 2> $TEST_ROOT/log) +grep 'may not be deterministic' $TEST_ROOT/log + +clearStore + +nix-build dependencies.nix --no-out-link --repeat 3 + +(! nix-build check.nix -A nondeterministic --no-out-link --repeat 1 2> $TEST_ROOT/log) +grep 'differs from previous round' $TEST_ROOT/log + diff --git a/tests/local.mk b/tests/local.mk index 82502a8e5f0..e90b9f7da4a 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -21,7 +21,8 @@ nix_tests = \ signing.sh \ run.sh \ brotli.sh \ - pure-eval.sh + pure-eval.sh \ + check.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) From 2896bb682691e8ce2be055a386bdeef93ae05586 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 19 Jan 2018 14:05:08 +0100 Subject: [PATCH 0764/2196] Don't retry CURLE_URL_MALFORMAT --- src/libstore/download.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 4474dfd4b96..ef417685f1a 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -303,6 +303,7 @@ struct CurlDownloader : public Downloader // Don't bother retrying on certain cURL errors either switch (code) { case CURLE_FAILED_INIT: + case CURLE_URL_MALFORMAT: case CURLE_NOT_BUILT_IN: case CURLE_REMOTE_ACCESS_DENIED: case CURLE_FILE_COULDNT_READ_FILE: From 3c4c30eadd879f512ac2075a7ba39c37ff77bf5c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 19 Jan 2018 14:53:34 +0100 Subject: [PATCH 0765/2196] Rewrite builtin derivation environment Also add a test. Fixes #1803. Closes #1805. --- src/libstore/build.cc | 7 ++++++- tests/check.nix | 5 +++++ tests/check.sh | 14 ++++++++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 523d737d9bf..55066205db3 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2933,8 +2933,13 @@ void DerivationGoal::runChild() if (drv->isBuiltin()) { try { logger = makeJSONLogger(*logger); + + BasicDerivation drv2(*drv); + for (auto & e : drv2.env) + e.second = rewriteStrings(e.second, inputRewrites); + if (drv->builder == "builtin:fetchurl") - builtinFetchurl(*drv, netrcData); + builtinFetchurl(drv2, netrcData); else throw Error(format("unsupported builtin function '%1%'") % string(drv->builder, 8)); _exit(0); diff --git a/tests/check.nix b/tests/check.nix index b330ab9c984..585d43032a9 100644 --- a/tests/check.nix +++ b/tests/check.nix @@ -9,4 +9,9 @@ with import ./config.nix; date +%s.%N > $out/date ''; }; + + fetchurl = import { + url = "file://" + toString ./lang/eval-okay-xml.out; + sha256 = "426fefcd2430e986551db13fcc2b1e45eeec17e68ffeb6ff155be2f8aaf5407e"; + }; } diff --git a/tests/check.sh b/tests/check.sh index 3efcef04492..a3067e06f14 100644 --- a/tests/check.sh +++ b/tests/check.sh @@ -16,3 +16,17 @@ nix-build dependencies.nix --no-out-link --repeat 3 (! nix-build check.nix -A nondeterministic --no-out-link --repeat 1 2> $TEST_ROOT/log) grep 'differs from previous round' $TEST_ROOT/log +path=$(nix-build check.nix -A fetchurl --no-out-link) + +chmod +w $path +echo foo > $path +chmod -w $path + +nix-build check.nix -A fetchurl --no-out-link --check + +# Note: "check" doesn't repair anything, it just compares to the hash stored in the database. 
+[[ $(cat $path) = foo ]] + +nix-build check.nix -A fetchurl --no-out-link --repair + +[[ $(cat $path) != foo ]] From 89a2a11d9f39f60097619212d3fed9ab8c216c8b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 19 Jan 2018 14:58:26 +0100 Subject: [PATCH 0766/2196] Don't use [[noreturn]] --- src/libexpr/eval.cc | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f8685e010e1..33a9bc61428 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -337,10 +337,6 @@ Path EvalState::checkSourcePath(const Path & path_) { if (!allowedPaths) return path_; - auto doThrow = [&]() [[noreturn]] { - throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path_); - }; - bool found = false; for (auto & i : *allowedPaths) { @@ -350,7 +346,8 @@ Path EvalState::checkSourcePath(const Path & path_) } } - if (!found) doThrow(); + if (!found) + throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path_); /* Resolve symlinks. */ debug(format("checking access to '%s'") % path_); @@ -361,7 +358,7 @@ Path EvalState::checkSourcePath(const Path & path_) return path; } - doThrow(); + throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path); } From 9304fde8de3b9a54200025701199dcb51725c8b1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 22 Jan 2018 16:56:10 +0100 Subject: [PATCH 0767/2196] Don't access tarballs.nixos.org in a test https://hydra.nixos.org/build/67806811 --- tests/check.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/check.sh b/tests/check.sh index a3067e06f14..b05e40ffbee 100644 --- a/tests/check.sh +++ b/tests/check.sh @@ -16,17 +16,17 @@ nix-build dependencies.nix --no-out-link --repeat 3 (! nix-build check.nix -A nondeterministic --no-out-link --repeat 1 2> $TEST_ROOT/log) grep 'differs from previous round' $TEST_ROOT/log -path=$(nix-build check.nix -A fetchurl --no-out-link) +path=$(nix-build check.nix -A fetchurl --no-out-link --hashed-mirrors '') chmod +w $path echo foo > $path chmod -w $path -nix-build check.nix -A fetchurl --no-out-link --check +nix-build check.nix -A fetchurl --no-out-link --check --hashed-mirrors '' # Note: "check" doesn't repair anything, it just compares to the hash stored in the database. 
[[ $(cat $path) = foo ]] -nix-build check.nix -A fetchurl --no-out-link --repair +nix-build check.nix -A fetchurl --no-out-link --repair --hashed-mirrors '' [[ $(cat $path) != foo ]] From c382866cd26e19900c638e3724af89cf599f1f46 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 22 Jan 2018 17:04:08 +0100 Subject: [PATCH 0768/2196] Fix test https://hydra.nixos.org/build/67806811 --- tests/check.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/check.nix b/tests/check.nix index 585d43032a9..08aac2fb0a7 100644 --- a/tests/check.nix +++ b/tests/check.nix @@ -11,7 +11,7 @@ with import ./config.nix; }; fetchurl = import { - url = "file://" + toString ./lang/eval-okay-xml.out; - sha256 = "426fefcd2430e986551db13fcc2b1e45eeec17e68ffeb6ff155be2f8aaf5407e"; + url = "file://" + toString ./lang/eval-okay-xml.exp.xml; + sha256 = "0kg4sla7ihm8ijr8cb3117fhl99zrc2bwy1jrngsfmkh8bav4m0v"; }; } From f7c26365ebc6b24dd41cf1c953b002737f9d6187 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 22 Jan 2018 12:19:50 -0600 Subject: [PATCH 0769/2196] nlohmann-json: 2.1.1 -> 3.0.1 --- src/nlohmann/json.hpp | 1713 ++++++++++++++++++++++------------------- 1 file changed, 932 insertions(+), 781 deletions(-) diff --git a/src/nlohmann/json.hpp b/src/nlohmann/json.hpp index 9754e464c76..5b0b0ea5b30 100644 --- a/src/nlohmann/json.hpp +++ b/src/nlohmann/json.hpp @@ -1,7 +1,7 @@ /* __ _____ _____ _____ __| | __| | | | JSON for Modern C++ -| | |__ | | | | | | version 2.1.1 +| | |__ | | | | | | version 3.0.1 |_____|_____|_____|_|___| https://github.com/nlohmann/json Licensed under the MIT License . @@ -109,7 +109,7 @@ SOFTWARE. #define JSON_UNLIKELY(x) x #endif -// cpp language standard detection +// C++ language standard detection #if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 #define JSON_HAS_CPP_17 #define JSON_HAS_CPP_14 @@ -128,20 +128,18 @@ template struct adl_serializer; // forward declaration of basic_json (required to split the class) -template class ObjectType = - std::map, - template class ArrayType = std::vector, +template class ObjectType = std::map, + template class ArrayType = std::vector, class StringType = std::string, class BooleanType = bool, class NumberIntegerType = std::int64_t, class NumberUnsignedType = std::uint64_t, class NumberFloatType = double, - template class AllocatorType = std::allocator, - template class JSONSerializer = - adl_serializer> + template class AllocatorType = std::allocator, + template class JSONSerializer = adl_serializer> class basic_json; -// Ugly macros to avoid uglier copy-paste when specializing basic_json -// This is only temporary and will be removed in 3.0 +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. #define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ template class ObjectType, \ @@ -227,7 +225,7 @@ class exception : public std::exception /*! @brief exception indicating a parse error -This excpetion is thrown by the library when a parse error occurs. Parse errors +This exception is thrown by the library when a parse error occurs. Parse errors can occur during the deserialization of JSON text, CBOR, MessagePack, as well as when using JSON Patch. @@ -243,12 +241,12 @@ json.exception.parse_error.102 | parse error at 14: missing or wrong low surroga json.exception.parse_error.103 | parse error: code points above 0x10FFFF are invalid | Unicode supports code points up to 0x10FFFF. 
Code points above 0x10FFFF are invalid. json.exception.parse_error.104 | parse error: JSON patch must be an array of objects | [RFC 6902](https://tools.ietf.org/html/rfc6902) requires a JSON Patch document to be a JSON document that represents an array of objects. json.exception.parse_error.105 | parse error: operation must have string member 'op' | An operation of a JSON Patch document must contain exactly one "op" member, whose value indicates the operation to perform. Its value must be one of "add", "remove", "replace", "move", "copy", or "test"; other values are errors. -json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number wihtout a leading `0`. +json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number without a leading `0`. json.exception.parse_error.107 | parse error: JSON pointer must be empty or begin with '/' - was: 'foo' | A JSON Pointer must be a Unicode string containing a sequence of zero or more reference tokens, each prefixed by a `/` character. json.exception.parse_error.108 | parse error: escape character '~' must be followed with '0' or '1' | In a JSON Pointer, only `~0` and `~1` are valid escape sequences. json.exception.parse_error.109 | parse error: array index 'one' is not a number | A JSON Pointer array index must be a number. json.exception.parse_error.110 | parse error at 1: cannot read 2 bytes from vector | When parsing CBOR or MessagePack, the byte vector ends before the complete value has been read. -json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xf8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read. +json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xF8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read. json.exception.parse_error.113 | parse error at 2: expected a CBOR string; last byte: 0x98 | While parsing a map key, a value that is not a string has been read. @note For an input with n bytes, 1 is the index of the first character and n+1 @@ -378,6 +376,7 @@ json.exception.type_error.312 | cannot use update() with string | The @ref updat json.exception.type_error.313 | invalid value to unflatten | The @ref unflatten function converts an object whose keys are JSON Pointers back into an arbitrary nested JSON value. The JSON Pointers must not overlap, because then the resulting value would not be well defined. json.exception.type_error.314 | only objects can be unflattened | The @ref unflatten function only works for an object whose keys are JSON Pointers. json.exception.type_error.315 | values in object must be primitive | The @ref unflatten function only works for an object whose keys are JSON Pointers and whose values are primitive. +json.exception.type_error.316 | invalid UTF-8 byte at index 10: 0x7E | The @ref dump function only works with UTF-8 encoded strings; that is, if you assign a `std::string` to a JSON value, make sure it is UTF-8 encoded. | @liveexample{The following code shows how a `type_error` exception can be caught.,type_error} @@ -457,7 +456,6 @@ Exceptions have ids 5xx. 
name / id | example message | description ------------------------------ | --------------- | ------------------------- json.exception.other_error.501 | unsuccessful: {"op":"test","path":"/baz", "value":"bar"} | A JSON Patch operation 'test' failed. The unsuccessful operation is also printed. -json.exception.other_error.502 | invalid object size for conversion | Some conversions to user-defined types impose constraints on the object size (e.g. std::pair) @sa @ref exception for the base class of the library exceptions @sa @ref parse_error for exceptions indicating a parse error @@ -540,20 +538,14 @@ Returns an ordering that is similar to Python: inline bool operator<(const value_t lhs, const value_t rhs) noexcept { static constexpr std::array order = {{ - 0, // null - 3, // object - 4, // array - 5, // string - 1, // boolean - 2, // integer - 2, // unsigned - 2, // float + 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */, + 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */ } }; const auto l_index = static_cast(lhs); const auto r_index = static_cast(rhs); - return (l_index < order.size() and r_index < order.size() and order[l_index] < order[r_index]); + return l_index < order.size() and r_index < order.size() and order[l_index] < order[r_index]; } @@ -591,17 +583,15 @@ struct merge_and_renumber; template struct merge_and_renumber, index_sequence> - : index_sequence < I1..., (sizeof...(I1) + I2)... > - {}; + : index_sequence < I1..., (sizeof...(I1) + I2)... > {}; template struct make_index_sequence : merge_and_renumber < typename make_index_sequence < N / 2 >::type, - typename make_index_sequence < N - N / 2 >::type > -{}; + typename make_index_sequence < N - N / 2 >::type > {}; -template<> struct make_index_sequence<0> : index_sequence<> { }; -template<> struct make_index_sequence<1> : index_sequence<0> { }; +template<> struct make_index_sequence<0> : index_sequence<> {}; +template<> struct make_index_sequence<1> : index_sequence<0> {}; template using index_sequence_for = make_index_sequence; @@ -624,7 +614,7 @@ template struct conjunction : B1 {}; template struct conjunction : std::conditional, B1>::type {}; -template struct negation : std::integral_constant < bool, !B::value > {}; +template struct negation : std::integral_constant {}; // dispatch utility (taken from ranges-v3) template struct priority_tag : priority_tag < N - 1 > {}; @@ -725,8 +715,7 @@ struct external_constructor } template::value, + enable_if_t::value, int> = 0> static void construct(BasicJsonType& j, const CompatibleArrayType& arr) { @@ -743,7 +732,7 @@ struct external_constructor j.m_type = value_t::array; j.m_value = value_t::array; j.m_value.array->reserve(arr.size()); - for (bool x : arr) + for (const bool x : arr) { j.m_value.array->push_back(x); } @@ -782,8 +771,7 @@ struct external_constructor } template::value, int> = 0> + enable_if_t::value, int> = 0> static void construct(BasicJsonType& j, const CompatibleObjectType& obj) { using std::begin; @@ -896,7 +884,7 @@ struct is_compatible_integer_type is_compatible_integer_type_impl < std::is_integral::value and not std::is_same::value, - RealIntegerType, CompatibleNumberIntegerType > ::value; + RealIntegerType, CompatibleNumberIntegerType >::value; }; @@ -922,10 +910,8 @@ template struct has_non_default_from_json { private: - template < - typename U, - typename = enable_if_t::from_json(std::declval()))>::value >> + template::from_json(std::declval()))>::value>> static int detect(U&&); static void detect(...); @@ -954,22 +940,21 @@ struct 
has_to_json // to_json // ///////////// -template::value, int> = 0> +template::value, int> = 0> void to_json(BasicJsonType& j, T b) noexcept { external_constructor::construct(j, b); } template::value, int> = 0> + enable_if_t::value, int> = 0> void to_json(BasicJsonType& j, const CompatibleString& s) { external_constructor::construct(j, s); } -template +template void to_json(BasicJsonType& j, typename BasicJsonType::string_t&& s) { external_constructor::construct(j, std::move(s)); @@ -982,19 +967,15 @@ void to_json(BasicJsonType& j, FloatType val) noexcept external_constructor::construct(j, static_cast(val)); } -template < - typename BasicJsonType, typename CompatibleNumberUnsignedType, - enable_if_t::value, int> = 0 > +template::value, int> = 0> void to_json(BasicJsonType& j, CompatibleNumberUnsignedType val) noexcept { external_constructor::construct(j, static_cast(val)); } -template < - typename BasicJsonType, typename CompatibleNumberIntegerType, - enable_if_t::value, int> = 0 > +template::value, int> = 0> void to_json(BasicJsonType& j, CompatibleNumberIntegerType val) noexcept { external_constructor::construct(j, static_cast(val)); @@ -1014,49 +995,43 @@ void to_json(BasicJsonType& j, const std::vector& e) external_constructor::construct(j, e); } -template < - typename BasicJsonType, typename CompatibleArrayType, - enable_if_t < - is_compatible_array_type::value or - std::is_same::value, - int > = 0 > +template::value or + std::is_same::value, + int> = 0> void to_json(BasicJsonType& j, const CompatibleArrayType& arr) { external_constructor::construct(j, arr); } -template ::value, int> = 0> +template::value, int> = 0> void to_json(BasicJsonType& j, std::valarray arr) { external_constructor::construct(j, std::move(arr)); } -template +template void to_json(BasicJsonType& j, typename BasicJsonType::array_t&& arr) { external_constructor::construct(j, std::move(arr)); } -template < - typename BasicJsonType, typename CompatibleObjectType, - enable_if_t::value, - int> = 0 > +template::value, int> = 0> void to_json(BasicJsonType& j, const CompatibleObjectType& obj) { external_constructor::construct(j, obj); } -template +template void to_json(BasicJsonType& j, typename BasicJsonType::object_t&& obj) { external_constructor::construct(j, std::move(obj)); } template::value, - int> = 0> + enable_if_t::value, int> = 0> void to_json(BasicJsonType& j, T (&arr)[N]) { external_constructor::construct(j, arr); @@ -1087,8 +1062,7 @@ void to_json(BasicJsonType& j, const std::tuple& t) // overloads for basic_json template parameters template::value and - not std::is_same::value, + not std::is_same::value, int> = 0> void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val) { @@ -1351,6 +1325,13 @@ struct to_json_fn { static_assert(sizeof(BasicJsonType) == 0, "could not find to_json() method in T's namespace"); + +#ifdef _MSC_VER + // MSVC does not show a stacktrace for the above assert + using decayed = uncvref_t; + static_assert(sizeof(typename decayed::force_msvc_stacktrace) == 0, + "forcing MSVC stacktrace to show which T we're talking about."); +#endif } public: @@ -1378,6 +1359,12 @@ struct from_json_fn { static_assert(sizeof(BasicJsonType) == 0, "could not find from_json() method in T's namespace"); +#ifdef _MSC_VER + // MSVC does not show a stacktrace for the above assert + using decayed = uncvref_t; + static_assert(sizeof(typename decayed::force_msvc_stacktrace) == 0, + "forcing MSVC stacktrace to show which T we're talking about."); +#endif } public: @@ -1448,7 +1435,7 @@ class 
input_stream_adapter : public input_adapter_protocol explicit input_stream_adapter(std::istream& i) : is(i), sb(*i.rdbuf()) { - // ignore Byte Order Mark at start of input + // skip byte order mark std::char_traits::int_type c; if ((c = get_character()) == 0xEF) { @@ -1472,7 +1459,7 @@ class input_stream_adapter : public input_adapter_protocol } else if (c != std::char_traits::eof()) { - is.unget(); // Not BOM. Process as usual. + is.unget(); // no byte order mark; process as usual } } @@ -1481,8 +1468,8 @@ class input_stream_adapter : public input_adapter_protocol input_stream_adapter& operator=(input_stream_adapter&) = delete; // std::istream/std::streambuf use std::char_traits::to_int_type, to - // ensure that std::char_traits::eof() and the character 0xff do not - // end up as the same value, eg. 0xffffffff. + // ensure that std::char_traits::eof() and the character 0xFF do not + // end up as the same value, eg. 0xFFFFFFFF. std::char_traits::int_type get_character() override { return sb.sbumpc(); @@ -1561,8 +1548,7 @@ class input_adapter template::value and - std::is_integral< - typename std::remove_pointer::type>::value and + std::is_integral::type>::value and sizeof(typename std::remove_pointer::type) == 1, int>::type = 0> input_adapter(CharT b, std::size_t l) @@ -1574,8 +1560,7 @@ class input_adapter template::value and - std::is_integral< - typename std::remove_pointer::type>::value and + std::is_integral::type>::value and sizeof(typename std::remove_pointer::type) == 1, int>::type = 0> input_adapter(CharT b) @@ -1585,8 +1570,7 @@ class input_adapter /// input adapter for iterator range with contiguous storage template::iterator_category, - std::random_access_iterator_tag>::value, + std::is_same::iterator_category, std::random_access_iterator_tag>::value, int>::type = 0> input_adapter(IteratorType first, IteratorType last) { @@ -1624,13 +1608,10 @@ class input_adapter : input_adapter(std::begin(array), std::end(array)) {} /// input adapter for contiguous container - template < - class ContiguousContainer, - typename std::enable_if < - not std::is_pointer::value and - std::is_base_of()))>::iterator_category>::value, - int >::type = 0 > + template::value and + std::is_base_of()))>::iterator_category>::value, + int>::type = 0> input_adapter(const ContiguousContainer& c) : input_adapter(std::begin(c), std::end(c)) {} @@ -1804,6 +1785,12 @@ class lexer checks if it is inside the range. If a violation was detected, set up an error message and return false. Otherwise, return true. + @param[in] ranges list of integers; interpreted as list of pairs of + inclusive lower and upper bound, respectively + + @pre The passed list @a ranges must have 2, 4, or 6 elements; that is, + 1, 2, or 3 pairs. This precondition is enforced by an assertion. 
+ @return true if and only if no range violation was detected */ bool next_byte_in_range(std::initializer_list ranges) @@ -1970,19 +1957,19 @@ class lexer // result of the above calculation yields a proper codepoint assert(0x00 <= codepoint and codepoint <= 0x10FFFF); - // translate code point to bytes + // translate codepoint into bytes if (codepoint < 0x80) { // 1-byte characters: 0xxxxxxx (ASCII) add(codepoint); } - else if (codepoint <= 0x7ff) + else if (codepoint <= 0x7FF) { // 2-byte characters: 110xxxxx 10xxxxxx add(0xC0 | (codepoint >> 6)); add(0x80 | (codepoint & 0x3F)); } - else if (codepoint <= 0xffff) + else if (codepoint <= 0xFFFF) { // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx add(0xE0 | (codepoint >> 12)); @@ -2021,12 +2008,12 @@ class lexer case 0x07: case 0x08: case 0x09: - case 0x0a: - case 0x0b: - case 0x0c: - case 0x0d: - case 0x0e: - case 0x0f: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -2037,12 +2024,12 @@ class lexer case 0x17: case 0x18: case 0x19: - case 0x1a: - case 0x1b: - case 0x1c: - case 0x1d: - case 0x1e: - case 0x1f: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: { error_message = "invalid string: control character must be escaped"; return token_type::parse_error; @@ -2058,12 +2045,12 @@ class lexer case 0x27: case 0x28: case 0x29: - case 0x2a: - case 0x2b: - case 0x2c: - case 0x2d: - case 0x2e: - case 0x2f: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: case 0x30: case 0x31: case 0x32: @@ -2074,12 +2061,12 @@ class lexer case 0x37: case 0x38: case 0x39: - case 0x3a: - case 0x3b: - case 0x3c: - case 0x3d: - case 0x3e: - case 0x3f: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: case 0x40: case 0x41: case 0x42: @@ -2090,12 +2077,12 @@ class lexer case 0x47: case 0x48: case 0x49: - case 0x4a: - case 0x4b: - case 0x4c: - case 0x4d: - case 0x4e: - case 0x4f: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: case 0x50: case 0x51: case 0x52: @@ -2106,11 +2093,11 @@ class lexer case 0x57: case 0x58: case 0x59: - case 0x5a: - case 0x5b: - case 0x5d: - case 0x5e: - case 0x5f: + case 0x5A: + case 0x5B: + case 0x5D: + case 0x5E: + case 0x5F: case 0x60: case 0x61: case 0x62: @@ -2121,12 +2108,12 @@ class lexer case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -2137,48 +2124,48 @@ class lexer case 0x77: case 0x78: case 0x79: - case 0x7a: - case 0x7b: - case 0x7c: - case 0x7d: - case 0x7e: - case 0x7f: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: { add(current); break; } // U+0080..U+07FF: bytes C2..DF 80..BF - case 0xc2: - case 0xc3: - case 0xc4: - case 0xc5: - case 0xc6: - case 0xc7: - case 0xc8: - case 0xc9: - case 0xca: - case 0xcb: - case 0xcc: - case 0xcd: - case 0xce: - case 0xcf: - case 0xd0: - case 0xd1: - case 0xd2: - case 0xd3: - case 0xd4: - case 0xd5: - case 0xd6: - case 0xd7: - case 0xd8: - case 0xd9: - case 0xda: - case 0xdb: - case 0xdc: - case 0xdd: - case 0xde: - case 0xdf: + case 0xC2: + case 0xC3: + case 0xC4: + case 0xC5: + case 0xC6: + case 0xC7: + case 0xC8: + case 0xC9: + case 0xCA: + case 0xCB: + case 0xCC: + case 0xCD: + case 0xCE: + case 0xCF: + case 0xD0: + case 0xD1: + case 0xD2: + case 0xD3: + case 0xD4: + case 0xD5: + case 0xD6: + case 0xD7: + 
case 0xD8: + case 0xD9: + case 0xDA: + case 0xDB: + case 0xDC: + case 0xDD: + case 0xDE: + case 0xDF: { if (JSON_UNLIKELY(not next_byte_in_range({0x80, 0xBF}))) { @@ -2188,7 +2175,7 @@ class lexer } // U+0800..U+0FFF: bytes E0 A0..BF 80..BF - case 0xe0: + case 0xE0: { if (JSON_UNLIKELY(not (next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF})))) { @@ -2199,20 +2186,20 @@ class lexer // U+1000..U+CFFF: bytes E1..EC 80..BF 80..BF // U+E000..U+FFFF: bytes EE..EF 80..BF 80..BF - case 0xe1: - case 0xe2: - case 0xe3: - case 0xe4: - case 0xe5: - case 0xe6: - case 0xe7: - case 0xe8: - case 0xe9: - case 0xea: - case 0xeb: - case 0xec: - case 0xee: - case 0xef: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xEE: + case 0xEF: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF})))) { @@ -2222,7 +2209,7 @@ class lexer } // U+D000..U+D7FF: bytes ED 80..9F 80..BF - case 0xed: + case 0xED: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x9F, 0x80, 0xBF})))) { @@ -2232,7 +2219,7 @@ class lexer } // U+10000..U+3FFFF F0 90..BF 80..BF 80..BF - case 0xf0: + case 0xF0: { if (JSON_UNLIKELY(not (next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) { @@ -2242,9 +2229,9 @@ class lexer } // U+40000..U+FFFFF F1..F3 80..BF 80..BF 80..BF - case 0xf1: - case 0xf2: - case 0xf3: + case 0xF1: + case 0xF2: + case 0xF3: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF})))) { @@ -2254,7 +2241,7 @@ class lexer } // U+100000..U+10FFFF F4 80..8F 80..BF 80..BF - case 0xf4: + case 0xF4: { if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF})))) { @@ -2772,9 +2759,9 @@ class lexer { // escape control characters std::string result; - for (auto c : token_string) + for (const auto c : token_string) { - if ('\x00' <= c and c <= '\x1f') + if ('\x00' <= c and c <= '\x1F') { // escape control characters std::stringstream ss; @@ -2877,10 +2864,10 @@ class lexer std::size_t chars_read = 0; /// raw input token string (for error messages) - std::vector token_string { }; + std::vector token_string {}; /// buffer for variable-length tokens (numbers, strings) - std::string yytext { }; + std::string yytext {}; /// a description of occurred lexer errors const char* error_message = ""; @@ -3281,7 +3268,7 @@ class parser } /*! - @brief the acutal acceptor + @brief the actual acceptor @invariant 1. The last token is not yet processed. Therefore, the caller of this function must make sure a token has been read. 
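The case labels in the hunk above encode the "well-formed UTF-8 byte sequences" table from the Unicode standard: each lead byte selects a distinct set of allowed continuation ranges, which the lexer checks pairwise via next_byte_in_range(). The following standalone sketch restates those same ranges in one place; it is illustrative only and not part of the patch, and the function name and signature are made up.

#include <cstddef>
#include <cstdint>

// Sketch of the lead-byte / continuation-byte ranges used by the lexer cases
// above (Unicode "well-formed UTF-8 byte sequences" table); illustrative only.
inline bool utf8_sequence_well_formed(const std::uint8_t* p, std::size_t len)
{
    const auto in = [](std::uint8_t b, std::uint8_t lo, std::uint8_t hi)
    {
        return lo <= b and b <= hi;
    };

    switch (len)
    {
        case 1:  // U+0000..U+007F (ASCII)
            return p[0] <= 0x7F;
        case 2:  // U+0080..U+07FF: C2..DF 80..BF
            return in(p[0], 0xC2, 0xDF) and in(p[1], 0x80, 0xBF);
        case 3:  // U+0800..U+FFFF, excluding overlong forms (E0) and surrogates (ED)
            return (p[0] == 0xE0 and in(p[1], 0xA0, 0xBF) and in(p[2], 0x80, 0xBF)) or
                   ((in(p[0], 0xE1, 0xEC) or in(p[0], 0xEE, 0xEF)) and
                    in(p[1], 0x80, 0xBF) and in(p[2], 0x80, 0xBF)) or
                   (p[0] == 0xED and in(p[1], 0x80, 0x9F) and in(p[2], 0x80, 0xBF));
        case 4:  // U+10000..U+10FFFF, excluding overlong forms (F0) and values above U+10FFFF (F4)
            return (p[0] == 0xF0 and in(p[1], 0x90, 0xBF) and in(p[2], 0x80, 0xBF) and in(p[3], 0x80, 0xBF)) or
                   (in(p[0], 0xF1, 0xF3) and in(p[1], 0x80, 0xBF) and in(p[2], 0x80, 0xBF) and in(p[3], 0x80, 0xBF)) or
                   (p[0] == 0xF4 and in(p[1], 0x80, 0x8F) and in(p[2], 0x80, 0xBF) and in(p[3], 0x80, 0xBF));
        default:
            return false;
    }
}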
@@ -3539,7 +3526,7 @@ class primitive_iterator_t return *this; } - primitive_iterator_t operator++(int) + primitive_iterator_t const operator++(int) { auto result = *this; m_it++; @@ -3552,7 +3539,7 @@ class primitive_iterator_t return *this; } - primitive_iterator_t operator--(int) + primitive_iterator_t const operator--(int) { auto result = *this; m_it--; @@ -3618,7 +3605,7 @@ This class implements a both iterators (iterator and const_iterator) for the iterators in version 3.0.0 (see https://github.com/nlohmann/json/issues/593) */ template -class iter_impl : public std::iterator +class iter_impl { /// allow basic_json to access private members friend iter_impl::value, typename std::remove_const::type, const BasicJsonType>::type>; @@ -3632,6 +3619,14 @@ class iter_impl : public std::iterator { public: using difference_type = std::ptrdiff_t; - /// shortcut to the reverse iterator adaptor + /// shortcut to the reverse iterator adapter using base_iterator = std::reverse_iterator; /// the reference type for the pointed-to element using reference = typename Base::reference; @@ -4304,7 +4299,7 @@ class json_reverse_iterator : public std::reverse_iterator json_reverse_iterator(const base_iterator& it) noexcept : base_iterator(it) {} /// post-increment (it++) - json_reverse_iterator operator++(int) + json_reverse_iterator const operator++(int) { return static_cast(base_iterator::operator++(1)); } @@ -4316,7 +4311,7 @@ class json_reverse_iterator : public std::reverse_iterator } /// post-decrement (it--) - json_reverse_iterator operator--(int) + json_reverse_iterator const operator--(int) { return static_cast(base_iterator::operator--(1)); } @@ -4576,12 +4571,12 @@ class binary_reader case 0x07: case 0x08: case 0x09: - case 0x0a: - case 0x0b: - case 0x0c: - case 0x0d: - case 0x0e: - case 0x0f: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -4598,10 +4593,10 @@ class binary_reader case 0x19: // Unsigned integer (two-byte uint16_t follows) return get_number(); - case 0x1a: // Unsigned integer (four-byte uint32_t follows) + case 0x1A: // Unsigned integer (four-byte uint32_t follows) return get_number(); - case 0x1b: // Unsigned integer (eight-byte uint64_t follows) + case 0x1B: // Unsigned integer (eight-byte uint64_t follows) return get_number(); // Negative integer -1-0x00..-1-0x17 (-1..-24) @@ -4615,12 +4610,12 @@ class binary_reader case 0x27: case 0x28: case 0x29: - case 0x2a: - case 0x2b: - case 0x2c: - case 0x2d: - case 0x2e: - case 0x2f: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: case 0x30: case 0x31: case 0x32: @@ -4642,12 +4637,12 @@ class binary_reader return static_cast(-1) - get_number(); } - case 0x3a: // Negative integer -1-n (four-byte uint32_t follows) + case 0x3A: // Negative integer -1-n (four-byte uint32_t follows) { return static_cast(-1) - get_number(); } - case 0x3b: // Negative integer -1-n (eight-byte uint64_t follows) + case 0x3B: // Negative integer -1-n (eight-byte uint64_t follows) { return static_cast(-1) - static_cast(get_number()); @@ -4664,12 +4659,12 @@ class binary_reader case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -4680,9 +4675,9 @@ class binary_reader case 0x77: case 0x78: // UTF-8 string (one-byte uint8_t for n follows) case 0x79: // UTF-8 string (two-byte uint16_t for n follow) - case 
0x7a: // UTF-8 string (four-byte uint32_t for n follow) - case 0x7b: // UTF-8 string (eight-byte uint64_t for n follow) - case 0x7f: // UTF-8 string (indefinite length) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7F: // UTF-8 string (indefinite length) { return get_cbor_string(); } @@ -4698,12 +4693,12 @@ class binary_reader case 0x87: case 0x88: case 0x89: - case 0x8a: - case 0x8b: - case 0x8c: - case 0x8d: - case 0x8e: - case 0x8f: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: case 0x90: case 0x91: case 0x92: @@ -4713,7 +4708,7 @@ class binary_reader case 0x96: case 0x97: { - return get_cbor_array(current & 0x1f); + return get_cbor_array(current & 0x1F); } case 0x98: // array (one-byte uint8_t for n follows) @@ -4726,20 +4721,20 @@ class binary_reader return get_cbor_array(get_number()); } - case 0x9a: // array (four-byte uint32_t for n follow) + case 0x9A: // array (four-byte uint32_t for n follow) { return get_cbor_array(get_number()); } - case 0x9b: // array (eight-byte uint64_t for n follow) + case 0x9B: // array (eight-byte uint64_t for n follow) { return get_cbor_array(get_number()); } - case 0x9f: // array (indefinite length) + case 0x9F: // array (indefinite length) { BasicJsonType result = value_t::array; - while (get() != 0xff) + while (get() != 0xFF) { result.push_back(parse_cbor_internal(false)); } @@ -4747,58 +4742,58 @@ class binary_reader } // map (0x00..0x17 pairs of data items follow) - case 0xa0: - case 0xa1: - case 0xa2: - case 0xa3: - case 0xa4: - case 0xa5: - case 0xa6: - case 0xa7: - case 0xa8: - case 0xa9: - case 0xaa: - case 0xab: - case 0xac: - case 0xad: - case 0xae: - case 0xaf: - case 0xb0: - case 0xb1: - case 0xb2: - case 0xb3: - case 0xb4: - case 0xb5: - case 0xb6: - case 0xb7: - { - return get_cbor_object(current & 0x1f); - } - - case 0xb8: // map (one-byte uint8_t for n follows) + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + { + return get_cbor_object(current & 0x1F); + } + + case 0xB8: // map (one-byte uint8_t for n follows) { return get_cbor_object(get_number()); } - case 0xb9: // map (two-byte uint16_t for n follow) + case 0xB9: // map (two-byte uint16_t for n follow) { return get_cbor_object(get_number()); } - case 0xba: // map (four-byte uint32_t for n follow) + case 0xBA: // map (four-byte uint32_t for n follow) { return get_cbor_object(get_number()); } - case 0xbb: // map (eight-byte uint64_t for n follow) + case 0xBB: // map (eight-byte uint64_t for n follow) { return get_cbor_object(get_number()); } - case 0xbf: // map (indefinite length) + case 0xBF: // map (indefinite length) { BasicJsonType result = value_t::object; - while (get() != 0xff) + while (get() != 0xFF) { auto key = get_cbor_string(); result[key] = parse_cbor_internal(); @@ -4806,22 +4801,22 @@ class binary_reader return result; } - case 0xf4: // false + case 0xF4: // false { return false; } - case 0xf5: // true + case 0xF5: // true { return true; } - case 0xf6: // null + case 0xF6: // null { return value_t::null; } - case 0xf9: // Half-Precision Float (two-byte IEEE 754) + case 0xF9: // Half-Precision Float (two-byte IEEE 754) { const int byte1 = get(); check_eof(); @@ -4837,8 +4832,8 @@ class 
binary_reader // half-precision floating-point numbers in the C language // is shown in Fig. 3. const int half = (byte1 << 8) + byte2; - const int exp = (half >> 10) & 0x1f; - const int mant = half & 0x3ff; + const int exp = (half >> 10) & 0x1F; + const int mant = half & 0x3FF; double val; if (exp == 0) { @@ -4856,12 +4851,12 @@ class binary_reader return (half & 0x8000) != 0 ? -val : val; } - case 0xfa: // Single-Precision Float (four-byte IEEE 754) + case 0xFA: // Single-Precision Float (four-byte IEEE 754) { return get_number(); } - case 0xfb: // Double-Precision Float (eight-byte IEEE 754) + case 0xFB: // Double-Precision Float (eight-byte IEEE 754) { return get_number(); } @@ -4869,7 +4864,7 @@ class binary_reader default: // anything else (0xFF is handled inside the other types) { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(112, chars_read, "error reading CBOR; last byte: 0x" + ss.str())); } } @@ -4894,12 +4889,12 @@ class binary_reader case 0x07: case 0x08: case 0x09: - case 0x0a: - case 0x0b: - case 0x0c: - case 0x0d: - case 0x0e: - case 0x0f: + case 0x0A: + case 0x0B: + case 0x0C: + case 0x0D: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -4910,12 +4905,12 @@ class binary_reader case 0x17: case 0x18: case 0x19: - case 0x1a: - case 0x1b: - case 0x1c: - case 0x1d: - case 0x1e: - case 0x1f: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: case 0x20: case 0x21: case 0x22: @@ -4926,12 +4921,12 @@ class binary_reader case 0x27: case 0x28: case 0x29: - case 0x2a: - case 0x2b: - case 0x2c: - case 0x2d: - case 0x2e: - case 0x2f: + case 0x2A: + case 0x2B: + case 0x2C: + case 0x2D: + case 0x2E: + case 0x2F: case 0x30: case 0x31: case 0x32: @@ -4942,12 +4937,12 @@ class binary_reader case 0x37: case 0x38: case 0x39: - case 0x3a: - case 0x3b: - case 0x3c: - case 0x3d: - case 0x3e: - case 0x3f: + case 0x3A: + case 0x3B: + case 0x3C: + case 0x3D: + case 0x3E: + case 0x3F: case 0x40: case 0x41: case 0x42: @@ -4958,12 +4953,12 @@ class binary_reader case 0x47: case 0x48: case 0x49: - case 0x4a: - case 0x4b: - case 0x4c: - case 0x4d: - case 0x4e: - case 0x4f: + case 0x4A: + case 0x4B: + case 0x4C: + case 0x4D: + case 0x4E: + case 0x4F: case 0x50: case 0x51: case 0x52: @@ -4974,12 +4969,12 @@ class binary_reader case 0x57: case 0x58: case 0x59: - case 0x5a: - case 0x5b: - case 0x5c: - case 0x5d: - case 0x5e: - case 0x5f: + case 0x5A: + case 0x5B: + case 0x5C: + case 0x5D: + case 0x5E: + case 0x5F: case 0x60: case 0x61: case 0x62: @@ -4990,12 +4985,12 @@ class binary_reader case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -5006,12 +5001,12 @@ class binary_reader case 0x77: case 0x78: case 0x79: - case 0x7a: - case 0x7b: - case 0x7c: - case 0x7d: - case 0x7e: - case 0x7f: + case 0x7A: + case 0x7B: + case 0x7C: + case 0x7D: + case 0x7E: + case 0x7F: return static_cast(current); // fixmap @@ -5025,14 +5020,14 @@ class binary_reader case 0x87: case 0x88: case 0x89: - case 0x8a: - case 0x8b: - case 0x8c: - case 0x8d: - case 0x8e: - case 0x8f: + case 0x8A: + case 0x8B: + case 0x8C: + case 0x8D: + case 0x8E: + case 0x8F: { - return get_msgpack_object(current & 0x0f); + return get_msgpack_object(current & 0x0F); } // fixarray @@ 
-5046,154 +5041,154 @@ class binary_reader case 0x97: case 0x98: case 0x99: - case 0x9a: - case 0x9b: - case 0x9c: - case 0x9d: - case 0x9e: - case 0x9f: + case 0x9A: + case 0x9B: + case 0x9C: + case 0x9D: + case 0x9E: + case 0x9F: { - return get_msgpack_array(current & 0x0f); + return get_msgpack_array(current & 0x0F); } // fixstr - case 0xa0: - case 0xa1: - case 0xa2: - case 0xa3: - case 0xa4: - case 0xa5: - case 0xa6: - case 0xa7: - case 0xa8: - case 0xa9: - case 0xaa: - case 0xab: - case 0xac: - case 0xad: - case 0xae: - case 0xaf: - case 0xb0: - case 0xb1: - case 0xb2: - case 0xb3: - case 0xb4: - case 0xb5: - case 0xb6: - case 0xb7: - case 0xb8: - case 0xb9: - case 0xba: - case 0xbb: - case 0xbc: - case 0xbd: - case 0xbe: - case 0xbf: + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: return get_msgpack_string(); - case 0xc0: // nil + case 0xC0: // nil return value_t::null; - case 0xc2: // false + case 0xC2: // false return false; - case 0xc3: // true + case 0xC3: // true return true; - case 0xca: // float 32 + case 0xCA: // float 32 return get_number(); - case 0xcb: // float 64 + case 0xCB: // float 64 return get_number(); - case 0xcc: // uint 8 + case 0xCC: // uint 8 return get_number(); - case 0xcd: // uint 16 + case 0xCD: // uint 16 return get_number(); - case 0xce: // uint 32 + case 0xCE: // uint 32 return get_number(); - case 0xcf: // uint 64 + case 0xCF: // uint 64 return get_number(); - case 0xd0: // int 8 + case 0xD0: // int 8 return get_number(); - case 0xd1: // int 16 + case 0xD1: // int 16 return get_number(); - case 0xd2: // int 32 + case 0xD2: // int 32 return get_number(); - case 0xd3: // int 64 + case 0xD3: // int 64 return get_number(); - case 0xd9: // str 8 - case 0xda: // str 16 - case 0xdb: // str 32 + case 0xD9: // str 8 + case 0xDA: // str 16 + case 0xDB: // str 32 return get_msgpack_string(); - case 0xdc: // array 16 + case 0xDC: // array 16 { return get_msgpack_array(get_number()); } - case 0xdd: // array 32 + case 0xDD: // array 32 { return get_msgpack_array(get_number()); } - case 0xde: // map 16 + case 0xDE: // map 16 { return get_msgpack_object(get_number()); } - case 0xdf: // map 32 + case 0xDF: // map 32 { return get_msgpack_object(get_number()); } // positive fixint - case 0xe0: - case 0xe1: - case 0xe2: - case 0xe3: - case 0xe4: - case 0xe5: - case 0xe6: - case 0xe7: - case 0xe8: - case 0xe9: - case 0xea: - case 0xeb: - case 0xec: - case 0xed: - case 0xee: - case 0xef: - case 0xf0: - case 0xf1: - case 0xf2: - case 0xf3: - case 0xf4: - case 0xf5: - case 0xf6: - case 0xf7: - case 0xf8: - case 0xf9: - case 0xfa: - case 0xfb: - case 0xfc: - case 0xfd: - case 0xfe: - case 0xff: + case 0xE0: + case 0xE1: + case 0xE2: + case 0xE3: + case 0xE4: + case 0xE5: + case 0xE6: + case 0xE7: + case 0xE8: + case 0xE9: + case 0xEA: + case 0xEB: + case 0xEC: + case 0xED: + case 0xEE: + case 0xEF: + case 0xF0: + case 0xF1: + case 0xF2: + case 0xF3: + case 0xF4: + case 0xF5: + case 0xF6: + case 0xF7: + case 0xF8: + case 0xF9: + case 0xFA: + case 0xFB: + case 0xFC: + case 0xFD: + case 0xFE: + case 0xFF: return static_cast(current); default: // anything else { std::stringstream ss; - ss << std::setw(2) << 
std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(112, chars_read, "error reading MessagePack; last byte: 0x" + ss.str())); } @@ -5309,12 +5304,12 @@ class binary_reader case 0x67: case 0x68: case 0x69: - case 0x6a: - case 0x6b: - case 0x6c: - case 0x6d: - case 0x6e: - case 0x6f: + case 0x6A: + case 0x6B: + case 0x6C: + case 0x6D: + case 0x6E: + case 0x6F: case 0x70: case 0x71: case 0x72: @@ -5324,7 +5319,7 @@ class binary_reader case 0x76: case 0x77: { - return get_string(current & 0x1f); + return get_string(current & 0x1F); } case 0x78: // UTF-8 string (one-byte uint8_t for n follows) @@ -5337,20 +5332,20 @@ class binary_reader return get_string(get_number()); } - case 0x7a: // UTF-8 string (four-byte uint32_t for n follow) + case 0x7A: // UTF-8 string (four-byte uint32_t for n follow) { return get_string(get_number()); } - case 0x7b: // UTF-8 string (eight-byte uint64_t for n follow) + case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow) { return get_string(get_number()); } - case 0x7f: // UTF-8 string (indefinite length) + case 0x7F: // UTF-8 string (indefinite length) { std::string result; - while (get() != 0xff) + while (get() != 0xFF) { check_eof(); result.push_back(static_cast(current)); @@ -5361,7 +5356,7 @@ class binary_reader default: { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(113, chars_read, "expected a CBOR string; last byte: 0x" + ss.str())); } } @@ -5412,53 +5407,53 @@ class binary_reader switch (current) { // fixstr - case 0xa0: - case 0xa1: - case 0xa2: - case 0xa3: - case 0xa4: - case 0xa5: - case 0xa6: - case 0xa7: - case 0xa8: - case 0xa9: - case 0xaa: - case 0xab: - case 0xac: - case 0xad: - case 0xae: - case 0xaf: - case 0xb0: - case 0xb1: - case 0xb2: - case 0xb3: - case 0xb4: - case 0xb5: - case 0xb6: - case 0xb7: - case 0xb8: - case 0xb9: - case 0xba: - case 0xbb: - case 0xbc: - case 0xbd: - case 0xbe: - case 0xbf: - { - return get_string(current & 0x1f); - } - - case 0xd9: // str 8 + case 0xA0: + case 0xA1: + case 0xA2: + case 0xA3: + case 0xA4: + case 0xA5: + case 0xA6: + case 0xA7: + case 0xA8: + case 0xA9: + case 0xAA: + case 0xAB: + case 0xAC: + case 0xAD: + case 0xAE: + case 0xAF: + case 0xB0: + case 0xB1: + case 0xB2: + case 0xB3: + case 0xB4: + case 0xB5: + case 0xB6: + case 0xB7: + case 0xB8: + case 0xB9: + case 0xBA: + case 0xBB: + case 0xBC: + case 0xBD: + case 0xBE: + case 0xBF: + { + return get_string(current & 0x1F); + } + + case 0xD9: // str 8 { return get_string(get_number()); } - case 0xda: // str 16 + case 0xDA: // str 16 { return get_string(get_number()); } - case 0xdb: // str 32 + case 0xDB: // str 32 { return get_string(get_number()); } @@ -5466,7 +5461,7 @@ class binary_reader default: { std::stringstream ss; - ss << std::setw(2) << std::setfill('0') << std::hex << current; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current; JSON_THROW(parse_error::create(113, chars_read, "expected a MessagePack string; last byte: 0x" + ss.str())); } @@ -5562,15 +5557,15 @@ class binary_writer { case value_t::null: { - oa->write_character(static_cast(0xf6)); + oa->write_character(static_cast(0xF6)); break; } case value_t::boolean: { oa->write_character(j.m_value.boolean - ? static_cast(0xf5) - : static_cast(0xf4)); + ? 
static_cast(0xF5) + : static_cast(0xF4)); break; } @@ -5597,12 +5592,12 @@ class binary_writer } else if (j.m_value.number_integer <= (std::numeric_limits::max)()) { - oa->write_character(static_cast(0x1a)); + oa->write_character(static_cast(0x1A)); write_number(static_cast(j.m_value.number_integer)); } else { - oa->write_character(static_cast(0x1b)); + oa->write_character(static_cast(0x1B)); write_number(static_cast(j.m_value.number_integer)); } } @@ -5627,12 +5622,12 @@ class binary_writer } else if (positive_number <= (std::numeric_limits::max)()) { - oa->write_character(static_cast(0x3a)); + oa->write_character(static_cast(0x3A)); write_number(static_cast(positive_number)); } else { - oa->write_character(static_cast(0x3b)); + oa->write_character(static_cast(0x3B)); write_number(static_cast(positive_number)); } } @@ -5657,12 +5652,12 @@ class binary_writer } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { - oa->write_character(static_cast(0x1a)); + oa->write_character(static_cast(0x1A)); write_number(static_cast(j.m_value.number_unsigned)); } else { - oa->write_character(static_cast(0x1b)); + oa->write_character(static_cast(0x1B)); write_number(static_cast(j.m_value.number_unsigned)); } break; @@ -5670,7 +5665,7 @@ class binary_writer case value_t::number_float: // Double-Precision Float { - oa->write_character(static_cast(0xfb)); + oa->write_character(static_cast(0xFB)); write_number(j.m_value.number_float); break; } @@ -5683,25 +5678,25 @@ class binary_writer { write_number(static_cast(0x60 + N)); } - else if (N <= 0xff) + else if (N <= 0xFF) { oa->write_character(static_cast(0x78)); write_number(static_cast(N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { oa->write_character(static_cast(0x79)); write_number(static_cast(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { - oa->write_character(static_cast(0x7a)); + oa->write_character(static_cast(0x7A)); write_number(static_cast(N)); } // LCOV_EXCL_START - else if (N <= 0xffffffffffffffff) + else if (N <= 0xFFFFFFFFFFFFFFFF) { - oa->write_character(static_cast(0x7b)); + oa->write_character(static_cast(0x7B)); write_number(static_cast(N)); } // LCOV_EXCL_STOP @@ -5721,25 +5716,25 @@ class binary_writer { write_number(static_cast(0x80 + N)); } - else if (N <= 0xff) + else if (N <= 0xFF) { oa->write_character(static_cast(0x98)); write_number(static_cast(N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { oa->write_character(static_cast(0x99)); write_number(static_cast(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { - oa->write_character(static_cast(0x9a)); + oa->write_character(static_cast(0x9A)); write_number(static_cast(N)); } // LCOV_EXCL_START - else if (N <= 0xffffffffffffffff) + else if (N <= 0xFFFFFFFFFFFFFFFF) { - oa->write_character(static_cast(0x9b)); + oa->write_character(static_cast(0x9B)); write_number(static_cast(N)); } // LCOV_EXCL_STOP @@ -5758,27 +5753,27 @@ class binary_writer const auto N = j.m_value.object->size(); if (N <= 0x17) { - write_number(static_cast(0xa0 + N)); + write_number(static_cast(0xA0 + N)); } - else if (N <= 0xff) + else if (N <= 0xFF) { - oa->write_character(static_cast(0xb8)); + oa->write_character(static_cast(0xB8)); write_number(static_cast(N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { - oa->write_character(static_cast(0xb9)); + oa->write_character(static_cast(0xB9)); write_number(static_cast(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { - oa->write_character(static_cast(0xba)); + 
oa->write_character(static_cast(0xBA)); write_number(static_cast(N)); } // LCOV_EXCL_START - else if (N <= 0xffffffffffffffff) + else if (N <= 0xFFFFFFFFFFFFFFFF) { - oa->write_character(static_cast(0xbb)); + oa->write_character(static_cast(0xBB)); write_number(static_cast(N)); } // LCOV_EXCL_STOP @@ -5806,15 +5801,15 @@ class binary_writer { case value_t::null: // nil { - oa->write_character(static_cast(0xc0)); + oa->write_character(static_cast(0xC0)); break; } case value_t::boolean: // true and false { oa->write_character(j.m_value.boolean - ? static_cast(0xc3) - : static_cast(0xc2)); + ? static_cast(0xC3) + : static_cast(0xC2)); break; } @@ -5833,25 +5828,25 @@ class binary_writer else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 8 - oa->write_character(static_cast(0xcc)); + oa->write_character(static_cast(0xCC)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 16 - oa->write_character(static_cast(0xcd)); + oa->write_character(static_cast(0xCD)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 32 - oa->write_character(static_cast(0xce)); + oa->write_character(static_cast(0xCE)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 64 - oa->write_character(static_cast(0xcf)); + oa->write_character(static_cast(0xCF)); write_number(static_cast(j.m_value.number_integer)); } } @@ -5866,28 +5861,28 @@ class binary_writer j.m_value.number_integer <= (std::numeric_limits::max)()) { // int 8 - oa->write_character(static_cast(0xd0)); + oa->write_character(static_cast(0xD0)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_integer >= (std::numeric_limits::min)() and j.m_value.number_integer <= (std::numeric_limits::max)()) { // int 16 - oa->write_character(static_cast(0xd1)); + oa->write_character(static_cast(0xD1)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_integer >= (std::numeric_limits::min)() and j.m_value.number_integer <= (std::numeric_limits::max)()) { // int 32 - oa->write_character(static_cast(0xd2)); + oa->write_character(static_cast(0xD2)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_integer >= (std::numeric_limits::min)() and j.m_value.number_integer <= (std::numeric_limits::max)()) { // int 64 - oa->write_character(static_cast(0xd3)); + oa->write_character(static_cast(0xD3)); write_number(static_cast(j.m_value.number_integer)); } } @@ -5904,25 +5899,25 @@ class binary_writer else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 8 - oa->write_character(static_cast(0xcc)); + oa->write_character(static_cast(0xCC)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 16 - oa->write_character(static_cast(0xcd)); + oa->write_character(static_cast(0xCD)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 32 - oa->write_character(static_cast(0xce)); + oa->write_character(static_cast(0xCE)); write_number(static_cast(j.m_value.number_integer)); } else if (j.m_value.number_unsigned <= (std::numeric_limits::max)()) { // uint 64 - oa->write_character(static_cast(0xcf)); + oa->write_character(static_cast(0xCF)); 
write_number(static_cast(j.m_value.number_integer)); } break; @@ -5930,7 +5925,7 @@ class binary_writer case value_t::number_float: // float 64 { - oa->write_character(static_cast(0xcb)); + oa->write_character(static_cast(0xCB)); write_number(j.m_value.number_float); break; } @@ -5942,24 +5937,24 @@ class binary_writer if (N <= 31) { // fixstr - write_number(static_cast(0xa0 | N)); + write_number(static_cast(0xA0 | N)); } else if (N <= 255) { // str 8 - oa->write_character(static_cast(0xd9)); + oa->write_character(static_cast(0xD9)); write_number(static_cast(N)); } else if (N <= 65535) { // str 16 - oa->write_character(static_cast(0xda)); + oa->write_character(static_cast(0xDA)); write_number(static_cast(N)); } else if (N <= 4294967295) { // str 32 - oa->write_character(static_cast(0xdb)); + oa->write_character(static_cast(0xDB)); write_number(static_cast(N)); } @@ -5979,16 +5974,16 @@ class binary_writer // fixarray write_number(static_cast(0x90 | N)); } - else if (N <= 0xffff) + else if (N <= 0xFFFF) { // array 16 - oa->write_character(static_cast(0xdc)); + oa->write_character(static_cast(0xDC)); write_number(static_cast(N)); } - else if (N <= 0xffffffff) + else if (N <= 0xFFFFFFFF) { // array 32 - oa->write_character(static_cast(0xdd)); + oa->write_character(static_cast(0xDD)); write_number(static_cast(N)); } @@ -6007,18 +6002,18 @@ class binary_writer if (N <= 15) { // fixmap - write_number(static_cast(0x80 | (N & 0xf))); + write_number(static_cast(0x80 | (N & 0xF))); } else if (N <= 65535) { // map 16 - oa->write_character(static_cast(0xde)); + oa->write_character(static_cast(0xDE)); write_number(static_cast(N)); } else if (N <= 4294967295) { // map 32 - oa->write_character(static_cast(0xdf)); + oa->write_character(static_cast(0xDF)); write_number(static_cast(N)); } @@ -6363,9 +6358,9 @@ class serializer case 0x05: case 0x06: case 0x07: - case 0x0b: - case 0x0e: - case 0x0f: + case 0x0B: + case 0x0E: + case 0x0F: case 0x10: case 0x11: case 0x12: @@ -6376,12 +6371,12 @@ class serializer case 0x17: case 0x18: case 0x19: - case 0x1a: - case 0x1b: - case 0x1c: - case 0x1d: - case 0x1e: - case 0x1f: + case 0x1A: + case 0x1B: + case 0x1C: + case 0x1D: + case 0x1E: + case 0x1F: { // from c (1 byte) to \uxxxx (6 bytes) res += 5; @@ -6393,12 +6388,8 @@ class serializer if (ensure_ascii and (s[i] & 0x80 or s[i] == 0x7F)) { const auto bytes = bytes_following(static_cast(s[i])); - if (bytes == std::string::npos) - { - // invalid characters are treated as is, so no - // additional space will be used - break; - } + // invalid characters will be detected by throw_if_invalid_utf8 + assert (bytes != std::string::npos); if (bytes == 3) { @@ -6492,6 +6483,8 @@ class serializer */ void dump_escaped(const string_t& s, const bool ensure_ascii) const { + throw_if_invalid_utf8(s); + const auto space = extra_space(s, ensure_ascii); if (space == 0) { @@ -6514,7 +6507,7 @@ class serializer break; } - case '\\': // reverse solidus (0x5c) + case '\\': // reverse solidus (0x5C) { // nothing to change pos += 2; @@ -6528,21 +6521,21 @@ class serializer break; } - case '\f': // formfeed (0x0c) + case '\f': // formfeed (0x0C) { result[pos + 1] = 'f'; pos += 2; break; } - case '\n': // newline (0x0a) + case '\n': // newline (0x0A) { result[pos + 1] = 'n'; pos += 2; break; } - case '\r': // carriage return (0x0d) + case '\r': // carriage return (0x0D) { result[pos + 1] = 'r'; pos += 2; @@ -6564,21 +6557,18 @@ class serializer (ensure_ascii and (s[i] & 0x80 or s[i] == 0x7F))) { const auto bytes = 
bytes_following(static_cast(s[i])); - if (bytes == std::string::npos) - { - // copy invalid character as is - result[pos++] = s[i]; - break; - } + // invalid characters will be detected by throw_if_invalid_utf8 + assert (bytes != std::string::npos); // check that the additional bytes are present assert(i + bytes < s.size()); - // to use \uxxxx escaping, we first need to caluclate + // to use \uxxxx escaping, we first need to calculate // the codepoint from the UTF-8 bytes int codepoint = 0; - assert(0 <= bytes and bytes <= 3); + // bytes is unsigned type: + assert(bytes <= 3); switch (bytes) { case 0: @@ -6641,11 +6631,10 @@ class serializer @param[in] x integer number (signed or unsigned) to dump @tparam NumberType either @a number_integer_t or @a number_unsigned_t */ - template < - typename NumberType, - detail::enable_if_t::value or - std::is_same::value, - int> = 0 > + template::value or + std::is_same::value, + int> = 0> void dump_integer(NumberType x) { // special case for "0" @@ -6743,6 +6732,87 @@ class serializer } } + /*! + @brief check whether a string is UTF-8 encoded + + The function checks each byte of a string whether it is UTF-8 encoded. The + result of the check is stored in the @a state parameter. The function must + be called initially with state 0 (accept). State 1 means the string must + be rejected, because the current byte is not allowed. If the string is + completely processed, but the state is non-zero, the string ended + prematurely; that is, the last byte indicated more bytes should have + followed. + + @param[in,out] state the state of the decoding + @param[in] byte next byte to decode + + @note The function has been edited: a std::array is used and the code + point is not calculated. + + @copyright Copyright (c) 2008-2009 Bjoern Hoehrmann + @sa http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + */ + static void decode(uint8_t& state, const uint8_t byte) + { + static const std::array utf8d = + { + { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00..1F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20..3F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40..5F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60..7F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 80..9F + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // A0..BF + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // C0..DF + 0xA, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, // E0..EF + 0xB, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, // F0..FF + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, // s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, // s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, // s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // s7..s8 + } + }; + + const uint8_t type = utf8d[byte]; + state = utf8d[256u + state * 16u + type]; + } + + /*! 
+ @brief throw an exception if a string is not UTF-8 encoded + + @param[in] str UTF-8 string to check + @throw type_error.316 if passed string is not UTF-8 encoded + + @since version 3.0.0 + */ + static void throw_if_invalid_utf8(const std::string& str) + { + // start with state 0 (= accept) + uint8_t state = 0; + + for (size_t i = 0; i < str.size(); ++i) + { + const auto byte = static_cast(str[i]); + decode(state, byte); + if (state == 1) + { + // state 1 means reject + std::stringstream ss; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << static_cast(byte); + JSON_THROW(type_error::create(316, "invalid UTF-8 byte at index " + std::to_string(i) + ": 0x" + ss.str())); + } + } + + if (state != 0) + { + // we finish reading, but do not accept: string was incomplete + std::stringstream ss; + ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << static_cast(static_cast(str.back())); + JSON_THROW(type_error::create(316, "incomplete UTF-8 string; last byte: 0x" + ss.str())); + } + } + private: /// the output of the serializer output_adapter_t o = nullptr; @@ -6771,27 +6841,20 @@ class json_ref using value_type = BasicJsonType; json_ref(value_type&& value) - : owned_value(std::move(value)), - value_ref(&owned_value), - is_rvalue(true) + : owned_value(std::move(value)), value_ref(&owned_value), is_rvalue(true) {} json_ref(const value_type& value) - : value_ref(const_cast(&value)), - is_rvalue(false) + : value_ref(const_cast(&value)), is_rvalue(false) {} json_ref(std::initializer_list init) - : owned_value(init), - value_ref(&owned_value), - is_rvalue(true) + : owned_value(init), value_ref(&owned_value), is_rvalue(true) {} - template + template json_ref(Args&& ... args) - : owned_value(std::forward(args)...), - value_ref(&owned_value), - is_rvalue(true) + : owned_value(std::forward(args)...), value_ref(&owned_value), is_rvalue(true) {} // class should be movable only @@ -6949,6 +7012,27 @@ class json_pointer return to_string(); } + /*! + @param[in] s reference token to be converted into an array index + + @return integer representation of @a s + + @throw out_of_range.404 if string @a s could not be converted to an integer + */ + static int array_index(const std::string& s) + { + size_t processed_chars = 0; + const int res = std::stoi(s, &processed_chars); + + // check if the string was completely read + if (JSON_UNLIKELY(processed_chars != s.size())) + { + JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + s + "'")); + } + + return res; + } + private: /*! @brief remove and return last reference pointer @@ -6984,7 +7068,6 @@ class json_pointer return result; } - /*! @brief create and return a reference to the pointed to value @@ -7320,11 +7403,11 @@ class basic_json public: using value_t = detail::value_t; - // forward declarations + /// @copydoc nlohmann::json_pointer using json_pointer = ::nlohmann::json_pointer; template using json_serializer = JSONSerializer; - + /// helper type for initializer lists of basic_json values using initializer_list_t = std::initializer_list>; //////////////// @@ -7436,7 +7519,7 @@ class basic_json result["url"] = "https://github.com/nlohmann/json"; result["version"] = { - {"string", "2.1.1"}, {"major", 2}, {"minor", 1}, {"patch", 1} + {"string", "3.0.1"}, {"major", 3}, {"minor", 0}, {"patch", 1} }; #ifdef _WIN32 @@ -7489,6 +7572,14 @@ class basic_json /// the template arguments passed to class @ref basic_json. 
/// @{ +#if defined(JSON_HAS_CPP_14) + // Use transparent comparator if possible, combined with perfect forwarding + // on find() and count() calls prevents unnecessary string construction. + using object_comparator_t = std::less<>; +#else + using object_comparator_t = std::less; +#endif + /*! @brief a type for an object @@ -7572,14 +7663,6 @@ class basic_json 7159](http://rfc7159.net/rfc7159), because any order implements the specified "unordered" nature of JSON objects. */ - -#if defined(JSON_HAS_CPP_14) - // Use transparent comparator if possible, combined with perfect forwarding - // on find() and count() calls prevents unnecessary string construction. - using object_comparator_t = std::less<>; -#else - using object_comparator_t = std::less; -#endif using object_t = ObjectType alloc; + using AllocatorTraits = std::allocator_traits>; + auto deleter = [&](T * object) { - alloc.deallocate(object, 1); + AllocatorTraits::deallocate(alloc, object, 1); }; - std::unique_ptr object(alloc.allocate(1), deleter); - alloc.construct(object.get(), std::forward(args)...); + std::unique_ptr object(AllocatorTraits::allocate(alloc, 1), deleter); + AllocatorTraits::construct(alloc, object.get(), std::forward(args)...); assert(object != nullptr); return object.release(); } @@ -8054,7 +8139,7 @@ class basic_json object = nullptr; // silence warning, see #821 if (JSON_UNLIKELY(t == value_t::null)) { - JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 2.1.1")); // LCOV_EXCL_LINE + JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.0.1")); // LCOV_EXCL_LINE } break; } @@ -8104,24 +8189,24 @@ class basic_json case value_t::object: { AllocatorType alloc; - alloc.destroy(object); - alloc.deallocate(object, 1); + std::allocator_traits::destroy(alloc, object); + std::allocator_traits::deallocate(alloc, object, 1); break; } case value_t::array: { AllocatorType alloc; - alloc.destroy(array); - alloc.deallocate(array, 1); + std::allocator_traits::destroy(alloc, array); + std::allocator_traits::deallocate(alloc, array, 1); break; } case value_t::string: { AllocatorType alloc; - alloc.destroy(string); - alloc.deallocate(string, 1); + std::allocator_traits::destroy(alloc, string); + std::allocator_traits::deallocate(alloc, string, 1); break; } @@ -8154,6 +8239,21 @@ class basic_json // JSON parser callback // ////////////////////////// + /*! + @brief parser event types + + The parser callback distinguishes the following events: + - `object_start`: the parser read `{` and started to process a JSON object + - `key`: the parser read a key of a value in an object + - `object_end`: the parser read `}` and finished processing a JSON object + - `array_start`: the parser read `[` and started to process a JSON array + - `array_end`: the parser read `]` and finished processing a JSON array + - `value`: the parser finished reading a JSON value + + @image html callback_events.png "Example when certain parse events are triggered" + + @sa @ref parser_callback_t for more information and examples + */ using parse_event_t = typename parser::parse_event_t; /*! @@ -8280,7 +8380,7 @@ class basic_json @brief create a JSON value This is a "catch all" constructor for all compatible JSON types; that is, - types for which a `to_json()` method exsits. The constructor forwards the + types for which a `to_json()` method exists. The constructor forwards the parameter @a val to that method (to `json_serializer::to_json` method with `U = uncvref_t`, to be exact). 
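The "catch all" constructor documented above dispatches to json_serializer::to_json, which by default finds a free to_json() in the value type's namespace via argument-dependent lookup; get<T>() symmetrically uses a free from_json(). A minimal sketch of that round trip follows; it assumes the vendored single header is reachable as <nlohmann/json.hpp>, and the ns::person type is made up for illustration.

#include <string>
#include <nlohmann/json.hpp>

namespace ns
{
    struct person
    {
        std::string name;
        int age;
    };

    // found via ADL by the "catch all" constructor
    void to_json(nlohmann::json& j, const person& p)
    {
        j = nlohmann::json{{"name", p.name}, {"age", p.age}};
    }

    // found via ADL by get<ns::person>()
    void from_json(const nlohmann::json& j, person& p)
    {
        p.name = j.at("name").get<std::string>();
        p.age  = j.at("age").get<int>();
    }
}

int main()
{
    nlohmann::json j = ns::person{"Ada", 36};   // implicit conversion via ns::to_json
    ns::person p = j.get<ns::person>();         // conversion back via ns::from_json
    return p.age == 36 ? 0 : 1;
}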
@@ -8952,11 +9052,14 @@ class basic_json @param[in] indent_char The character to use for indentation if @a indent is greater than `0`. The default is ` ` (space). @param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters - in the output are escaped with \uXXXX sequences, and the result consists + in the output are escaped with `\uXXXX` sequences, and the result consists of ASCII characters only. @return string containing the serialization of the JSON value + @throw type_error.316 if a string stored inside the JSON value is not + UTF-8 encoded + @complexity Linear. @exceptionsafety Strong guarantee: if an exception is thrown, there are no @@ -8968,8 +9071,8 @@ class basic_json @see https://docs.python.org/2/library/json.html#json.dump - @since version 1.0.0; indentation character @a indent_char and option - @a ensure_ascii added in version 3.0.0 + @since version 1.0.0; indentation character @a indent_char, option + @a ensure_ascii and exceptions added in version 3.0.0 */ string_t dump(const int indent = -1, const char indent_char = ' ', const bool ensure_ascii = false) const @@ -9003,7 +9106,7 @@ class basic_json string | value_t::string number (integer) | value_t::number_integer number (unsigned integer) | value_t::number_unsigned - number (foating-point) | value_t::number_float + number (floating-point) | value_t::number_float object | value_t::object array | value_t::array discarded | value_t::discarded @@ -9507,11 +9610,9 @@ class basic_json @since version 2.1.0 */ - template < - typename BasicJsonType, - detail::enable_if_t::type, - basic_json_t>::value, - int> = 0 > + template::type, basic_json_t>::value, + int> = 0> basic_json get() const { return *this; @@ -9556,14 +9657,12 @@ class basic_json @since version 2.1.0 */ - template < - typename ValueTypeCV, - typename ValueType = detail::uncvref_t, - detail::enable_if_t < - not std::is_same::value and - detail::has_from_json::value and - not detail::has_non_default_from_json::value, - int > = 0 > + template, + detail::enable_if_t < + not std::is_same::value and + detail::has_from_json::value and + not detail::has_non_default_from_json::value, + int> = 0> ValueType get() const noexcept(noexcept( JSONSerializer::from_json(std::declval(), std::declval()))) { @@ -9611,12 +9710,10 @@ class basic_json @since version 2.1.0 */ - template < - typename ValueTypeCV, - typename ValueType = detail::uncvref_t, - detail::enable_if_t::value and - detail::has_non_default_from_json::value, int> = 0 > + template, + detail::enable_if_t::value and + detail::has_non_default_from_json::value, + int> = 0> ValueType get() const noexcept(noexcept( JSONSerializer::from_json(std::declval()))) { @@ -10110,7 +10207,7 @@ class basic_json @return const reference to the element at index @a idx - @throw type_error.305 if the JSON value is not an array; in that cases, + @throw type_error.305 if the JSON value is not an array; in that case, using the [] operator with an index makes no sense. @complexity Constant. @@ -10193,7 +10290,7 @@ class basic_json @pre The element with key @a key must exist. **This precondition is enforced with an assertion.** - @throw type_error.305 if the JSON value is not an object; in that cases, + @throw type_error.305 if the JSON value is not an object; in that case, using the [] operator with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10282,7 +10379,7 @@ class basic_json @pre The element with key @a key must exist. 
**This precondition is enforced with an assertion.** - @throw type_error.305 if the JSON value is not an object; in that cases, + @throw type_error.305 if the JSON value is not an object; in that case, using the [] operator with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10342,7 +10439,7 @@ class basic_json @return copy of the element at key @a key or @a default_value if @a key is not found - @throw type_error.306 if the JSON value is not an objec; in that cases, + @throw type_error.306 if the JSON value is not an object; in that case, using `value()` with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10415,7 +10512,7 @@ class basic_json @return copy of the element at key @a key or @a default_value if @a key is not found - @throw type_error.306 if the JSON value is not an objec; in that cases, + @throw type_error.306 if the JSON value is not an objec; in that case, using `value()` with a key makes no sense. @complexity Logarithmic in the size of the container. @@ -10619,8 +10716,8 @@ class basic_json if (is_string()) { AllocatorType alloc; - alloc.destroy(m_value.string); - alloc.deallocate(m_value.string, 1); + std::allocator_traits::destroy(alloc, m_value.string); + std::allocator_traits::deallocate(alloc, m_value.string, 1); m_value.string = nullptr; } @@ -10725,8 +10822,8 @@ class basic_json if (is_string()) { AllocatorType alloc; - alloc.destroy(m_value.string); - alloc.deallocate(m_value.string, 1); + std::allocator_traits::destroy(alloc, m_value.string); + std::allocator_traits::deallocate(alloc, m_value.string, 1); m_value.string = nullptr; } @@ -11220,22 +11317,62 @@ class basic_json reference to the JSON values is returned, so there is no access to the underlying iterator. + For loop without iterator_wrapper: + + @code{cpp} + for (auto it = j_object.begin(); it != j_object.end(); ++it) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + Range-based for loop without iterator proxy: + + @code{cpp} + for (auto it : j_object) + { + // "it" is of type json::reference and has no key() member + std::cout << "value: " << it << '\n'; + } + @endcode + + Range-based for loop with iterator proxy: + + @code{cpp} + for (auto it : json::iterator_wrapper(j_object)) + { + std::cout << "key: " << it.key() << ", value:" << it.value() << '\n'; + } + @endcode + + @note When iterating over an array, `key()` will return the index of the + element as string (see example). + + @param[in] ref reference to a JSON value + @return iteration proxy object wrapping @a ref with an interface to use in + range-based for loops + @liveexample{The following code shows how the wrapper is used,iterator_wrapper} + @exceptionsafety Strong guarantee: if an exception is thrown, there are no + changes in the JSON value. + + @complexity Constant. + @note The name of this function is not yet final and may change in the future. */ - static iteration_proxy iterator_wrapper(reference cont) + static iteration_proxy iterator_wrapper(reference ref) { - return iteration_proxy(cont); + return iteration_proxy(ref); } /*! 
@copydoc iterator_wrapper(reference) */ - static iteration_proxy iterator_wrapper(const_reference cont) + static iteration_proxy iterator_wrapper(const_reference ref) { - return iteration_proxy(cont); + return iteration_proxy(ref); } /// @} @@ -12120,7 +12257,7 @@ class basic_json JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(j.type_name()))); } - for (auto it = j.begin(); it != j.end(); ++it) + for (auto it = j.cbegin(); it != j.cend(); ++it) { m_value.object->operator[](it.key()) = it.value(); } @@ -12341,7 +12478,7 @@ class basic_json [comparison function](https://github.com/mariokonrad/marnav/blob/master/src/marnav/math/floatingpoint.hpp#L34-#L39) could be used, for instance @code {.cpp} - template ::value, T>::type> + template::value, T>::type> inline bool is_same(T a, T b, T epsilon = std::numeric_limits::epsilon()) noexcept { return std::abs(a - b) <= epsilon; @@ -12769,7 +12906,7 @@ class basic_json `std::setw(4)` on @a o sets the indentation level to `4` and the serialization result is the same as calling `dump(4)`. - - The indentation characrer can be controlled with the member variable + - The indentation character can be controlled with the member variable `fill` of the output stream @a o. For instance, the manipulator `std::setfill('\\t')` sets indentation to use a tab character rather than the default space character. @@ -12779,12 +12916,15 @@ class basic_json @return the stream @a o + @throw type_error.316 if a string stored inside the JSON value is not + UTF-8 encoded + @complexity Linear. @liveexample{The example below shows the serialization with different parameters to `width` to adjust the indentation level.,operator_serialize} - @since version 1.0.0; indentaction character added in version 3.0.0 + @since version 1.0.0; indentation character added in version 3.0.0 */ friend std::ostream& operator<<(std::ostream& o, const basic_json& j) { @@ -13124,40 +13264,40 @@ class basic_json JSON value type | value/range | CBOR type | first byte --------------- | ------------------------------------------ | ---------------------------------- | --------------- - null | `null` | Null | 0xf6 - boolean | `true` | True | 0xf5 - boolean | `false` | False | 0xf4 - number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3b - number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3a + null | `null` | Null | 0xF6 + boolean | `true` | True | 0xF5 + boolean | `false` | False | 0xF4 + number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3B + number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3A number_integer | -32768..-129 | Negative integer (2 bytes follow) | 0x39 number_integer | -128..-25 | Negative integer (1 byte follow) | 0x38 number_integer | -24..-1 | Negative integer | 0x20..0x37 number_integer | 0..23 | Integer | 0x00..0x17 number_integer | 24..255 | Unsigned integer (1 byte follow) | 0x18 number_integer | 256..65535 | Unsigned integer (2 bytes follow) | 0x19 - number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1a - number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1b + number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A + number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B number_unsigned | 0..23 | Integer | 0x00..0x17 number_unsigned | 24..255 | Unsigned integer (1 byte follow) | 0x18 number_unsigned | 
256..65535 | Unsigned integer (2 bytes follow) | 0x19 - number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1a - number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1b - number_float | *any value* | Double-Precision Float | 0xfb + number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A + number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B + number_float | *any value* | Double-Precision Float | 0xFB string | *length*: 0..23 | UTF-8 string | 0x60..0x77 string | *length*: 23..255 | UTF-8 string (1 byte follow) | 0x78 string | *length*: 256..65535 | UTF-8 string (2 bytes follow) | 0x79 - string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7a - string | *length*: 4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7b + string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7A + string | *length*: 4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7B array | *size*: 0..23 | array | 0x80..0x97 array | *size*: 23..255 | array (1 byte follow) | 0x98 array | *size*: 256..65535 | array (2 bytes follow) | 0x99 - array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9a - array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9b - object | *size*: 0..23 | map | 0xa0..0xb7 - object | *size*: 23..255 | map (1 byte follow) | 0xb8 - object | *size*: 256..65535 | map (2 bytes follow) | 0xb9 - object | *size*: 65536..4294967295 | map (4 bytes follow) | 0xba - object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xbb + array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9A + array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9B + object | *size*: 0..23 | map | 0xA0..0xB7 + object | *size*: 23..255 | map (1 byte follow) | 0xB8 + object | *size*: 256..65535 | map (2 bytes follow) | 0xB9 + object | *size*: 65536..4294967295 | map (4 bytes follow) | 0xBA + object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xBB @note The mapping is **complete** in the sense that any JSON value type can be converted to a CBOR value. @@ -13167,20 +13307,20 @@ class basic_json function which serializes NaN or Infinity to `null`. 
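    An illustrative snippet of the difference noted above (not part of the
    original documentation), assuming the usual `using json = nlohmann::json;`
    alias and that `<cassert>`, `<cmath>` and `<limits>` are included:

    @code{.cpp}
    json j = std::numeric_limits<double>::quiet_NaN();
    assert(j.dump() == "null");                 // dump() emits null for NaN
    const auto bytes = json::to_cbor(j);
    assert(bytes.at(0) == 0xFB);                // Double-Precision Float marker
    assert(std::isnan(json::from_cbor(bytes).get<double>()));
    @endcode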
@note The following CBOR types are not used in the conversion: - - byte strings (0x40..0x5f) - - UTF-8 strings terminated by "break" (0x7f) - - arrays terminated by "break" (0x9f) - - maps terminated by "break" (0xbf) - - date/time (0xc0..0xc1) - - bignum (0xc2..0xc3) - - decimal fraction (0xc4) - - bigfloat (0xc5) - - tagged items (0xc6..0xd4, 0xd8..0xdb) - - expected conversions (0xd5..0xd7) - - simple values (0xe0..0xf3, 0xf8) - - undefined (0xf7) - - half and single-precision floats (0xf9-0xfa) - - break (0xff) + - byte strings (0x40..0x5F) + - UTF-8 strings terminated by "break" (0x7F) + - arrays terminated by "break" (0x9F) + - maps terminated by "break" (0xBF) + - date/time (0xC0..0xC1) + - bignum (0xC2..0xC3) + - decimal fraction (0xC4) + - bigfloat (0xC5) + - tagged items (0xC6..0xD4, 0xD8..0xDB) + - expected conversions (0xD5..0xD7) + - simple values (0xE0..0xF3, 0xF8) + - undefined (0xF7) + - half and single-precision floats (0xF9-0xFA) + - break (0xFF) @param[in] j JSON value to serialize @return MessagePack serialization as byte vector @@ -13226,35 +13366,35 @@ class basic_json JSON value type | value/range | MessagePack type | first byte --------------- | --------------------------------- | ---------------- | ---------- - null | `null` | nil | 0xc0 - boolean | `true` | true | 0xc3 - boolean | `false` | false | 0xc2 - number_integer | -9223372036854775808..-2147483649 | int64 | 0xd3 - number_integer | -2147483648..-32769 | int32 | 0xd2 - number_integer | -32768..-129 | int16 | 0xd1 - number_integer | -128..-33 | int8 | 0xd0 - number_integer | -32..-1 | negative fixint | 0xe0..0xff - number_integer | 0..127 | positive fixint | 0x00..0x7f - number_integer | 128..255 | uint 8 | 0xcc - number_integer | 256..65535 | uint 16 | 0xcd - number_integer | 65536..4294967295 | uint 32 | 0xce - number_integer | 4294967296..18446744073709551615 | uint 64 | 0xcf - number_unsigned | 0..127 | positive fixint | 0x00..0x7f - number_unsigned | 128..255 | uint 8 | 0xcc - number_unsigned | 256..65535 | uint 16 | 0xcd - number_unsigned | 65536..4294967295 | uint 32 | 0xce - number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xcf - number_float | *any value* | float 64 | 0xcb - string | *length*: 0..31 | fixstr | 0xa0..0xbf - string | *length*: 32..255 | str 8 | 0xd9 - string | *length*: 256..65535 | str 16 | 0xda - string | *length*: 65536..4294967295 | str 32 | 0xdb - array | *size*: 0..15 | fixarray | 0x90..0x9f - array | *size*: 16..65535 | array 16 | 0xdc - array | *size*: 65536..4294967295 | array 32 | 0xdd - object | *size*: 0..15 | fix map | 0x80..0x8f - object | *size*: 16..65535 | map 16 | 0xde - object | *size*: 65536..4294967295 | map 32 | 0xdf + null | `null` | nil | 0xC0 + boolean | `true` | true | 0xC3 + boolean | `false` | false | 0xC2 + number_integer | -9223372036854775808..-2147483649 | int64 | 0xD3 + number_integer | -2147483648..-32769 | int32 | 0xD2 + number_integer | -32768..-129 | int16 | 0xD1 + number_integer | -128..-33 | int8 | 0xD0 + number_integer | -32..-1 | negative fixint | 0xE0..0xFF + number_integer | 0..127 | positive fixint | 0x00..0x7F + number_integer | 128..255 | uint 8 | 0xCC + number_integer | 256..65535 | uint 16 | 0xCD + number_integer | 65536..4294967295 | uint 32 | 0xCE + number_integer | 4294967296..18446744073709551615 | uint 64 | 0xCF + number_unsigned | 0..127 | positive fixint | 0x00..0x7F + number_unsigned | 128..255 | uint 8 | 0xCC + number_unsigned | 256..65535 | uint 16 | 0xCD + number_unsigned | 65536..4294967295 | uint 32 | 0xCE + 
number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xCF + number_float | *any value* | float 64 | 0xCB + string | *length*: 0..31 | fixstr | 0xA0..0xBF + string | *length*: 32..255 | str 8 | 0xD9 + string | *length*: 256..65535 | str 16 | 0xDA + string | *length*: 65536..4294967295 | str 32 | 0xDB + array | *size*: 0..15 | fixarray | 0x90..0x9F + array | *size*: 16..65535 | array 16 | 0xDC + array | *size*: 65536..4294967295 | array 32 | 0xDD + object | *size*: 0..15 | fix map | 0x80..0x8F + object | *size*: 16..65535 | map 16 | 0xDE + object | *size*: 65536..4294967295 | map 32 | 0xDF @note The mapping is **complete** in the sense that any JSON value type can be converted to a MessagePack value. @@ -13265,10 +13405,10 @@ class basic_json - objects with more than 4294967295 elements @note The following MessagePack types are not used in the conversion: - - bin 8 - bin 32 (0xc4..0xc6) - - ext 8 - ext 32 (0xc7..0xc9) - - float 32 (0xca) - - fixext 1 - fixext 16 (0xd4..0xd8) + - bin 8 - bin 32 (0xC4..0xC6) + - ext 8 - ext 32 (0xC7..0xC9) + - float 32 (0xCA) + - fixext 1 - fixext 16 (0xD4..0xD8) @note Any MessagePack output created @ref to_msgpack can be successfully parsed by @ref from_msgpack. @@ -13322,51 +13462,51 @@ class basic_json Integer | number_unsigned | 0x00..0x17 Unsigned integer | number_unsigned | 0x18 Unsigned integer | number_unsigned | 0x19 - Unsigned integer | number_unsigned | 0x1a - Unsigned integer | number_unsigned | 0x1b + Unsigned integer | number_unsigned | 0x1A + Unsigned integer | number_unsigned | 0x1B Negative integer | number_integer | 0x20..0x37 Negative integer | number_integer | 0x38 Negative integer | number_integer | 0x39 - Negative integer | number_integer | 0x3a - Negative integer | number_integer | 0x3b + Negative integer | number_integer | 0x3A + Negative integer | number_integer | 0x3B Negative integer | number_integer | 0x40..0x57 UTF-8 string | string | 0x60..0x77 UTF-8 string | string | 0x78 UTF-8 string | string | 0x79 - UTF-8 string | string | 0x7a - UTF-8 string | string | 0x7b - UTF-8 string | string | 0x7f + UTF-8 string | string | 0x7A + UTF-8 string | string | 0x7B + UTF-8 string | string | 0x7F array | array | 0x80..0x97 array | array | 0x98 array | array | 0x99 - array | array | 0x9a - array | array | 0x9b - array | array | 0x9f - map | object | 0xa0..0xb7 - map | object | 0xb8 - map | object | 0xb9 - map | object | 0xba - map | object | 0xbb - map | object | 0xbf - False | `false` | 0xf4 - True | `true` | 0xf5 - Nill | `null` | 0xf6 - Half-Precision Float | number_float | 0xf9 - Single-Precision Float | number_float | 0xfa - Double-Precision Float | number_float | 0xfb + array | array | 0x9A + array | array | 0x9B + array | array | 0x9F + map | object | 0xA0..0xB7 + map | object | 0xB8 + map | object | 0xB9 + map | object | 0xBA + map | object | 0xBB + map | object | 0xBF + False | `false` | 0xF4 + True | `true` | 0xF5 + Nill | `null` | 0xF6 + Half-Precision Float | number_float | 0xF9 + Single-Precision Float | number_float | 0xFA + Double-Precision Float | number_float | 0xFB @warning The mapping is **incomplete** in the sense that not all CBOR types can be converted to a JSON value. 
The following CBOR types are not supported and will yield parse errors (parse_error.112): - - byte strings (0x40..0x5f) - - date/time (0xc0..0xc1) - - bignum (0xc2..0xc3) - - decimal fraction (0xc4) - - bigfloat (0xc5) - - tagged items (0xc6..0xd4, 0xd8..0xdb) - - expected conversions (0xd5..0xd7) - - simple values (0xe0..0xf3, 0xf8) - - undefined (0xf7) + - byte strings (0x40..0x5F) + - date/time (0xC0..0xC1) + - bignum (0xC2..0xC3) + - decimal fraction (0xC4) + - bigfloat (0xC5) + - tagged items (0xC6..0xD4, 0xD8..0xDB) + - expected conversions (0xD5..0xD7) + - simple values (0xE0..0xF3, 0xF8) + - undefined (0xF7) @warning CBOR allows map keys of any type, whereas JSON only allows strings as keys in object values. Therefore, CBOR maps with keys @@ -13426,38 +13566,38 @@ class basic_json MessagePack type | JSON value type | first byte ---------------- | --------------- | ---------- - positive fixint | number_unsigned | 0x00..0x7f - fixmap | object | 0x80..0x8f - fixarray | array | 0x90..0x9f - fixstr | string | 0xa0..0xbf - nil | `null` | 0xc0 - false | `false` | 0xc2 - true | `true` | 0xc3 - float 32 | number_float | 0xca - float 64 | number_float | 0xcb - uint 8 | number_unsigned | 0xcc - uint 16 | number_unsigned | 0xcd - uint 32 | number_unsigned | 0xce - uint 64 | number_unsigned | 0xcf - int 8 | number_integer | 0xd0 - int 16 | number_integer | 0xd1 - int 32 | number_integer | 0xd2 - int 64 | number_integer | 0xd3 - str 8 | string | 0xd9 - str 16 | string | 0xda - str 32 | string | 0xdb - array 16 | array | 0xdc - array 32 | array | 0xdd - map 16 | object | 0xde - map 32 | object | 0xdf - negative fixint | number_integer | 0xe0-0xff + positive fixint | number_unsigned | 0x00..0x7F + fixmap | object | 0x80..0x8F + fixarray | array | 0x90..0x9F + fixstr | string | 0xA0..0xBF + nil | `null` | 0xC0 + false | `false` | 0xC2 + true | `true` | 0xC3 + float 32 | number_float | 0xCA + float 64 | number_float | 0xCB + uint 8 | number_unsigned | 0xCC + uint 16 | number_unsigned | 0xCD + uint 32 | number_unsigned | 0xCE + uint 64 | number_unsigned | 0xCF + int 8 | number_integer | 0xD0 + int 16 | number_integer | 0xD1 + int 32 | number_integer | 0xD2 + int 64 | number_integer | 0xD3 + str 8 | string | 0xD9 + str 16 | string | 0xDA + str 32 | string | 0xDB + array 16 | array | 0xDC + array 32 | array | 0xDD + map 16 | object | 0xDE + map 32 | object | 0xDF + negative fixint | number_integer | 0xE0-0xFF @warning The mapping is **incomplete** in the sense that not all MessagePack types can be converted to a JSON value. The following MessagePack types are not supported and will yield parse errors: - - bin 8 - bin 32 (0xc4..0xc6) - - ext 8 - ext 32 (0xc7..0xc9) - - fixext 1 - fixext 16 (0xd4..0xd8) + - bin 8 - bin 32 (0xC4..0xC6) + - ext 8 - ext 32 (0xC7..0xC9) + - fixext 1 - fixext 16 (0xD4..0xD8) @note Any MessagePack output created @ref to_msgpack can be successfully parsed by @ref from_msgpack. @@ -13601,6 +13741,9 @@ class basic_json pointer @a ptr. As `at` provides checked access (and no elements are implicitly inserted), the index '-' is always invalid. See example below. + @throw out_of_range.403 if the JSON pointer describes a key of an object + which cannot be found. See example below. + @throw out_of_range.404 if the JSON pointer @a ptr can not be resolved. See example below. @@ -13641,6 +13784,9 @@ class basic_json pointer @a ptr. As `at` provides checked access (and no elements are implicitly inserted), the index '-' is always invalid. See example below. 
+ @throw out_of_range.403 if the JSON pointer describes a key of an object + which cannot be found. See example below. + @throw out_of_range.404 if the JSON pointer @a ptr can not be resolved. See example below. @@ -13856,7 +14002,7 @@ class basic_json } else { - const auto idx = std::stoi(last_path); + const auto idx = json_pointer::array_index(last_path); if (JSON_UNLIKELY(static_cast(idx) > parent.size())) { // avoid undefined behavior @@ -13904,7 +14050,7 @@ class basic_json else if (parent.is_array()) { // note erase performs range check - parent.erase(static_cast(std::stoi(last_path))); + parent.erase(static_cast(json_pointer::array_index(last_path))); } }; @@ -13999,7 +14145,12 @@ class basic_json const json_pointer from_ptr(from_path); // the "from" location must exist - use at() - result[ptr] = result.at(from_ptr); + basic_json v = result.at(from_ptr); + + // The copy is functionally identical to an "add" + // operation at the target location using the value + // specified in the "from" member. + operation_add(ptr, v); break; } @@ -14141,7 +14292,7 @@ class basic_json case value_t::object: { // first pass: traverse this object's elements - for (auto it = source.begin(); it != source.end(); ++it) + for (auto it = source.cbegin(); it != source.cend(); ++it) { // escape the key name to be used in a JSON patch const auto key = json_pointer::escape(it.key()); @@ -14163,7 +14314,7 @@ class basic_json } // second pass: traverse other object's elements - for (auto it = target.begin(); it != target.end(); ++it) + for (auto it = target.cbegin(); it != target.cend(); ++it) { if (source.find(it.key()) == source.end()) { @@ -14256,7 +14407,7 @@ json_pointer::get_and_create(NLOHMANN_BASIC_JSON_TPL& j) const // create an entry in the array JSON_TRY { - result = &result->operator[](static_cast(std::stoi(reference_token))); + result = &result->operator[](static_cast(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14333,7 +14484,7 @@ json_pointer::get_unchecked(NLOHMANN_BASIC_JSON_TPL* ptr) const JSON_TRY { ptr = &ptr->operator[]( - static_cast(std::stoi(reference_token))); + static_cast(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14388,7 +14539,7 @@ json_pointer::get_checked(NLOHMANN_BASIC_JSON_TPL* ptr) const // note: at performs range check JSON_TRY { - ptr = &ptr->at(static_cast(std::stoi(reference_token))); + ptr = &ptr->at(static_cast(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14443,7 +14594,7 @@ json_pointer::get_unchecked(const NLOHMANN_BASIC_JSON_TPL* ptr) const JSON_TRY { ptr = &ptr->operator[]( - static_cast(std::stoi(reference_token))); + static_cast(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { @@ -14497,7 +14648,7 @@ json_pointer::get_checked(const NLOHMANN_BASIC_JSON_TPL* ptr) const // note: at performs range check JSON_TRY { - ptr = &ptr->at(static_cast(std::stoi(reference_token))); + ptr = &ptr->at(static_cast(array_index(reference_token))); } JSON_CATCH(std::invalid_argument&) { From d43a8b25f044dffdbdd94777924e270cae3e0c8f Mon Sep 17 00:00:00 2001 From: Dan Peebles Date: Tue, 23 Jan 2018 14:13:30 -0500 Subject: [PATCH 0770/2196] Fix obscure corner case in name resolution for builtin:fetchurl in sandboxed environments --- src/libstore/build.cc | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 55066205db3..cca51f17ee2 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -6,6 +6,7 @@ 
#include "archive.hh" #include "affinity.hh" #include "builtins.hh" +#include "download.hh" #include "finally.hh" #include "compression.hh" #include "json.hh" @@ -1777,6 +1778,19 @@ PathSet exportReferences(Store & store, PathSet storePaths) return paths; } +static std::once_flag dns_resolve_flag; + +static void preloadNSS() { + /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of + one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already + been loaded in the parent. So we force a download of an invalid URL to force the NSS machinery to + load its lookup libraries in the parent before any child gets a chance to. */ + std::call_once(dns_resolve_flag, []() { + DownloadRequest request("http://this.pre-initializes.the.dns.resolvers.invalid"); + request.tries = 1; // We only need to do it once, and this also suppresses an annoying warning + try { getDownloader()->download(request); } catch (...) {} + }); +} void DerivationGoal::startBuilder() { @@ -1787,6 +1801,9 @@ void DerivationGoal::startBuilder() % drv->platform % settings.thisSystem % drvPath); } + if (drv->isBuiltin()) + preloadNSS(); + #if __APPLE__ additionalSandboxProfile = get(drv->env, "__sandboxProfile"); #endif From e09161d05cfbd7c6d4cf41a35765e3fe346ea181 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 26 Jan 2018 17:10:52 +0100 Subject: [PATCH 0771/2196] Remove signed-binary-caches as the default for require-sigs This was for backward compatibility. However, with security-related configuration settings, it's best not to have any confusion. Issue #495. --- src/libstore/globals.hh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 1e50e2d13e9..20ac8fe4e9a 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -287,10 +287,7 @@ public: Setting tarballTtl{this, 60 * 60, "tarball-ttl", "How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."}; - Setting signedBinaryCaches{this, "*", "signed-binary-caches", - "Obsolete."}; - - Setting requireSigs{this, signedBinaryCaches == "*", "require-sigs", + Setting requireSigs{this, true, "require-sigs", "Whether to check that any non-content-addressed path added to the " "Nix store has a valid signature (that is, one signed using a key " "listed in 'trusted-public-keys'."}; From 1d5d277ac7bf8a4bc601358b38746005416e935e Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Fri, 26 Jan 2018 11:12:30 -0800 Subject: [PATCH 0772/2196] HttpBinaryCacheStore: Support upsertFile with PUT. Some servers, such as Artifactory, allow uploading with PUT and BASIC auth. This allows nix copy to work to upload binaries to those servers. 
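As a usage sketch (not part of the diff above): with upsertFile implemented through HTTP PUT, an invocation along the following lines becomes possible. The cache URL and store path are placeholders, and the server is assumed to accept authenticated PUT requests.

    # Sketch only -- hypothetical cache URL and store path.
    # 'nix copy' can now upload to an HTTP(S) binary cache that accepts PUT:
    nix copy --to 'https://artifactory.example.invalid/artifactory/api/nix/cache' \
        /nix/store/<hash>-example-1.0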
Worked on together with @adelbertc --- src/libstore/download.cc | 28 ++++++++++++++++++++++++- src/libstore/download.hh | 6 ++++-- src/libstore/http-binary-cache-store.cc | 8 ++++++- 3 files changed, 38 insertions(+), 4 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index ef417685f1a..4b37826c46c 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -22,6 +22,7 @@ #include #include #include +#include using namespace std::string_literals; @@ -91,6 +92,8 @@ struct CurlDownloader : public Downloader { if (!request.expectedETag.empty()) requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); + if (!request.mimeType.empty()) + requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str()); } ~DownloadItem() @@ -185,6 +188,22 @@ struct CurlDownloader : public Downloader return 0; } + size_t readOffset = 0; + int readCallback(char *buffer, size_t size, size_t nitems) + { + if (readOffset == request.data->length()) + return 0; + auto count = std::min(size * nitems, request.data->length() - readOffset); + memcpy(buffer, request.data->data() + readOffset, count); + readOffset += count; + return count; + } + + static int readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp) + { + return ((DownloadItem *) userp)->readCallback(buffer, size, nitems); + } + long lowSpeedTimeout = 300; void init() @@ -225,6 +244,13 @@ struct CurlDownloader : public Downloader if (request.head) curl_easy_setopt(req, CURLOPT_NOBODY, 1); + if (request.data) { + curl_easy_setopt(req, CURLOPT_UPLOAD, 1L); + curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper); + curl_easy_setopt(req, CURLOPT_READDATA, this); + curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length()); + } + if (request.verifyTLS) { if (settings.caFile != "") curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); @@ -265,7 +291,7 @@ struct CurlDownloader : public Downloader } if (code == CURLE_OK && - (httpStatus == 200 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) + (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) { result.cached = httpStatus == 304; done = true; diff --git a/src/libstore/download.hh b/src/libstore/download.hh index f2d65ad8d61..0a278a05e0e 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -18,9 +18,11 @@ struct DownloadRequest unsigned int baseRetryTimeMs = 250; ActivityId parentAct; bool decompress = true; + std::shared_ptr data; + std::string mimeType; - DownloadRequest(const std::string & uri) - : uri(uri), parentAct(curActivity) { } + DownloadRequest(const std::string & uri, std::shared_ptr data = nullptr, std::string mimeType = "") + : uri(uri), parentAct(curActivity), data(std::move(data)), mimeType(std::move(mimeType)) { } }; struct DownloadResult diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 05733768579..93bd3e5d598 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -67,7 +67,13 @@ class HttpBinaryCacheStore : public BinaryCacheStore const std::string & data, const std::string & mimeType) override { - throw UploadToHTTP("uploading to an HTTP binary cache is not supported"); + auto data_ = std::make_shared(data); + auto req = DownloadRequest(cacheUri + "/" + 
path, data_, mimeType); + try { + getDownloader()->download(req); + } catch (DownloadError & e) { + throw UploadToHTTP(format("uploading to HTTP binary cache at %1% not supported: %2%") % cacheUri % e.msg()); + } } void getFile(const std::string & path, From 746f8aed86a990a6a8277cd2596f83166a73d718 Mon Sep 17 00:00:00 2001 From: Spencer Baugh Date: Sun, 14 Jan 2018 21:20:22 -0500 Subject: [PATCH 0773/2196] remote_store: register for NIX_REMOTE=unix://path This allows overriding the socket path so the daemon may be listening at an arbitrary Unix domain socket location. Fixes #1800 --- src/libstore/remote-store.cc | 26 ++++++++++++++++++++++++-- src/libstore/remote-store.hh | 2 ++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 77b41b6bf8a..8f0b65557ac 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -78,9 +78,22 @@ UDSRemoteStore::UDSRemoteStore(const Params & params) } +UDSRemoteStore::UDSRemoteStore(std::string socket_path, const Params & params) + : Store(params) + , LocalFSStore(params) + , RemoteStore(params) + , path(socket_path) +{ +} + + std::string UDSRemoteStore::getUri() { - return "daemon"; + if (path) { + return std::string("unix://") + *path; + } else { + return "daemon"; + } } @@ -98,7 +111,7 @@ ref UDSRemoteStore::openConnection() throw SysError("cannot create Unix domain socket"); closeOnExec(conn->fd.get()); - string socketPath = settings.nixDaemonSocketFile; + string socketPath = path ? *path : settings.nixDaemonSocketFile; struct sockaddr_un addr; addr.sun_family = AF_UNIX; @@ -721,5 +734,14 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source) } } +static std::string uriScheme = "unix://"; + +static RegisterStoreImplementation regStore([]( + const std::string & uri, const Store::Params & params) + -> std::shared_ptr +{ + if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0; + return std::make_shared(std::string(uri, uriScheme.size()), params); +}); } diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 30c6beae6ff..7f36e206416 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -134,6 +134,7 @@ class UDSRemoteStore : public LocalFSStore, public RemoteStore public: UDSRemoteStore(const Params & params); + UDSRemoteStore(std::string path, const Params & params); std::string getUri() override; @@ -145,6 +146,7 @@ private: }; ref openConnection() override; + std::experimental::optional path; }; From 0167eac571d6d92f18640d05cfb7fa10a4cd0fd9 Mon Sep 17 00:00:00 2001 From: Eric Wolf Date: Sat, 27 Jan 2018 15:10:51 +0100 Subject: [PATCH 0774/2196] Improve manual on inheriting attributes Expands first paragraph a bit Adds a more comprehensive example --- .../expressions/language-constructs.xml | 32 +++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/doc/manual/expressions/language-constructs.xml b/doc/manual/expressions/language-constructs.xml index 2f0027d479c..47d95f8a13e 100644 --- a/doc/manual/expressions/language-constructs.xml +++ b/doc/manual/expressions/language-constructs.xml @@ -61,7 +61,7 @@ evaluates to "foobar". Inheriting attributes -When defining a set it is often convenient to copy variables +When defining a set or in a let-expression it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). This can be shortened using the inherit keyword. 
For instance, @@ -72,7 +72,15 @@ let x = 123; in y = 456; } -evaluates to { x = 123; y = 456; }. (Note that +is equivalent to + + +let x = 123; in +{ x = x; + y = 456; +} + +and both evaluate to { x = 123; y = 456; }. (Note that this works because x is added to the lexical scope by the let construct.) It is also possible to inherit attributes from another set. For instance, in this fragment @@ -101,6 +109,26 @@ variables from the surrounding scope (fetchurl libXaw (the X Athena Widgets) from the xlibs (X11 client-side libraries) set. + +Summarizing the fragment + + +... +inherit x y z; +inherit (src-set) a b c; +... + +is equivalent to + + +... +x = x; y = y; z = z; +a = src-set.a; b = src-set.b; c = src-set.c; +... + +when used while defining local variables in a let-expression or +while defining a set. + From f93e890b4d19f48fefde449c46c29e8330b23998 Mon Sep 17 00:00:00 2001 From: Ben Gamari Date: Sun, 28 Jan 2018 14:19:56 -0500 Subject: [PATCH 0775/2196] configure: Use $CPP instead of cpp directly The latter breaks in the case of cross-compilation, when `cpp` bears a target prefix. --- configure.ac | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index 9db92ce9140..83b2346d065 100644 --- a/configure.ac +++ b/configure.ac @@ -61,6 +61,7 @@ CFLAGS= CXXFLAGS= AC_PROG_CC AC_PROG_CXX +AC_PROG_CPP AX_CXX_COMPILE_STDCXX_11 @@ -199,7 +200,7 @@ AC_SUBST(ENABLE_S3, [$enable_s3]) AC_LANG_POP(C++) if test -n "$enable_s3"; then - declare -a aws_version_tokens=($(printf '#include \nAWS_SDK_VERSION_STRING' | cpp -E | grep -v '^#.*' | sed 's/"//g' | tr '.' ' ')) + declare -a aws_version_tokens=($(printf '#include \nAWS_SDK_VERSION_STRING' | $CPP - | grep -v '^#.*' | sed 's/"//g' | tr '.' ' ')) AC_DEFINE_UNQUOTED([AWS_VERSION_MAJOR], ${aws_version_tokens@<:@0@:>@}, [Major version of aws-sdk-cpp.]) AC_DEFINE_UNQUOTED([AWS_VERSION_MINOR], ${aws_version_tokens@<:@1@:>@}, [Minor version of aws-sdk-cpp.]) fi From 9f9393df5504c43be95df55ed1c5d5a90ea214a2 Mon Sep 17 00:00:00 2001 From: Giorgio Gallo Date: Mon, 29 Jan 2018 21:33:17 +0100 Subject: [PATCH 0776/2196] solves #1582 --- scripts/install-darwin-multi-user.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 2e9d368c020..8d59c1c2b9a 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -695,7 +695,7 @@ install_from_extracted_nix() { cd "$EXTRACTED_NIX_PATH" _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ - rsync -rlpt "$(pwd)/store/" "$NIX_ROOT/store/" + rsync -rlpt ./store/* "$NIX_ROOT/store/" if [ -d "$NIX_INSTALLED_NIX" ]; then echo " Alright! 
We have our first nix at $NIX_INSTALLED_NIX" From 478e3e46497322d00430c77d27f36eb64be7f9ba Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2018 16:22:59 +0100 Subject: [PATCH 0777/2196] Indent properly --- src/libstore/download.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index ef417685f1a..3121e6441f4 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -312,10 +312,10 @@ struct CurlDownloader : public Downloader case CURLE_BAD_FUNCTION_ARGUMENT: case CURLE_INTERFACE_FAILED: case CURLE_UNKNOWN_OPTION: - err = Misc; - break; + err = Misc; + break; default: // Shut up warnings - break; + break; } } From f8e8dd827eb9005fcc3ba42fd0855729ef77d9fb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 31 Jan 2018 15:08:46 +0100 Subject: [PATCH 0778/2196] Manual: Remove old cruft --- doc/manual/expressions/debug-build.xml | 34 --------------- .../expressions/simple-building-testing.xml | 2 - doc/manual/installation/installing-binary.xml | 10 ----- .../installation/supported-platforms.xml | 3 -- doc/manual/introduction/quick-start.xml | 2 +- doc/manual/manual.xml | 6 +-- .../troubleshooting/collisions-nixenv.xml | 38 ---------------- .../troubleshooting/links-nix-store.xml | 43 ------------------- .../troubleshooting/troubleshooting.xml | 16 ------- 9 files changed, 2 insertions(+), 152 deletions(-) delete mode 100644 doc/manual/expressions/debug-build.xml delete mode 100644 doc/manual/troubleshooting/collisions-nixenv.xml delete mode 100644 doc/manual/troubleshooting/links-nix-store.xml delete mode 100644 doc/manual/troubleshooting/troubleshooting.xml diff --git a/doc/manual/expressions/debug-build.xml b/doc/manual/expressions/debug-build.xml deleted file mode 100644 index 0c1f4e6719b..00000000000 --- a/doc/manual/expressions/debug-build.xml +++ /dev/null @@ -1,34 +0,0 @@ -
- -Debugging Build Failures - -At the beginning of each phase of the build (such as unpacking, -building or installing), the set of all shell variables is written to -the file env-vars at the top-level build -directory. This is useful for debugging: it allows you to recreate -the environment in which a build was performed. For instance, if a -build fails, then assuming you used the flag, you -can go to the output directory and switch to the -environment of the builder: - - -$ nix-build -K ./foo.nix -... fails, keeping build directory `/tmp/nix-1234-0' - -$ cd /tmp/nix-1234-0 - -$ source env-vars - -(edit some files...) - -$ make - -(execution continues with the same GCC, make, etc.) - - - -
diff --git a/doc/manual/expressions/simple-building-testing.xml b/doc/manual/expressions/simple-building-testing.xml index bd3901a1335..0348c082b20 100644 --- a/doc/manual/expressions/simple-building-testing.xml +++ b/doc/manual/expressions/simple-building-testing.xml @@ -81,6 +81,4 @@ Just pass the option
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index c0e735cd314..b21d3dd8a93 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -705,6 +705,9 @@ void removeTempRoots(); * ‘daemon’: The Nix store accessed via a Unix domain socket connection to nix-daemon. + * ‘unix://’: The Nix store accessed via a Unix domain socket + connection to nix-daemon, with the socket located at . + * ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on whether the user has write access to the local Nix store/database. From 855699855fd271e0b8e43001bf1793ca3ef9b71c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2018 10:39:16 +0100 Subject: [PATCH 0784/2196] Remove obsolete references to manifests Closes #323. --- src/libstore/gc.cc | 4 +--- tests/common.sh.in | 1 - 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index ab2c5ca0274..943b16c28fa 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -324,10 +324,8 @@ Roots LocalStore::findRootsNoTemp() { Roots roots; - /* Process direct roots in {gcroots,manifests,profiles}. */ + /* Process direct roots in {gcroots,profiles}. */ findRoots(stateDir + "/" + gcRootsDir, DT_UNKNOWN, roots); - if (pathExists(stateDir + "/manifests")) - findRoots(stateDir + "/manifests", DT_UNKNOWN, roots); findRoots(stateDir + "/profiles", DT_UNKNOWN, roots); /* Add additional roots returned by the program specified by the diff --git a/tests/common.sh.in b/tests/common.sh.in index 186f9d6b955..195205988af 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -11,7 +11,6 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var export NIX_LOG_DIR=$TEST_ROOT/var/log/nix export NIX_STATE_DIR=$TEST_ROOT/var/nix export NIX_CONF_DIR=$TEST_ROOT/etc -export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests export _NIX_TEST_SHARED=$TEST_ROOT/shared if [[ -n $NIX_STORE ]]; then export _NIX_TEST_NO_SANDBOX=1 From e7b23eb5ab418ca7194f6575014f40bf4d9e6afd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2018 16:40:58 +0100 Subject: [PATCH 0785/2196] Remove docs on removed --drv-link and --add-drv-link options --- doc/manual/command-ref/nix-build.xml | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/doc/manual/command-ref/nix-build.xml b/doc/manual/command-ref/nix-build.xml index d6b2e5e5adb..40fe7a43f10 100644 --- a/doc/manual/command-ref/nix-build.xml +++ b/doc/manual/command-ref/nix-build.xml @@ -29,8 +29,6 @@ attrPath - drvlink - @@ -91,25 +89,6 @@ also . - drvlink - - Add a symlink named - drvlink to the store derivation - produced by nix-instantiate. The derivation is - a root of the garbage collector until the symlink is deleted or - renamed. If there are multiple derivations, numbers are suffixed - to drvlink to distinguish between - them. - - - - - - Shorthand for - ./derivation. - - - Do not create a symlink to the output path. Note From 19477e8815a7c9b91c82278ec0e22957c97bec16 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2018 17:09:56 +0100 Subject: [PATCH 0786/2196] nix-build: Ignore --indirect Note that nix-build always creates indirect roots. Fixes #1830. 
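As a usage sketch (not part of the diff below): after this change the flag is simply accepted and ignored, since nix-build registers an indirect GC root for the ./result symlink in either case; the attribute used here is only an example.

    # Sketch only: both invocations now behave the same way.
    nix-build '<nixpkgs>' -A hello
    nix-build --indirect '<nixpkgs>' -A hello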
--- src/nix-build/nix-build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 1b249427537..3d02276bf42 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -141,7 +141,7 @@ void mainWrapped(int argc, char * * argv) else if (*arg == "--version") printVersion(myName); - else if (*arg == "--add-drv-link") + else if (*arg == "--add-drv-link" || *arg == "--indirect") ; // obsolete else if (*arg == "--no-out-link" || *arg == "--no-link") From 84722d67d2b09b5c28e1c2d9dd438ba592df2296 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 3 Feb 2018 10:04:29 +0100 Subject: [PATCH 0787/2196] Remove nix-build --hash Instead, if a fixed-output derivation produces has an incorrect output hash, we now unconditionally move the outputs to the path corresponding with the actual hash and register it as valid. Thus, after correcting the hash in the Nix expression (e.g. in a fetchurl call), the fixed-output derivation doesn't have to be built again. It would still be good to have a command for reporting the actual hash of a fixed-output derivation (instead of throwing an error), but "nix-build --hash" didn't do that. --- doc/manual/release-notes/rl-2.0.xml | 10 +++--- src/libstore/build.cc | 47 +++++++++++++++++------------ src/libstore/store-api.hh | 2 +- src/nix-build/nix-build.cc | 3 -- src/nix-store/nix-store.cc | 1 - tests/fixed.sh | 13 ++++++-- 6 files changed, 43 insertions(+), 33 deletions(-) diff --git a/doc/manual/release-notes/rl-2.0.xml b/doc/manual/release-notes/rl-2.0.xml index 0d485c36928..32cdb1d0cef 100644 --- a/doc/manual/release-notes/rl-2.0.xml +++ b/doc/manual/release-notes/rl-2.0.xml @@ -99,11 +99,11 @@ - New build mode nix-build --hash that - builds a derivation, computes the hash of the output, and moves - the output to the store path corresponding to what a fixed-output - derivation with that hash would produce. - (Add docs and examples; see d367b8e7875161e655deaa96bf8a5dd0bcf8229e) + If a fixed-output derivation produces a result with an + incorrect hash, the output path will be moved to the location + corresponding to the actual hash and registered as valid. Thus, a + subsequent build of the fixed-output derivation with the correct + hash is unnecessary. diff --git a/src/libstore/build.cc b/src/libstore/build.cc index cca51f17ee2..d4b93b5104c 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1124,11 +1124,6 @@ void DerivationGoal::haveDerivation() return; } - /* Reject doing a hash build of anything other than a fixed-output - derivation. */ - if (buildMode == bmHash && !drv->isFixedOutput()) - throw Error("cannot do a hash build of non-fixed-output derivation '%1%'", drvPath); - /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build them. */ @@ -1320,9 +1315,7 @@ void DerivationGoal::inputsRealised() allPaths.insert(inputPaths.begin(), inputPaths.end()); /* Is this a fixed-output derivation? 
*/ - fixedOutput = true; - for (auto & i : drv->outputs) - if (i.second.hash == "") fixedOutput = false; + fixedOutput = drv->isFixedOutput(); /* Don't repeat fixed-output derivations since they're already verified by their output hash.*/ @@ -3019,6 +3012,8 @@ void DerivationGoal::registerOutputs() bool runDiffHook = settings.runDiffHook; bool keepPreviousRound = settings.keepFailed || runDiffHook; + std::exception_ptr delayedException; + /* Check whether the output paths were created, and grep each output path to determine what other paths it references. Also make all output paths read-only. */ @@ -3093,7 +3088,7 @@ void DerivationGoal::registerOutputs() /* Check that fixed-output derivations produced the right outputs (i.e., the content hash should match the specified hash). */ - if (i.second.hash != "") { + if (fixedOutput) { bool recursive; Hash h; i.second.parseHashInfo(recursive, h); @@ -3109,27 +3104,34 @@ void DerivationGoal::registerOutputs() /* Check the hash. In hash mode, move the path produced by the derivation to its content-addressed location. */ Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath); - if (buildMode == bmHash) { - Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]); - printError(format("build produced path '%1%' with %2% hash '%3%'") - % dest % printHashType(h.type) % printHash16or32(h2)); - if (worker.store.isValidPath(dest)) - return; + + Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]); + + if (h != h2) { + + /* Throw an error after registering the path as + valid. */ + delayedException = std::make_exception_ptr( + BuildError("fixed-output derivation produced path '%s' with %s hash '%s' instead of the expected hash '%s'", + dest, printHashType(h.type), printHash16or32(h2), printHash16or32(h))); + Path actualDest = worker.store.toRealPath(dest); + + if (worker.store.isValidPath(dest)) + std::rethrow_exception(delayedException); + if (actualPath != actualDest) { PathLocks outputLocks({actualDest}); deletePath(actualDest); if (rename(actualPath.c_str(), actualDest.c_str()) == -1) throw SysError(format("moving '%1%' to '%2%'") % actualPath % dest); } + path = dest; actualPath = actualDest; - } else { - if (h != h2) - throw BuildError( - format("output path '%1%' has %2% hash '%3%' when '%4%' was expected") - % path % i.second.hashAlgo % printHash16or32(h2) % printHash16or32(h)); } + else + assert(path == dest); info.ca = makeFixedOutputCA(recursive, h2); } @@ -3306,6 +3308,11 @@ void DerivationGoal::registerOutputs() paths referenced by each of them. If there are cycles in the outputs, this will fail. */ worker.store.registerValidPaths(infos); + + /* In case of a fixed-output derivation hash mismatch, throw an + exception now that we have registered the output as valid. 
*/ + if (delayedException) + std::rethrow_exception(delayedException); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index b21d3dd8a93..70f23e1fcaf 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -192,7 +192,7 @@ struct ValidPathInfo typedef list ValidPathInfos; -enum BuildMode { bmNormal, bmRepair, bmCheck, bmHash }; +enum BuildMode { bmNormal, bmRepair, bmCheck }; struct BuildResult diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 3d02276bf42..1581c282c75 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -167,9 +167,6 @@ void mainWrapped(int argc, char * * argv) buildMode = bmRepair; } - else if (*arg == "--hash") - buildMode = bmHash; - else if (*arg == "--run-env") // obsolete runEnv = true; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index f6f276dd179..4fc3421c0dd 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -122,7 +122,6 @@ static void opRealise(Strings opFlags, Strings opArgs) if (i == "--dry-run") dryRun = true; else if (i == "--repair") buildMode = bmRepair; else if (i == "--check") buildMode = bmCheck; - else if (i == "--hash") buildMode = bmHash; else if (i == "--ignore-unknown") ignoreUnknown = true; else throw UsageError(format("unknown flag '%1%'") % i); diff --git a/tests/fixed.sh b/tests/fixed.sh index cac3f0be91b..8f51403a707 100644 --- a/tests/fixed.sh +++ b/tests/fixed.sh @@ -5,15 +5,22 @@ clearStore export IMPURE_VAR1=foo export IMPURE_VAR2=bar +path=$(nix-store -q $(nix-instantiate fixed.nix -A good.0)) + +echo 'testing bad...' +nix-build fixed.nix -A bad --no-out-link && fail "should fail" + +# Building with the bad hash should produce the "good" output path as +# a side-effect. +[[ -e $path ]] +nix path-info --json $path | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd + echo 'testing good...' nix-build fixed.nix -A good --no-out-link echo 'testing good2...' nix-build fixed.nix -A good2 --no-out-link -echo 'testing bad...' -nix-build fixed.nix -A bad --no-out-link && fail "should fail" - echo 'testing reallyBad...' 
nix-instantiate fixed.nix -A reallyBad && fail "should fail" From 0ffa615420fc1ee67a03dcb1a491fed9e8cdb782 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 5 Feb 2018 10:25:26 -0600 Subject: [PATCH 0788/2196] busybox shell: enable various useful/expected features Matches changes made in nixpkgs: https://github.com/NixOS/nixpkgs/pull/34628 --- release-common.nix | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/release-common.nix b/release-common.nix index 4553118e1f5..de5af452c0d 100644 --- a/release-common.nix +++ b/release-common.nix @@ -6,10 +6,22 @@ rec { enableStatic = true; enableMinimal = true; extraConfig = '' + CONFIG_FEATURE_FANCY_ECHO y + CONFIG_FEATURE_SH_MATH y + CONFIG_FEATURE_SH_MATH_64 y + CONFIG_ASH y + CONFIG_ASH_OPTIMIZE_FOR_SIZE y + + CONFIG_ASH_ALIAS y + CONFIG_ASH_BASH_COMPAT y + CONFIG_ASH_CMDCMD y CONFIG_ASH_ECHO y + CONFIG_ASH_GETOPTS y + CONFIG_ASH_INTERNAL_GLOB y + CONFIG_ASH_JOB_CONTROL y + CONFIG_ASH_PRINTF y CONFIG_ASH_TEST y - CONFIG_ASH_OPTIMIZE_FOR_SIZE y ''; }; From 2175eee9fec07dea32e07471946d26a242a07760 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2018 17:46:43 +0100 Subject: [PATCH 0789/2196] Fix segfault using non-binary cache stores as substituters --- src/libstore/build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d4b93b5104c..5540d57a867 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3670,7 +3670,7 @@ void SubstitutionGoal::tryNext() /* Update the total expected download size. */ auto narInfo = std::dynamic_pointer_cast(info); - maintainExpectedNar = std::make_unique>(worker.expectedNarSize, narInfo->narSize); + maintainExpectedNar = std::make_unique>(worker.expectedNarSize, info->narSize); maintainExpectedDownload = narInfo && narInfo->fileSize From bb1d046f5c37a01ee85fc30d4602e8be8213eb84 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2018 18:08:30 +0100 Subject: [PATCH 0790/2196] Allow substituters to be marked as trusted This is needed by nixos-install, which uses the Nix store on the installation CD as a substituter. We don't want to disable signature checking entirely because substitutes from cache.nixos.org should still be checked. So now we can pas "local?trusted=1" to mark only the Nix store in /nix as not requiring signatures. Fixes #1819. --- src/libstore/build.cc | 7 +++++-- src/libstore/store-api.hh | 2 ++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 5540d57a867..5be7ce60dab 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3684,7 +3684,10 @@ void SubstitutionGoal::tryNext() /* Bail out early if this substituter lacks a valid signature. LocalStore::addToStore() also checks for this, but only after we've downloaded the path. */ - if (worker.store.requireSigs && !info->checkSignatures(worker.store, worker.store.publicKeys)) { + if (worker.store.requireSigs + && !sub->isTrusted + && !info->checkSignatures(worker.store, worker.store.publicKeys)) + { printInfo(format("warning: substituter '%s' does not have a valid signature for path '%s'") % sub->getUri() % storePath); tryNext(); @@ -3752,7 +3755,7 @@ void SubstitutionGoal::tryToRun() PushActivity pact(act.id); copyStorePath(ref(sub), ref(worker.store.shared_from_this()), - storePath, repair); + storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs); promise.set_value(); } catch (...) 
{ diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 70f23e1fcaf..aa83c2ded8c 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -248,6 +248,8 @@ public: const Setting pathInfoCacheSize{this, 65536, "path-info-cache-size", "size of the in-memory store path information cache"}; + const Setting isTrusted{this, false, "trusted", "whether paths from this store can be used as substitutes even when they lack trusted signatures"}; + protected: struct State From 55012ec0b93351a0747b8d5d58b83ebc29dfaf61 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2018 18:32:23 +0100 Subject: [PATCH 0791/2196] Tweak progress bar message Say "copying" instead of "fetching" when copying from another local store. Nice for nixos-install. --- src/nix/progress-bar.cc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index fb9955190b4..8bffda54a55 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -167,7 +167,12 @@ class ProgressBar : public Logger if (type == actSubstitute) { auto name = storePathToName(getS(fields, 0)); - i->s = fmt("fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", name, getS(fields, 1)); + auto sub = getS(fields, 1); + i->s = fmt( + hasPrefix(sub, "local") + ? "copying " ANSI_BOLD "%s" ANSI_NORMAL " from %s" + : "fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", + name, sub); } if (type == actQueryPathInfo) { From 47dc6076afa6c7b919320353ac299f9551acd39e Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 5 Feb 2018 11:28:27 -0600 Subject: [PATCH 0792/2196] release-common: use shell from nixpkgs, provide fallback for compat --- release-common.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/release-common.nix b/release-common.nix index 4553118e1f5..a4ae24ba482 100644 --- a/release-common.nix +++ b/release-common.nix @@ -1,7 +1,9 @@ { pkgs }: rec { - sh = pkgs.busybox.override { + # Use "busybox-sandbox-shell" if present, + # if not (legacy) fallback and hope it's sufficient. + sh = pkgs.busybox-sandbox-shell or (pkgs.busybox.override { useMusl = true; enableStatic = true; enableMinimal = true; @@ -11,7 +13,7 @@ rec { CONFIG_ASH_TEST y CONFIG_ASH_OPTIMIZE_FOR_SIZE y ''; - }; + }); configureFlags = [ "--disable-init-state" From f539085e651b133f023e8d02a8036124ac47d36c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2018 21:48:09 +0100 Subject: [PATCH 0793/2196] Fix evaluation --- tests/nix-copy-closure.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix index 0bf5b42d84a..be0a4a683cd 100644 --- a/tests/nix-copy-closure.nix +++ b/tests/nix-copy-closure.nix @@ -2,7 +2,7 @@ { nixpkgs, system, nix }: -with import (nixpkgs + /nixos/lib/testing.nix) { inherit system; }; +with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; }; makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in { From f24e726ba53e23235d33d8bdc7877ad3a8632fde Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 6 Feb 2018 14:35:14 +0100 Subject: [PATCH 0794/2196] checkURI(): Check file URIs against allowedPaths This makes e.g. 'fetchGit ./.' work (assuming that ./. is an allowed path). 
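As a usage sketch (not part of the diff below): under restricted evaluation, a plain path or file:// URI passed to a fetcher is now accepted when it lies inside an allowed path, for instance one added with -I. The path below is a placeholder and is assumed to be a local git checkout.

    # Sketch only: the -I entry makes /path/to/checkout an allowed path,
    # so fetching it no longer triggers a restricted-mode error.
    nix-instantiate --eval --restrict-eval -I /path/to/checkout \
        -E '(builtins.fetchGit /path/to/checkout).outPath'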
--- src/libexpr/eval.cc | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 33a9bc61428..7775cbe53cc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -378,6 +378,18 @@ void EvalState::checkURI(const std::string & uri) && (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/'))) return; + /* If the URI is a path, then check it against allowedPaths as + well. */ + if (hasPrefix(uri, "/")) { + checkSourcePath(uri); + return; + } + + if (hasPrefix(uri, "file://")) { + checkSourcePath(std::string(uri, 7)); + return; + } + throw RestrictedPathError("access to URI '%s' is forbidden in restricted mode", uri); } From 43f8ef73c6aeb23aee40d485556004d6262d4e3b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 6 Feb 2018 15:38:45 +0100 Subject: [PATCH 0795/2196] realiseContext(): Add derivation outputs to the allowed paths This makes import-from-derivation work in restricted mode again. --- src/libexpr/primops.cc | 34 ++++++++++++++++++++++++---------- tests/restricted.sh | 2 ++ 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 975f0e8309e..5fe7da21643 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -49,24 +49,38 @@ InvalidPathError::InvalidPathError(const Path & path) : void EvalState::realiseContext(const PathSet & context) { PathSet drvs; + for (auto & i : context) { std::pair decoded = decodeContext(i); Path ctx = decoded.first; assert(store->isStorePath(ctx)); if (!store->isValidPath(ctx)) throw InvalidPathError(ctx); - if (!decoded.second.empty() && nix::isDerivation(ctx)) + if (!decoded.second.empty() && nix::isDerivation(ctx)) { drvs.insert(decoded.first + "!" + decoded.second); + + /* Add the output of this derivation to the allowed + paths. */ + if (allowedPaths) { + auto drv = store->derivationFromPath(decoded.first); + DerivationOutputs::iterator i = drv.outputs.find(decoded.second); + if (i == drv.outputs.end()) + throw Error("derivation '%s' does not have an output named '%s'", decoded.first, decoded.second); + allowedPaths->insert(i->second.path); + } + } } - if (!drvs.empty()) { - if (!settings.enableImportFromDerivation) - throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin())); - /* For performance, prefetch all substitute info. */ - PathSet willBuild, willSubstitute, unknown; - unsigned long long downloadSize, narSize; - store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); - store->buildPaths(drvs); - } + + if (drvs.empty()) return; + + if (!settings.enableImportFromDerivation) + throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin())); + + /* For performance, prefetch all substitute info. */ + PathSet willBuild, willSubstitute, unknown; + unsigned long long downloadSize, narSize; + store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); + store->buildPaths(drvs); } diff --git a/tests/restricted.sh b/tests/restricted.sh index 6c0392facf3..0605383cc86 100644 --- a/tests/restricted.sh +++ b/tests/restricted.sh @@ -36,3 +36,5 @@ ln -sfn $(pwd)/restricted.nix $TEST_ROOT/restricted.nix (! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT) (! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I .) 
nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT -I . + +[[ $(nix eval --raw --restrict-eval -I . '(builtins.readFile "${import ./simple.nix}/hello")') == 'Hello World!' ]] From 6f6bfc820544c3fe9cc35ec67ed3f9d4c6a293a3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 6 Feb 2018 20:51:37 +0100 Subject: [PATCH 0796/2196] Update the progress bar at most 20 times per second Fixes #1834. --- src/nix/local.mk | 2 ++ src/nix/progress-bar.cc | 43 +++++++++++++++++++++++++++-------------- 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/src/nix/local.mk b/src/nix/local.mk index bddd53b168d..f76da194467 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -6,4 +6,6 @@ nix_SOURCES := $(wildcard $(d)/*.cc) $(wildcard src/linenoise/*.cpp) nix_LIBS = libexpr libmain libstore libutil libformat +nix_LDFLAGS = -pthread + $(eval $(call install-symlink, nix, $(bindir)/nix-hash)) diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index 8bffda54a55..252d12c5d37 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -3,8 +3,9 @@ #include "sync.hh" #include "store-api.hh" -#include #include +#include +#include namespace nix { @@ -101,15 +102,28 @@ class ProgressBar : public Logger Sync state_; + std::thread updateThread; + + std::condition_variable quitCV, updateCV; + public: ProgressBar() { + updateThread = std::thread([&]() { + auto state(state_.lock()); + while (state->active) { + state.wait(updateCV); + draw(*state); + state.wait_for(quitCV, std::chrono::milliseconds(50)); + } + }); } ~ProgressBar() { stop(); + updateThread.join(); } void stop() @@ -121,6 +135,8 @@ class ProgressBar : public Logger writeToStderr("\r\e[K"); if (status != "") writeToStderr("[" + status + "]\n"); + updateCV.notify_one(); + quitCV.notify_one(); } void log(Verbosity lvl, const FormatOrString & fs) override @@ -132,7 +148,7 @@ class ProgressBar : public Logger void log(State & state, Verbosity lvl, const std::string & s) { writeToStderr("\r\e[K" + s + ANSI_NORMAL "\n"); - update(state); + draw(state); } void startActivity(ActivityId act, Verbosity lvl, ActivityType type, @@ -185,7 +201,7 @@ class ProgressBar : public Logger || (type == actCopyPath && hasAncestor(*state, actSubstitute, parent))) i->visible = false; - update(*state); + update(); } /* Check whether an activity has an ancestore with the specified @@ -220,7 +236,7 @@ class ProgressBar : public Logger state->its.erase(i); } - update(*state); + update(); } void result(ActivityId act, ResultType type, const std::vector & fields) override @@ -230,7 +246,7 @@ class ProgressBar : public Logger if (type == resFileLinked) { state->filesLinked++; state->bytesLinked += getI(fields, 0); - update(*state); + update(); } else if (type == resBuildLogLine) { @@ -243,25 +259,25 @@ class ProgressBar : public Logger info.lastLine = lastLine; state->activities.emplace_back(info); i->second = std::prev(state->activities.end()); - update(*state); + update(); } } else if (type == resUntrustedPath) { state->untrustedPaths++; - update(*state); + update(); } else if (type == resCorruptedPath) { state->corruptedPaths++; - update(*state); + update(); } else if (type == resSetPhase) { auto i = state->its.find(act); assert(i != state->its.end()); i->second->phase = getS(fields, 0); - update(*state); + update(); } else if (type == resProgress) { @@ -272,7 +288,7 @@ class ProgressBar : public Logger actInfo.expected = getI(fields, 1); actInfo.running = getI(fields, 2); actInfo.failed = getI(fields, 3); - update(*state); 
+ update(); } else if (type == resSetExpected) { @@ -284,17 +300,16 @@ class ProgressBar : public Logger state->activitiesByType[type].expected -= j; j = getI(fields, 1); state->activitiesByType[type].expected += j; - update(*state); + update(); } } void update() { - auto state(state_.lock()); - update(*state); + updateCV.notify_one(); } - void update(State & state) + void draw(State & state) { if (!state.active) return; From bc7e3a4dd62baa99dbd1985d329a2a806d59a422 Mon Sep 17 00:00:00 2001 From: AmineChikhaoui Date: Tue, 6 Feb 2018 22:42:02 +0100 Subject: [PATCH 0797/2196] support multi threaded xz encoder, this might be particularly useful in the case of hydra where the overhead of single threaded encoding is more noticeable e.g most of the time spent in "Sending inputs"/"Receiving outputs" is due to compression while the actual upload to the binary cache seems to be negligible. --- src/libutil/compression.cc | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 5e2631ba340..aad6e9b5bc1 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -191,8 +191,13 @@ struct XzSink : CompressionSink XzSink(Sink & nextSink) : nextSink(nextSink) { - lzma_ret ret = lzma_easy_encoder( - &strm, 6, LZMA_CHECK_CRC64); + lzma_mt mt_options = {}; + mt_options.flags = 0; + mt_options.timeout = 300; + mt_options.check = LZMA_CHECK_CRC64; + mt_options.threads = lzma_cputhreads(); + lzma_ret ret = lzma_stream_encoder_mt( + &strm, &mt_options); if (ret != LZMA_OK) throw CompressionError("unable to initialise lzma encoder"); // FIXME: apply the x86 BCJ filter? From 69d82e5c58bf6d7e16fc296f598c352da2a618d0 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Thu, 25 Jan 2018 07:05:57 -0800 Subject: [PATCH 0798/2196] Add path primop. builtins.path allows specifying the name of a path (which makes paths with store-illegal names now addable), allows adding paths with flat instead of recursive hashes, allows specifying a filter (so is a generalization of filterSource), and allows specifying an expected hash (enabling safe path adding in pure mode). --- doc/manual/expressions/builtins.xml | 74 ++++++++++++++++++++++- src/libexpr/eval.cc | 2 +- src/libexpr/primops.cc | 93 +++++++++++++++++++++++------ src/libstore/store-api.cc | 5 +- src/libstore/store-api.hh | 6 +- tests/lang/data | 1 + tests/lang/eval-okay-path.exp | 1 + tests/lang/eval-okay-path.nix | 7 +++ 8 files changed, 162 insertions(+), 27 deletions(-) create mode 100644 tests/lang/data create mode 100644 tests/lang/eval-okay-path.exp create mode 100644 tests/lang/eval-okay-path.nix diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 5a3a8645c1d..81770bcf629 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -308,8 +308,9 @@ stdenv.mkDerivation { … } - builtins.filterSource - e1 e2 + + builtins.filterSource + e1 e2 @@ -768,6 +769,75 @@ Evaluates to [ "foo" ]. + + + builtins.path + args + + + + + An enrichment of the built-in path type, based on the attributes + present in args. All are optional + except path: + + + + + path + + The underlying path. + + + + name + + + The name of the path when added to the store. This can + used to reference paths that have nix-illegal characters + in their names, like @. + + + + + filter + + + A function of the type expected by + builtins.filterSource, + with the same semantics. 
+ + + + + recursive + + + When false, when + path is added to the store it is with a + flat hash, rather than a hash of the NAR serialization of + the file. Thus, path must refer to a + regular file, not a directory. This allows similar + behavior to fetchurl. Defaults to + true. + + + + + sha256 + + + When provided, this is the expected hash of the file at + the path. Evaluation will fail if the hash is incorrect, + and providing a hash allows + builtins.path to be used even when the + pure-eval nix config option is on. + + + + + + builtins.pathExists path diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 33a9bc61428..9499ebe7098 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1566,7 +1566,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path) dstPath = srcToStore[path]; else { dstPath = settings.readOnlyMode - ? store->computeStorePathForPath(checkSourcePath(path)).first + ? store->computeStorePathForPath(baseNameOf(path), checkSourcePath(path)).first : store->addToStore(baseNameOf(path), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair); srcToStore[path] = dstPath; printMsg(lvlChatty, format("copied source '%1%' -> '%2%'") diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 975f0e8309e..5c8dfd9dfe4 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1009,20 +1009,13 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu } -static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_, + Value * filterFun, bool recursive, const Hash & expectedHash, Value & v) { - PathSet context; - Path path = state.coerceToPath(pos, *args[1], context); - if (!context.empty()) - throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); - - state.forceValue(*args[0]); - if (args[0]->type != tLambda) - throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); - - path = state.checkSourcePath(path); - - PathFilter filter = [&](const Path & path) { + const auto path = settings.pureEval && expectedHash ? + path_ : + state.checkSourcePath(path_); + PathFilter filter = filterFun ? ([&](const Path & path) { auto st = lstat(path); /* Call the filter function. The first argument is the path, @@ -1031,7 +1024,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args mkString(arg1, path); Value fun2; - state.callFunction(*args[0], arg1, fun2, noPos); + state.callFunction(*filterFun, arg1, fun2, noPos); Value arg2; mkString(arg2, @@ -1044,16 +1037,79 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args state.callFunction(fun2, arg2, res, noPos); return state.forceBool(res, pos); - }; + }) : defaultPathFilter; - Path dstPath = settings.readOnlyMode - ? state.store->computeStorePathForPath(path, true, htSHA256, filter).first - : state.store->addToStore(baseNameOf(path), path, true, htSHA256, filter, state.repair); + Path expectedStorePath; + if (expectedHash) { + expectedStorePath = + state.store->makeFixedOutputPath(recursive, expectedHash, name); + } + Path dstPath; + if (!expectedHash || !state.store->isValidPath(expectedStorePath)) { + dstPath = settings.readOnlyMode + ? 
state.store->computeStorePathForPath(name, path, recursive, htSHA256, filter).first + : state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair); + if (expectedHash && expectedStorePath != dstPath) { + throw Error(format("store path mismatch in (possibly filtered) path added from '%1%'") % path); + } + } else + dstPath = expectedStorePath; mkString(v, dstPath, {dstPath}); } +static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + PathSet context; + Path path = state.coerceToPath(pos, *args[1], context); + if (!context.empty()) + throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); + + state.forceValue(*args[0]); + if (args[0]->type != tLambda) + throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); + + addPath(state, pos, baseNameOf(path), path, args[0], true, Hash(), v); +} + +static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + state.forceAttrs(*args[0], pos); + Path path; + string name; + Value * filterFun = nullptr; + auto recursive = true; + Hash expectedHash; + + for (auto & attr : *args[0]->attrs) { + const string & n(attr.name); + if (n == "path") { + PathSet context; + path = state.coerceToPath(*attr.pos, *attr.value, context); + if (!context.empty()) + throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % *attr.pos); + } else if (attr.name == state.sName) + name = state.forceStringNoCtx(*attr.value, *attr.pos); + else if (n == "filter") { + state.forceValue(*attr.value); + filterFun = attr.value; + } else if (n == "recursive") + recursive = state.forceBool(*attr.value, *attr.pos); + else if (n == "sha256") + expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + else + throw EvalError(format("unsupported argument '%1%' to 'addPath', at %2%") % attr.name % *attr.pos); + } + if (path.empty()) + throw EvalError(format("'path' required, at %1%") % pos); + if (name.empty()) + name = baseNameOf(path); + + addPath(state, pos, name, path, filterFun, recursive, expectedHash, v); +} + + /************************************************************* * Sets *************************************************************/ @@ -2071,6 +2127,7 @@ void EvalState::createBaseEnv() addPrimOp("__fromJSON", 1, prim_fromJSON); addPrimOp("__toFile", 2, prim_toFile); addPrimOp("__filterSource", 2, prim_filterSource); + addPrimOp("__path", 1, prim_path); // Sets addPrimOp("__attrNames", 1, prim_attrNames); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 77ab87ef728..7abb300a9bb 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -222,11 +222,10 @@ Path Store::makeTextPath(const string & name, const Hash & hash, } -std::pair Store::computeStorePathForPath(const Path & srcPath, - bool recursive, HashType hashAlgo, PathFilter & filter) const +std::pair Store::computeStorePathForPath(const string & name, + const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter) const { Hash h = recursive ? 
hashPath(hashAlgo, srcPath, filter).first : hashFile(hashAlgo, srcPath); - string name = baseNameOf(srcPath); Path dstPath = makeFixedOutputPath(recursive, h, name); return std::pair(dstPath, h); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index c0e735cd314..bf0862ef1bb 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -305,9 +305,9 @@ public: /* This is the preparatory part of addToStore(); it computes the store path to which srcPath is to be copied. Returns the store path and the cryptographic hash of the contents of srcPath. */ - std::pair computeStorePathForPath(const Path & srcPath, - bool recursive = true, HashType hashAlgo = htSHA256, - PathFilter & filter = defaultPathFilter) const; + std::pair computeStorePathForPath(const string & name, + const Path & srcPath, bool recursive = true, + HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const; /* Preparatory part of addTextToStore(). diff --git a/tests/lang/data b/tests/lang/data new file mode 100644 index 00000000000..257cc5642cb --- /dev/null +++ b/tests/lang/data @@ -0,0 +1 @@ +foo diff --git a/tests/lang/eval-okay-path.exp b/tests/lang/eval-okay-path.exp new file mode 100644 index 00000000000..6827d49ffa1 --- /dev/null +++ b/tests/lang/eval-okay-path.exp @@ -0,0 +1 @@ +"/run/user/1000/nix-test/store/wjagrv37lfvfx92g2gf3yqflwypj0q1y-output" diff --git a/tests/lang/eval-okay-path.nix b/tests/lang/eval-okay-path.nix new file mode 100644 index 00000000000..e67168cf3ed --- /dev/null +++ b/tests/lang/eval-okay-path.nix @@ -0,0 +1,7 @@ +builtins.path + { path = ./.; + filter = path: _: baseNameOf path == "data"; + recursive = true; + sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw"; + name = "output"; + } From 9d1e22f743ea9ca232d39d498b675d7e5ac1ca87 Mon Sep 17 00:00:00 2001 From: AmineChikhaoui Date: Wed, 7 Feb 2018 11:18:55 +0100 Subject: [PATCH 0799/2196] set block size to 0 to let the lzma lib choose the right one, add some comments about possible improvements wrt memory usage/threading. --- src/libutil/compression.cc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index aad6e9b5bc1..a36c4405f8b 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -193,9 +193,14 @@ struct XzSink : CompressionSink { lzma_mt mt_options = {}; mt_options.flags = 0; - mt_options.timeout = 300; + mt_options.timeout = 300; // Using the same setting as the xz cmd line mt_options.check = LZMA_CHECK_CRC64; mt_options.threads = lzma_cputhreads(); + mt_options.block_size = 0; + if (mt_options.threads == 0) + mt_options.threads = 1; + // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the + // number of threads. 
lzma_ret ret = lzma_stream_encoder_mt( &strm, &mt_options); if (ret != LZMA_OK) From cfdfad5c3451731879a5a693059c094f107c2bc8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Feb 2018 14:15:20 +0100 Subject: [PATCH 0800/2196] Simplify --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index d9c4f1efb4b..9e04f0b67f9 100644 --- a/release.nix +++ b/release.nix @@ -225,7 +225,7 @@ let }); tests.setuid = pkgs.lib.genAttrs - (pkgs.lib.filter (system: system == "x86_64-linux" || system == "i686-linux") systems) + ["i686-linux" "x86_64-linux"] (system: import ./tests/setuid.nix rec { inherit nixpkgs; From 84989d3af23c717744b8ddeacd6828bc87e7eda1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Feb 2018 15:19:10 +0100 Subject: [PATCH 0801/2196] Improve filtering of ANSI escape sequences in build logs All ANSI sequences except color setting are now filtered out. In particular, terminal resets (such as from NixOS VM tests) are filtered out. Also, fix the completely broken tab character handling. --- src/libstore/build.cc | 2 +- src/libutil/logging.cc | 2 +- src/libutil/util.cc | 67 +++++++++++++++++++++++++---------------- src/libutil/util.hh | 10 +++--- src/nix/progress-bar.cc | 43 +++----------------------- tests/misc.sh | 2 +- 6 files changed, 54 insertions(+), 72 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 5be7ce60dab..9f669f7e464 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3428,7 +3428,7 @@ void DerivationGoal::flushLine() else { if (settings.verboseBuild && (settings.printRepeatedBuilds || curRound == 1)) - printError(filterANSIEscapes(currentLogLine, true)); + printError(currentLogLine); else { logTail.push_back(currentLogLine); if (logTail.size() > settings.logLines) logTail.pop_front(); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 6924e008047..27a631a37d1 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -44,7 +44,7 @@ class SimpleLogger : public Logger prefix = std::string("<") + c + ">"; } - writeToStderr(prefix + (tty ? 
fs.s : filterANSIEscapes(fs.s)) + "\n"); + writeToStderr(prefix + filterANSIEscapes(fs.s) + "\n"); } void startActivity(ActivityId act, Verbosity lvl, ActivityType type, diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 27299739779..f7a12d21b24 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1178,36 +1178,51 @@ void ignoreException() } -string filterANSIEscapes(const string & s, bool nixOnly) -{ - string t, r; - enum { stTop, stEscape, stCSI } state = stTop; - for (auto c : s) { - if (state == stTop) { - if (c == '\e') { - state = stEscape; - r = c; - } else - t += c; - } else if (state == stEscape) { - r += c; - if (c == '[') - state = stCSI; - else { - t += r; - state = stTop; +std::string filterANSIEscapes(const std::string & s, unsigned int width) +{ + std::string t, e; + size_t w = 0; + auto i = s.begin(); + + while (w < (size_t) width && i != s.end()) { + + if (*i == '\e') { + std::string e; + e += *i++; + char last = 0; + + if (i != s.end() && *i == '[') { + e += *i++; + // eat parameter bytes + while (i != s.end() && *i >= 0x30 && *i <= 0x3f) e += *i++; + // eat intermediate bytes + while (i != s.end() && *i >= 0x20 && *i <= 0x2f) e += *i++; + // eat final byte + if (i != s.end() && *i >= 0x40 && *i <= 0x7e) e += last = *i++; + } else { + if (i != s.end() && *i >= 0x40 && *i <= 0x5f) e += *i++; } - } else { - r += c; - if (c >= 0x40 && c <= 0x7e) { - if (nixOnly && (c != 'p' && c != 'q' && c != 's' && c != 'a' && c != 'b')) - t += r; - state = stTop; - r.clear(); + + if (last == 'm') + t += e; + } + + else if (*i == '\t') { + i++; t += ' '; w++; + while (w < (size_t) width && w % 8) { + t += ' '; w++; } } + + else if (*i == '\r') + // do nothing for now + ; + + else { + t += *i++; w++; + } } - t += r; + return t; } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 75eb9751524..47e02bc898a 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -388,10 +388,12 @@ void ignoreException(); #define ANSI_BLUE "\e[34;1m" -/* Filter out ANSI escape codes from the given string. If ‘nixOnly’ is - set, only filter escape codes generated by Nixpkgs' stdenv (used to - denote nesting etc.). */ -string filterANSIEscapes(const string & s, bool nixOnly = false); +/* Truncate a string to 'width' printable characters. Certain ANSI + escape sequences (such as colour setting) are copied but not + included in the character count. Other ANSI escape sequences are + filtered. Also, tabs are expanded to spaces. */ +std::string filterANSIEscapes(const std::string & s, + unsigned int width = std::numeric_limits::max()); /* Base64 encoding/decoding. */ diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc index 252d12c5d37..e6553c06f4a 100644 --- a/src/nix/progress-bar.cc +++ b/src/nix/progress-bar.cc @@ -23,44 +23,6 @@ static uint64_t getI(const std::vector & fields, size_t n) return fields[n].i; } -/* Truncate a string to 'width' printable characters. ANSI escape - sequences are copied but not included in the character count. Also, - tabs are expanded to spaces. 
*/ -static std::string ansiTruncate(const std::string & s, int width) -{ - if (width <= 0) return s; - - std::string t; - size_t w = 0; - auto i = s.begin(); - - while (w < (size_t) width && i != s.end()) { - if (*i == '\e') { - t += *i++; - if (i != s.end() && *i == '[') { - t += *i++; - while (i != s.end() && (*i < 0x40 || *i > 0x7e)) { - t += *i++; - } - if (i != s.end()) t += *i++; - } - } - - else if (*i == '\t') { - t += ' '; w++; - while (w < (size_t) width && w & 8) { - t += ' '; w++; - } - } - - else { - t += *i++; w++; - } - } - - return t; -} - class ProgressBar : public Logger { private: @@ -343,7 +305,10 @@ class ProgressBar : public Logger } } - writeToStderr("\r" + ansiTruncate(line, getWindowSize().second) + "\e[K"); + auto width = getWindowSize().second; + if (width <= 0) std::numeric_limits::max(); + + writeToStderr("\r" + filterANSIEscapes(line, width) + "\e[K"); } std::string getStatus(State & state) diff --git a/tests/misc.sh b/tests/misc.sh index 6d0ab3adcec..eda0164167f 100644 --- a/tests/misc.sh +++ b/tests/misc.sh @@ -16,4 +16,4 @@ nix-env --foo 2>&1 | grep "no operation" nix-env -q --foo 2>&1 | grep "unknown flag" # Eval Errors. -nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 | grep "infinite recursion encountered, at (string):1:15$" +nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 | grep "infinite recursion encountered, at .*(string).*:1:15$" From 48c192ca2d5bc65b69d2336c8577258f8eb80cf8 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Wed, 7 Feb 2018 10:26:53 -0500 Subject: [PATCH 0802/2196] builtins.path test: Don't rely on shlevy's XDG_RUNTIME_DIR --- tests/lang/eval-okay-path.exp | 1 - 1 file changed, 1 deletion(-) delete mode 100644 tests/lang/eval-okay-path.exp diff --git a/tests/lang/eval-okay-path.exp b/tests/lang/eval-okay-path.exp deleted file mode 100644 index 6827d49ffa1..00000000000 --- a/tests/lang/eval-okay-path.exp +++ /dev/null @@ -1 +0,0 @@ -"/run/user/1000/nix-test/store/wjagrv37lfvfx92g2gf3yqflwypj0q1y-output" From 55ecdfe2a83a161c27d6497733cdc60fa112a43d Mon Sep 17 00:00:00 2001 From: AmineChikhaoui Date: Wed, 7 Feb 2018 17:54:08 +0100 Subject: [PATCH 0803/2196] make multi threaded compression configurable and use single threaded by default. 
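Illustrative sketch, not part of this patch: the choice introduced in the diff below amounts to initialising liblzma either with the classic single-threaded encoder or with the multi-threaded one, guarded by a flag. The helper name initXzEncoder is hypothetical, and it assumes a liblzma new enough to provide lzma_stream_encoder_mt.

    // Hypothetical helper mirroring the selection done in XzSink below.
    #include <lzma.h>

    static lzma_ret initXzEncoder(lzma_stream & strm, bool parallel)
    {
        if (parallel) {
            lzma_mt mt_options = {};
            mt_options.flags = 0;
            mt_options.timeout = 300;              // same setting as the xz command line
            mt_options.preset = LZMA_PRESET_DEFAULT;
            mt_options.filters = NULL;
            mt_options.check = LZMA_CHECK_CRC64;
            mt_options.block_size = 0;             // 0 = let liblzma pick the block size
            mt_options.threads = lzma_cputhreads();
            if (mt_options.threads == 0)           // lzma_cputhreads() can return 0 on failure
                mt_options.threads = 1;
            return lzma_stream_encoder_mt(&strm, &mt_options);
        }
        // Single-threaded default, unchanged from the previous behaviour.
        return lzma_easy_encoder(&strm, 6, LZMA_CHECK_CRC64);
    }

Keeping single-threaded as the default is a conservative choice: the multi-threaded encoder uses more memory and, because it splits the input into blocks, tends to compress slightly worse on small inputs.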
--- src/libstore/binary-cache-store.cc | 2 +- src/libstore/globals.hh | 3 +++ src/libutil/compression.cc | 42 ++++++++++++++++++------------ src/libutil/compression.hh | 4 +-- 4 files changed, 31 insertions(+), 20 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ab971dd8b6d..d34adbd60c7 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -149,7 +149,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refcompression = compression; auto now1 = std::chrono::steady_clock::now(); - auto narCompressed = compress(compression, *nar); + auto narCompressed = compress(compression, *nar, settings.parallelCompression); auto now2 = std::chrono::steady_clock::now(); narInfo->fileHash = hashString(htSHA256, *narCompressed); narInfo->fileSize = narCompressed->size(); diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 20ac8fe4e9a..aafec2ea269 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -174,6 +174,9 @@ public: "Whether to compress logs.", {"build-compress-log"}}; + Setting parallelCompression{this, false, "parallel-compression", + "Whether to enable parallel compression, only supported with xz currently"}; + Setting maxLogSize{this, 0, "max-build-log-size", "Maximum number of bytes a builder can write to stdout/stderr " "before being killed (0 means no limit).", diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index a36c4405f8b..ed15761b32a 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -151,10 +151,10 @@ static ref decompressBrotli(const std::string & in) #endif // HAVE_BROTLI } -ref compress(const std::string & method, const std::string & in) +ref compress(const std::string & method, const std::string & in, const bool parallel) { StringSink ssink; - auto sink = makeCompressionSink(method, ssink); + auto sink = makeCompressionSink(method, ssink, parallel); (*sink)(in); sink->finish(); return ssink.s; @@ -189,20 +189,28 @@ struct XzSink : CompressionSink lzma_stream strm = LZMA_STREAM_INIT; bool finished = false; - XzSink(Sink & nextSink) : nextSink(nextSink) + XzSink(Sink & nextSink, const bool parallel) : nextSink(nextSink) { - lzma_mt mt_options = {}; - mt_options.flags = 0; - mt_options.timeout = 300; // Using the same setting as the xz cmd line - mt_options.check = LZMA_CHECK_CRC64; - mt_options.threads = lzma_cputhreads(); - mt_options.block_size = 0; - if (mt_options.threads == 0) - mt_options.threads = 1; - // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the - // number of threads. - lzma_ret ret = lzma_stream_encoder_mt( - &strm, &mt_options); + lzma_ret ret; + if (parallel) { + lzma_mt mt_options = {}; + mt_options.flags = 0; + mt_options.timeout = 300; // Using the same setting as the xz cmd line + mt_options.preset = LZMA_PRESET_DEFAULT; + mt_options.filters = NULL; + mt_options.check = LZMA_CHECK_CRC64; + mt_options.threads = lzma_cputhreads(); + mt_options.block_size = 0; + if (mt_options.threads == 0) + mt_options.threads = 1; + // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the + // number of threads. + ret = lzma_stream_encoder_mt( + &strm, &mt_options); + } else + ret = lzma_easy_encoder( + &strm, 6, LZMA_CHECK_CRC64); + if (ret != LZMA_OK) throw CompressionError("unable to initialise lzma encoder"); // FIXME: apply the x86 BCJ filter? 
@@ -459,12 +467,12 @@ struct BrotliSink : CompressionSink }; #endif // HAVE_BROTLI -ref makeCompressionSink(const std::string & method, Sink & nextSink) +ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel) { if (method == "none") return make_ref(nextSink); else if (method == "xz") - return make_ref(nextSink); + return make_ref(nextSink, parallel); else if (method == "bzip2") return make_ref(nextSink); else if (method == "br") diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh index e3e6f5a9930..a0d7530d74f 100644 --- a/src/libutil/compression.hh +++ b/src/libutil/compression.hh @@ -8,7 +8,7 @@ namespace nix { -ref compress(const std::string & method, const std::string & in); +ref compress(const std::string & method, const std::string & in, const bool parallel = false); ref decompress(const std::string & method, const std::string & in); @@ -17,7 +17,7 @@ struct CompressionSink : BufferedSink virtual void finish() = 0; }; -ref makeCompressionSink(const std::string & method, Sink & nextSink); +ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false); MakeError(UnknownCompressionMethod, Error); From 88b5d0c8e89afefbc547b6243c5aa5a3ec8176e9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Feb 2018 19:07:38 +0100 Subject: [PATCH 0804/2196] Prevent accidental recursive Nix --- src/nix-daemon/nix-daemon.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index b5d49b6428a..0e5ff370cf7 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -994,7 +994,7 @@ static void daemonLoop(char * * argv) if (matchUser(user, group, trustedUsers)) trusted = true; - if (!trusted && !matchUser(user, group, allowedUsers)) + if ((!trusted && !matchUser(user, group, allowedUsers)) || group == "nixbld") throw Error(format("user '%1%' is not allowed to connect to the Nix daemon") % user); printInfo(format((string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : "")) From 47ad88099b1cd2b19bdf3eef35c21baf35cc7e82 Mon Sep 17 00:00:00 2001 From: AmineChikhaoui Date: Wed, 7 Feb 2018 21:06:11 +0100 Subject: [PATCH 0805/2196] move the parallel-compression setting to binary-cache-store, the setting can be done now from the url e.g s3://nix-cache?parallel-compression=1 instead of nix.conf. 
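Usage sketch, not part of this patch: with the setting attached to BinaryCacheStore, it is read from the store URI's query string when the store is opened, so callers opt in per cache instead of globally. The bucket name below is a placeholder.

    // Sketch: enabling parallel xz compression for one particular cache.
    #include "store-api.hh"

    nix::ref<nix::Store> openParallelCache()
    {
        // Roughly what '--store s3://nix-cache?parallel-compression=1' does on
        // the command line; "nix-cache" is a placeholder bucket name. NARs
        // written through this store go through the multi-threaded xz encoder,
        // while other stores keep the single-threaded default.
        return nix::openStore("s3://nix-cache?parallel-compression=1");
    }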
--- src/libstore/binary-cache-store.cc | 2 +- src/libstore/binary-cache-store.hh | 2 ++ src/libstore/globals.hh | 3 --- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index d34adbd60c7..d1b278b8efb 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -149,7 +149,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const refcompression = compression; auto now1 = std::chrono::steady_clock::now(); - auto narCompressed = compress(compression, *nar, settings.parallelCompression); + auto narCompressed = compress(compression, *nar, parallelCompression); auto now2 = std::chrono::steady_clock::now(); narInfo->fileHash = hashString(htSHA256, *narCompressed); narInfo->fileSize = narCompressed->size(); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 8492ff600eb..e20b968442b 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -19,6 +19,8 @@ public: const Setting writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"}; const Setting secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"}; const Setting localNarCache{this, "", "local-nar-cache", "path to a local cache of NARs"}; + const Setting parallelCompression{this, false, "parallel-compression", + "enable multi-threading compression, available for xz only currently"}; private: diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index aafec2ea269..20ac8fe4e9a 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -174,9 +174,6 @@ public: "Whether to compress logs.", {"build-compress-log"}}; - Setting parallelCompression{this, false, "parallel-compression", - "Whether to enable parallel compression, only supported with xz currently"}; - Setting maxLogSize{this, 0, "max-build-log-size", "Maximum number of bytes a builder can write to stdout/stderr " "before being killed (0 means no limit).", From f201b7733e22cc236a41093a88cc789239d994bd Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 7 Feb 2018 15:17:44 -0600 Subject: [PATCH 0806/2196] More completely fix recursive nix, unbreak tests See: https://github.com/NixOS/nix/commit/88b5d0c8e89afefbc547b6243c5aa5a3ec8176e9#commitcomment-27406365 --- src/nix-daemon/nix-daemon.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 0e5ff370cf7..d3a8ebbdda5 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -994,7 +994,7 @@ static void daemonLoop(char * * argv) if (matchUser(user, group, trustedUsers)) trusted = true; - if ((!trusted && !matchUser(user, group, allowedUsers)) || group == "nixbld") + if ((!trusted && !matchUser(user, group, allowedUsers)) || group == settings.buildUsersGroup) throw Error(format("user '%1%' is not allowed to connect to the Nix daemon") % user); printInfo(format((string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : "")) From 3780435a0e3166ff898b33b8eb9a057ffa16ce67 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 7 Feb 2018 14:57:44 -0600 Subject: [PATCH 0807/2196] tests: Add (failing) tests for reported --dry-run issues. 
--- tests/build-dry.sh | 49 ++++++++++++++++++++++++++++++++++++++++++++++ tests/local.mk | 1 + 2 files changed, 50 insertions(+) create mode 100644 tests/build-dry.sh diff --git a/tests/build-dry.sh b/tests/build-dry.sh new file mode 100644 index 00000000000..9a2a70d5f11 --- /dev/null +++ b/tests/build-dry.sh @@ -0,0 +1,49 @@ +source common.sh + +################################################### +# Check that --dry-run isn't confused with read-only mode +# https://github.com/NixOS/nix/issues/1795 + +clearStore +clearCache + +# Ensure this builds successfully first +nix build -f dependencies.nix + +clearStore +clearCache + +# Try --dry-run using old command first +nix-build dependencies.nix --dry-run 2>&1 | grep "will be built" +# Now new command: +nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built" + +clearStore +clearCache + +# Try --dry-run using new command first +nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built" +# Now old command: +nix-build dependencies.nix --dry-run 2>&1 | grep "will be built" + + +################################################### +# Check --dry-run doesn't create links with --dry-run +# https://github.com/NixOS/nix/issues/1849 +clearStore +clearCache + +RESULT=$TEST_ROOT/result-link +rm -f $RESULT + +nix-build dependencies.nix -o $RESULT --dry-run + +[[ ! -h $RESULT ]] || fail "nix-build --dry-run created output link" + +nix build -f dependencies.nix -o $RESULT --dry-run + +[[ ! -h $RESULT ]] || fail "nix build --dry-run created output link" + +nix build -f dependencies.nix -o $RESULT + +[[ -h $RESULT ]] diff --git a/tests/local.mk b/tests/local.mk index e90b9f7da4a..0035dca2d61 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -13,6 +13,7 @@ nix_tests = \ check-reqs.sh pass-as-file.sh tarball.sh restricted.sh \ placeholders.sh nix-shell.sh \ linux-sandbox.sh \ + build-dry.sh \ build-remote.sh \ nar-access.sh \ structured-attrs.sh \ From 98031b6050000bccef915b99cf3a2ed810a1b35e Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 7 Feb 2018 14:58:38 -0600 Subject: [PATCH 0808/2196] nix build: Don't create output links with --dry-run. Fixes #1849. --- src/nix/build.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/nix/build.cc b/src/nix/build.cc index f7c99f12dbb..093415a0de1 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -52,6 +52,8 @@ struct CmdBuild : MixDryRun, InstallablesCommand { auto buildables = toBuildables(store, dryRun ? 
DryRun : Build, installables); + if (dryRun) return; + for (size_t i = 0; i < buildables.size(); ++i) { auto & b(buildables[i]); From c7e0be1bfc79909cafd52645c88ceb8bcf2a588a Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 7 Feb 2018 15:08:13 -0600 Subject: [PATCH 0809/2196] build-dry: disable failing portion of test until issue is fixed --- tests/build-dry.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/build-dry.sh b/tests/build-dry.sh index 9a2a70d5f11..610e6070c5d 100644 --- a/tests/build-dry.sh +++ b/tests/build-dry.sh @@ -18,6 +18,9 @@ nix-build dependencies.nix --dry-run 2>&1 | grep "will be built" # Now new command: nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built" +# TODO: XXX: FIXME: #1793 +# Disable this part of the test until the problem is resolved: +if [ -n "$ISSUE_1795_IS_FIXED" ]; then clearStore clearCache @@ -25,7 +28,7 @@ clearCache nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built" # Now old command: nix-build dependencies.nix --dry-run 2>&1 | grep "will be built" - +fi ################################################### # Check --dry-run doesn't create links with --dry-run From 444bae44ef3c27959d837d2e1f28131290cb52a7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 8 Feb 2018 13:46:23 +0100 Subject: [PATCH 0810/2196] dsa -> ed25519 DSS is disabled by default in NixOS 18.03. https://hydra.nixos.org/build/68788560 --- tests/nix-copy-closure.nix | 6 +++--- tests/remote-builds.nix | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix index be0a4a683cd..0dc147fb34e 100644 --- a/tests/nix-copy-closure.nix +++ b/tests/nix-copy-closure.nix @@ -29,10 +29,10 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in { startAll; # Create an SSH key on the client. - my $key = `${pkgs.openssh}/bin/ssh-keygen -t dsa -f key -N ""`; + my $key = `${pkgs.openssh}/bin/ssh-keygen -t ed25519 -f key -N ""`; $client->succeed("mkdir -m 700 /root/.ssh"); - $client->copyFileFromHost("key", "/root/.ssh/id_dsa"); - $client->succeed("chmod 600 /root/.ssh/id_dsa"); + $client->copyFileFromHost("key", "/root/.ssh/id_ed25519"); + $client->succeed("chmod 600 /root/.ssh/id_ed25519"); # Install the SSH key on the server. $server->succeed("mkdir -m 700 /root/.ssh"); diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index 75704ace2db..d7a4b21989e 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -46,13 +46,13 @@ in nix.buildMachines = [ { hostName = "slave1"; sshUser = "root"; - sshKey = "/root/.ssh/id_dsa"; + sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; maxJobs = 1; } { hostName = "slave2"; sshUser = "root"; - sshKey = "/root/.ssh/id_dsa"; + sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; maxJobs = 1; } @@ -70,10 +70,10 @@ in startAll; # Create an SSH key on the client. - my $key = `${pkgs.openssh}/bin/ssh-keygen -t dsa -f key -N ""`; + my $key = `${pkgs.openssh}/bin/ssh-keygen -t ed25519 -f key -N ""`; $client->succeed("mkdir -p -m 700 /root/.ssh"); - $client->copyFileFromHost("key", "/root/.ssh/id_dsa"); - $client->succeed("chmod 600 /root/.ssh/id_dsa"); + $client->copyFileFromHost("key", "/root/.ssh/id_ed25519"); + $client->succeed("chmod 600 /root/.ssh/id_ed25519"); # Install the SSH key on the slaves. 
$client->waitForUnit("network.target"); From ad97a218344937d8586d0f4f710f3785da77f5bd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 8 Feb 2018 15:25:03 +0100 Subject: [PATCH 0811/2196] nix-env: Fix parsing of --system https://hydra.nixos.org/build/68827814 --- src/libmain/common-args.cc | 4 ++++ tests/user-envs.sh | 3 +++ 2 files changed, 7 insertions(+) diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index d3aac6aba1f..bcc05c2cdad 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -37,6 +37,10 @@ MixCommonArgs::MixCommonArgs(const string & programName) std::string cat = "config"; settings.convertToArgs(*this, cat); + + // Backward compatibility hack: nix-env already had a --system flag. + if (programName == "nix-env") longFlags.erase("system"); + hiddenCategories.insert(cat); } diff --git a/tests/user-envs.sh b/tests/user-envs.sh index c4192fdc59b..ba63923113d 100644 --- a/tests/user-envs.sh +++ b/tests/user-envs.sh @@ -24,6 +24,9 @@ rm -f $HOME/.nix-defexpr ln -s $(pwd)/user-envs.nix $HOME/.nix-defexpr nix-env -qa '*' --description | grep -q silly +# Query the system. +nix-env -qa '*' --system | grep -q $system + # Install "foo-1.0". nix-env -i foo-1.0 From 88cd2d41acb994684a3e4ead1b1676019f43b4b6 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Thu, 8 Feb 2018 11:26:18 -0500 Subject: [PATCH 0812/2196] Add plugins to make Nix more extensible. All plugins in plugin-files will be dlopened, allowing them to statically construct instances of the various Register* types Nix supports. --- .gitignore | 1 + Makefile | 3 ++- doc/manual/command-ref/conf-file.xml | 27 +++++++++++++++++++ doc/manual/release-notes/rl-2.0.xml | 7 +++++ mk/libraries.mk | 7 +++++ src/build-remote/build-remote.cc | 2 ++ src/libmain/shared.hh | 1 + src/libstore/globals.cc | 15 +++++++++++ src/libstore/globals.hh | 7 +++++ src/libstore/local.mk | 3 +++ src/nix-build/nix-build.cc | 2 ++ src/nix-channel/nix-channel.cc | 3 +++ .../nix-collect-garbage.cc | 2 ++ src/nix-copy-closure/nix-copy-closure.cc | 2 ++ src/nix-daemon/nix-daemon.cc | 2 ++ src/nix-env/nix-env.cc | 2 ++ src/nix-instantiate/nix-instantiate.cc | 2 ++ src/nix-prefetch-url/nix-prefetch-url.cc | 2 ++ src/nix-store/nix-store.cc | 2 ++ src/nix/main.cc | 2 ++ tests/local.mk | 5 ++-- tests/plugins.sh | 7 +++++ tests/plugins/local.mk | 9 +++++++ tests/plugins/plugintest.cc | 10 +++++++ 24 files changed, 122 insertions(+), 3 deletions(-) create mode 100644 tests/plugins.sh create mode 100644 tests/plugins/local.mk create mode 100644 tests/plugins/plugintest.cc diff --git a/.gitignore b/.gitignore index ce22fa007dc..0a959937856 100644 --- a/.gitignore +++ b/.gitignore @@ -38,6 +38,7 @@ perl/Makefile.config /scripts/nix-copy-closure /scripts/nix-reduce-build /scripts/nix-http-export.cgi +/scripts/nix-profile-daemon.sh # /src/libexpr/ /src/libexpr/lexer-tab.cc diff --git a/Makefile b/Makefile index 5d8e990cc5c..c867823fc48 100644 --- a/Makefile +++ b/Makefile @@ -24,7 +24,8 @@ makefiles = \ misc/launchd/local.mk \ misc/upstart/local.mk \ doc/manual/local.mk \ - tests/local.mk + tests/local.mk \ + tests/plugins/local.mk GLOBAL_CXXFLAGS += -std=c++14 -g -Wall -include config.h diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index fff7994f28d..cede6db3cea 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -742,6 +742,33 @@ builtins.fetchurl { + + plugin-files + + + A list of plugin files to be loaded by Nix. 
Each of these + files will be dlopened by Nix, allowing them to affect + execution through static initialization. In particular, these + plugins may construct static instances of RegisterPrimOp to + add new primops to the expression language, + RegisterStoreImplementation to add new store implementations, + and RegisterCommand to add new subcommands to the + nix command. See the constructors for those + types for more details. + + + Since these files are loaded into the same address space as + Nix itself, they must be DSOs compatible with the instance of + Nix running at the time (i.e. compiled against the same + headers, not linked to any incompatible libraries). They + should not be linked to any Nix libs directly, as those will + be available already at load time. + + + + + + diff --git a/doc/manual/release-notes/rl-2.0.xml b/doc/manual/release-notes/rl-2.0.xml index 32cdb1d0cef..effd2e39d30 100644 --- a/doc/manual/release-notes/rl-2.0.xml +++ b/doc/manual/release-notes/rl-2.0.xml @@ -389,6 +389,13 @@ configureFlags = "--prefix=${placeholder "out"} --includedir=${placeholder "dev"
+ + + Nix can now be extended with plugins. See the documentation of + the 'plugin-files' option for more details. + + + Some features were removed: diff --git a/mk/libraries.mk b/mk/libraries.mk index 3cd7a53107b..14c95fa91cf 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -45,6 +45,11 @@ endif # - $(1)_INSTALL_DIR: the directory where the library will be # installed. Defaults to $(libdir). # +# - $(1)_EXCLUDE_FROM_LIBRARY_LIST: if defined, the library will not +# be automatically marked as a dependency of the top-level all +# target andwill not be listed in the make help output. This is +# useful for libraries built solely for testing, for example. +# # - BUILD_SHARED_LIBS: if equal to ‘1’, a dynamic library will be # built, otherwise a static library. define build-library @@ -149,7 +154,9 @@ define build-library $(1)_DEPS := $$(foreach fn, $$($(1)_OBJS), $$(call filename-to-dep, $$(fn))) -include $$($(1)_DEPS) + ifndef $(1)_EXCLUDE_FROM_LIBRARY_LIST libs-list += $$($(1)_PATH) + endif clean-files += $$(_d)/*.a $$(_d)/*.$(SO_EXT) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS) dist-files += $$(_srcs) endef diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index df579729af2..50eb6b29e51 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -64,6 +64,8 @@ int main (int argc, char * * argv) settings.maxBuildJobs.set("1"); // hack to make tests with local?root= work + initPlugins(); + auto store = openStore().cast(); /* It would be more appropriate to use $XDG_RUNTIME_DIR, since diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 1dcc4f0ac94..8e4861232db 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -22,6 +22,7 @@ public: int handleExceptions(const string & programName, std::function fun); +/* Don't forget to call initPlugins() after settings are initialized! */ void initNix(); void parseCmdLine(int argc, char * * argv, diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index d3c96ddd6e6..21ab0e6296e 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -6,6 +6,7 @@ #include #include #include +#include namespace nix { @@ -137,4 +138,18 @@ void MaxBuildJobsSetting::set(const std::string & str) throw UsageError("configuration setting '%s' should be 'auto' or an integer", name); } + +void initPlugins() +{ + for (const auto & pluginFile : settings.pluginFiles.get()) { + /* handle is purposefully leaked as there may be state in the + DSO needed by the action of the plugin. */ + void *handle = + dlopen(pluginFile.c_str(), RTLD_LAZY | RTLD_LOCAL); + if (!handle) + throw Error(format("could not dynamically open plugin file '%1%': %2%") % pluginFile % dlerror()); + } +} + + } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 20ac8fe4e9a..508084d08ac 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -367,12 +367,19 @@ public: Setting allowedUris{this, {}, "allowed-uris", "Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."}; + + Setting pluginFiles{this, {}, "plugin-files", + "Plugins to dynamically load at nix initialization time."}; }; // FIXME: don't use a global variable. 
extern Settings settings; +/* This should be called after settings are initialized, but before + anything else */ +void initPlugins(); + extern const string nixVersion; diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 50c46ce6fe9..239356aee8d 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -9,6 +9,9 @@ libstore_SOURCES := $(wildcard $(d)/*.cc) libstore_LIBS = libutil libformat libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread +ifneq ($(OS), FreeBSD) + libstore_LDFLAGS += -ldl +endif libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 1581c282c75..99f773451ff 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -232,6 +232,8 @@ void mainWrapped(int argc, char * * argv) myArgs.parseCmdline(args); + initPlugins(); + if (packages && fromArgs) throw UsageError("'-p' and '-E' are mutually exclusive"); diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 370f216abcc..ec9a7174ecb 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -213,6 +213,9 @@ int main(int argc, char ** argv) } return true; }); + + initPlugins(); + switch (cmd) { case cNone: throw UsageError("no command specified"); diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index cc663a96924..37fe22f4813 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -77,6 +77,8 @@ int main(int argc, char * * argv) return true; }); + initPlugins(); + auto profilesDir = settings.nixStateDir + "/profiles"; if (removeOld) removeOldGenerations(profilesDir); diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index 861fc2e5cd6..dfb1b8fc5dc 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -44,6 +44,8 @@ int main(int argc, char ** argv) return true; }); + initPlugins(); + if (sshHost.empty()) throw UsageError("no host name specified"); diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index d3a8ebbdda5..890bffa19aa 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -1060,6 +1060,8 @@ int main(int argc, char * * argv) return true; }); + initPlugins(); + if (stdio) { if (getStoreType() == tDaemon) { /* Forward on this connection to the real daemon */ diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 016caf6d234..97e66cbd937 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -1393,6 +1393,8 @@ int main(int argc, char * * argv) myArgs.parseCmdline(argvToStrings(argc, argv)); + initPlugins(); + if (!op) throw UsageError("no operation specified"); auto store = openStore(); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index e05040a42de..dd262bea091 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -151,6 +151,8 @@ int main(int argc, char * * argv) myArgs.parseCmdline(argvToStrings(argc, argv)); + initPlugins(); + if (evalOnly && !wantsReadWrite) settings.readOnlyMode = true; diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index fef3eaa4553..fa7ee254500 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -89,6 +89,8 @@ 
int main(int argc, char * * argv) myArgs.parseCmdline(argvToStrings(argc, argv)); + initPlugins(); + if (args.size() > 2) throw UsageError("too many arguments"); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 4fc3421c0dd..efef7f15c09 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -1052,6 +1052,8 @@ int main(int argc, char * * argv) return true; }); + initPlugins(); + if (!op) throw UsageError("no operation specified"); if (op != opDump && op != opRestore) /* !!! hack */ diff --git a/src/nix/main.cc b/src/nix/main.cc index 8f6bbe8f51a..bb107ec7d3f 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -92,6 +92,8 @@ void mainWrapped(int argc, char * * argv) args.parseCmdline(argvToStrings(argc, argv)); + initPlugins(); + if (!args.command) args.showHelpAndExit(); Finally f([]() { stopProgressBar(); }); diff --git a/tests/local.mk b/tests/local.mk index e90b9f7da4a..51bc09dd432 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -22,7 +22,8 @@ nix_tests = \ run.sh \ brotli.sh \ pure-eval.sh \ - check.sh + check.sh \ + plugins.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) @@ -31,4 +32,4 @@ tests-environment = NIX_REMOTE= $(bash) -e clean-files += $(d)/common.sh -installcheck: $(d)/common.sh +installcheck: $(d)/common.sh $(d)/plugins/plugintest.so diff --git a/tests/plugins.sh b/tests/plugins.sh new file mode 100644 index 00000000000..6d18d1da0d1 --- /dev/null +++ b/tests/plugins.sh @@ -0,0 +1,7 @@ +source common.sh + +set -o pipefail + +res=$(nix eval '(builtins.constNull true)' --option plugin-files $PWD/plugins/plugintest.so) + +[ "$res"x = "nullx" ] diff --git a/tests/plugins/local.mk b/tests/plugins/local.mk new file mode 100644 index 00000000000..a5f19b087c8 --- /dev/null +++ b/tests/plugins/local.mk @@ -0,0 +1,9 @@ +libraries += plugintest + +plugintest_DIR := $(d) + +plugintest_SOURCES := $(d)/plugintest.cc + +plugintest_ALLOW_UNDEFINED := 1 + +plugintest_EXCLUDE_FROM_LIBRARY_LIST := 1 diff --git a/tests/plugins/plugintest.cc b/tests/plugins/plugintest.cc new file mode 100644 index 00000000000..f788c481432 --- /dev/null +++ b/tests/plugins/plugintest.cc @@ -0,0 +1,10 @@ +#include "primops.hh" + +using namespace nix; + +static void prim_constNull (EvalState & state, const Pos & pos, Value ** args, Value & v) +{ + mkNull(v); +} + +static RegisterPrimOp r("constNull", 1, prim_constNull); From 081f14a169d36243f97263acb41fb108af243619 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Thu, 8 Feb 2018 13:00:53 -0500 Subject: [PATCH 0813/2196] Allow using RegisterPrimop to define constants. This enables plugins to add new constants, as well as new primops. --- doc/manual/command-ref/conf-file.xml | 2 +- src/libexpr/eval.cc | 8 +++++++- src/libexpr/eval.hh | 2 +- src/libexpr/primops.hh | 3 +++ tests/plugins.sh | 2 +- tests/plugins/plugintest.cc | 4 ++-- 6 files changed, 15 insertions(+), 6 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index cede6db3cea..5c4561f66d8 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -750,7 +750,7 @@ builtins.fetchurl { files will be dlopened by Nix, allowing them to affect execution through static initialization. 
In particular, these plugins may construct static instances of RegisterPrimOp to - add new primops to the expression language, + add new primops or constants to the expression language, RegisterStoreImplementation to add new store implementations, and RegisterCommand to add new subcommands to the nix command. See the constructors for those diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0b0a0f7b179..1f3bbc0a53b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -404,7 +404,7 @@ Path EvalState::toRealPath(const Path & path, const PathSet & context) }; -void EvalState::addConstant(const string & name, Value & v) +Value * EvalState::addConstant(const string & name, Value & v) { Value * v2 = allocValue(); *v2 = v; @@ -412,12 +412,18 @@ void EvalState::addConstant(const string & name, Value & v) baseEnv.values[baseEnvDispl++] = v2; string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name; baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v2)); + return v2; } Value * EvalState::addPrimOp(const string & name, unsigned int arity, PrimOpFun primOp) { + if (arity == 0) { + Value v; + primOp(*this, noPos, nullptr, v); + return addConstant(name, v); + } Value * v = allocValue(); string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name; Symbol sym = symbols.create(name2); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 9e3d30d95f4..51905d7e1c6 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -210,7 +210,7 @@ private: void createBaseEnv(); - void addConstant(const string & name, Value & v); + Value * addConstant(const string & name, Value & v); Value * addPrimOp(const string & name, unsigned int arity, PrimOpFun primOp); diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 39d23b04a5c..31bf3f84f6c 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -9,6 +9,9 @@ struct RegisterPrimOp { typedef std::vector> PrimOps; static PrimOps * primOps; + /* You can register a constant by passing an arity of 0. fun + will get called during EvalState initialization, so there + may be primops not yet added and builtins is not yet sorted. */ RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun); }; diff --git a/tests/plugins.sh b/tests/plugins.sh index 6d18d1da0d1..23caf04f338 100644 --- a/tests/plugins.sh +++ b/tests/plugins.sh @@ -2,6 +2,6 @@ source common.sh set -o pipefail -res=$(nix eval '(builtins.constNull true)' --option plugin-files $PWD/plugins/plugintest.so) +res=$(nix eval '(builtins.anotherNull)' --option plugin-files $PWD/plugins/plugintest.so) [ "$res"x = "nullx" ] diff --git a/tests/plugins/plugintest.cc b/tests/plugins/plugintest.cc index f788c481432..6b5e6d7cde2 100644 --- a/tests/plugins/plugintest.cc +++ b/tests/plugins/plugintest.cc @@ -2,9 +2,9 @@ using namespace nix; -static void prim_constNull (EvalState & state, const Pos & pos, Value ** args, Value & v) +static void prim_anotherNull (EvalState & state, const Pos & pos, Value ** args, Value & v) { mkNull(v); } -static RegisterPrimOp r("constNull", 1, prim_constNull); +static RegisterPrimOp r("anotherNull", 0, prim_anotherNull); From aa02cdc33ceccce35e56d61535fdb2c55f13cc44 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Feb 2018 14:36:38 +0100 Subject: [PATCH 0814/2196] getDefaultSubstituters(): Skip broken substituters Fixes #1340. 
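The diff below is only a few lines; as a standalone sketch of the same warn-and-continue idea (the function name and includes here are illustrative, not from the patch): a substituter whose store fails to open, say an unreachable cache or a malformed URI, is reported and skipped rather than aborting the whole command.

    // Sketch of the pattern applied in getDefaultSubstituters() below.
    #include <list>
    #include "logging.hh"
    #include "store-api.hh"

    using namespace nix;

    static std::list<ref<Store>> openSubstituters(const Strings & uris)
    {
        std::list<ref<Store>> stores;
        for (auto & uri : uris) {
            try {
                stores.push_back(openStore(uri));
            } catch (Error & e) {
                // Degrade gracefully: warn and continue with the rest.
                printError("warning: %s", e.what());
            }
        }
        return stores;
    }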
--- src/libstore/store-api.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 7abb300a9bb..4d43ef082d5 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -896,7 +896,11 @@ std::list> getDefaultSubstituters() auto addStore = [&](const std::string & uri) { if (done.count(uri)) return; done.insert(uri); - stores.push_back(openStore(uri)); + try { + stores.push_back(openStore(uri)); + } catch (Error & e) { + printError("warning: %s", e.what()); + } }; for (auto uri : settings.substituters.get()) From 960e9c560e335c65d378805fdb2594e7079a0c45 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Feb 2018 15:05:41 +0100 Subject: [PATCH 0815/2196] nix: Ensure that the user sees errors from substituters --- src/libstore/build.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 9f669f7e464..392b494e65e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3688,8 +3688,8 @@ void SubstitutionGoal::tryNext() && !sub->isTrusted && !info->checkSignatures(worker.store, worker.store.publicKeys)) { - printInfo(format("warning: substituter '%s' does not have a valid signature for path '%s'") - % sub->getUri() % storePath); + printError("warning: substituter '%s' does not have a valid signature for path '%s'", + sub->getUri(), storePath); tryNext(); return; } @@ -3779,7 +3779,7 @@ void SubstitutionGoal::finished() try { promise.get_future().get(); } catch (Error & e) { - printInfo(e.msg()); + printError(e.msg()); /* Try the next substitute. */ state = &SubstitutionGoal::tryNext; From 5a082ad15a0f46dc1cbfd8aa5cb3ad9d94b5f178 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Fri, 9 Feb 2018 20:57:31 -0600 Subject: [PATCH 0816/2196] configure.ac: check if lzma has MT support, fix deb build/etc. --- configure.ac | 2 ++ src/libutil/compression.cc | 2 ++ 2 files changed, 4 insertions(+) diff --git a/configure.ac b/configure.ac index 83b2346d065..c7498fb5240 100644 --- a/configure.ac +++ b/configure.ac @@ -175,6 +175,8 @@ AC_SUBST(HAVE_SODIUM, [$have_sodium]) # Look for liblzma, a required dependency. PKG_CHECK_MODULES([LIBLZMA], [liblzma], [CXXFLAGS="$LIBLZMA_CFLAGS $CXXFLAGS"]) +AC_CHECK_LIB([lzma], [lzma_stream_encoder_mt], + [AC_DEFINE([HAVE_LZMA_MT], [1], [xz multithreaded compression support])]) # Look for libbrotli{enc,dec}, optional dependencies diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index ed15761b32a..c509472b384 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -192,6 +192,7 @@ struct XzSink : CompressionSink XzSink(Sink & nextSink, const bool parallel) : nextSink(nextSink) { lzma_ret ret; +#ifdef HAVE_LZMA_MT if (parallel) { lzma_mt mt_options = {}; mt_options.flags = 0; @@ -208,6 +209,7 @@ struct XzSink : CompressionSink ret = lzma_stream_encoder_mt( &strm, &mt_options); } else +#endif ret = lzma_easy_encoder( &strm, 6, LZMA_CHECK_CRC64); From 60eca585339b5cca7d04d82c48b9d5feb2d628ae Mon Sep 17 00:00:00 2001 From: Frederik Rietdijk Date: Sun, 11 Feb 2018 14:37:50 +0100 Subject: [PATCH 0817/2196] Nix stats: flatten statistics Flattens the list of statistics as suggested in https://github.com/NixOS/ofborg/issues/67. This makes it easier to work with. 
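Concretely, and with made-up numbers purely for illustration: a combined line in the statistics printed when NIX_SHOW_STATS is set, such as

      environments allocated: 1000 (25600 bytes)

becomes two separately keyed lines, so a consumer such as ofborg can split each line on the colon without any further parsing:

      environments allocated count: 1000
      environments allocated bytes: 25600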
--- src/libexpr/eval.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0b0a0f7b179..11195af7793 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1700,10 +1700,13 @@ void EvalState::printStats() printMsg(v, format(" time elapsed: %1%") % cpuTime); printMsg(v, format(" size of a value: %1%") % sizeof(Value)); printMsg(v, format(" size of an attr: %1%") % sizeof(Attr)); - printMsg(v, format(" environments allocated: %1% (%2% bytes)") % nrEnvs % bEnvs); - printMsg(v, format(" list elements: %1% (%2% bytes)") % nrListElems % bLists); + printMsg(v, format(" environments allocated count: %1%") % nrEnvs); + printMsg(v, format(" environments allocated bytes: %1%") % bEnvs); + printMsg(v, format(" list elements count: %1%") % nrListElems); + printMsg(v, format(" list elements bytes: %1%") % bLists); printMsg(v, format(" list concatenations: %1%") % nrListConcats); - printMsg(v, format(" values allocated: %1% (%2% bytes)") % nrValues % bValues); + printMsg(v, format(" values allocated count: %1%") % nrValues); + printMsg(v, format(" values allocated bytes: %1%") % bValues); printMsg(v, format(" sets allocated: %1% (%2% bytes)") % nrAttrsets % bAttrsets); printMsg(v, format(" right-biased unions: %1%") % nrOpUpdates); printMsg(v, format(" values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied); From a0bdc96726b15b7f529156bccd60d0f8dd5544f3 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Sun, 11 Feb 2018 12:47:42 -0600 Subject: [PATCH 0818/2196] compression: print warning if parallel requested but not supported --- src/libutil/compression.cc | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index c509472b384..0b0ff110253 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -1,6 +1,7 @@ #include "compression.hh" #include "util.hh" #include "finally.hh" +#include "logging.hh" #include #include @@ -192,8 +193,8 @@ struct XzSink : CompressionSink XzSink(Sink & nextSink, const bool parallel) : nextSink(nextSink) { lzma_ret ret; -#ifdef HAVE_LZMA_MT if (parallel) { +#ifdef HAVE_LZMA_MT lzma_mt mt_options = {}; mt_options.flags = 0; mt_options.timeout = 300; // Using the same setting as the xz cmd line @@ -209,6 +210,9 @@ struct XzSink : CompressionSink ret = lzma_stream_encoder_mt( &strm, &mt_options); } else +#else + printMsg(lvlError, "Warning: parallel XZ compression requested but not supported, falling back to single-threaded compression"); + } #endif ret = lzma_easy_encoder( &strm, 6, LZMA_CHECK_CRC64); @@ -471,6 +475,9 @@ struct BrotliSink : CompressionSink ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel) { + if (parallel && method != "xz") + printMsg(lvlError, format("Warning: parallel compression requested but not supported for method '%1%', falling back to single-threaded compression") % method); + if (method == "none") return make_ref(nextSink); else if (method == "xz") From c6209030c424424ebd51283326d5e5df68a48533 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Sun, 11 Feb 2018 13:23:31 -0600 Subject: [PATCH 0819/2196] compression: make parallel sink separate class --- src/libutil/compression.cc | 63 ++++++++++++++++++++------------------ 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 0b0ff110253..470c925ed7a 100644 --- a/src/libutil/compression.cc +++ 
b/src/libutil/compression.cc @@ -190,33 +190,9 @@ struct XzSink : CompressionSink lzma_stream strm = LZMA_STREAM_INIT; bool finished = false; - XzSink(Sink & nextSink, const bool parallel) : nextSink(nextSink) - { - lzma_ret ret; - if (parallel) { -#ifdef HAVE_LZMA_MT - lzma_mt mt_options = {}; - mt_options.flags = 0; - mt_options.timeout = 300; // Using the same setting as the xz cmd line - mt_options.preset = LZMA_PRESET_DEFAULT; - mt_options.filters = NULL; - mt_options.check = LZMA_CHECK_CRC64; - mt_options.threads = lzma_cputhreads(); - mt_options.block_size = 0; - if (mt_options.threads == 0) - mt_options.threads = 1; - // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the - // number of threads. - ret = lzma_stream_encoder_mt( - &strm, &mt_options); - } else -#else - printMsg(lvlError, "Warning: parallel XZ compression requested but not supported, falling back to single-threaded compression"); - } -#endif - ret = lzma_easy_encoder( - &strm, 6, LZMA_CHECK_CRC64); - + template + XzSink(Sink & nextSink, F&& initEncoder) : nextSink(nextSink) { + lzma_ret ret = initEncoder(); if (ret != LZMA_OK) throw CompressionError("unable to initialise lzma encoder"); // FIXME: apply the x86 BCJ filter? @@ -224,6 +200,9 @@ struct XzSink : CompressionSink strm.next_out = outbuf; strm.avail_out = sizeof(outbuf); } + XzSink(Sink & nextSink) : XzSink(nextSink, [this]() { + return lzma_easy_encoder(&strm, 6, LZMA_CHECK_CRC64); + }) {} ~XzSink() { @@ -277,6 +256,27 @@ struct XzSink : CompressionSink } }; +#ifdef HAVE_LZMA_MT +struct ParallelXzSink : public XzSink +{ + ParallelXzSink(Sink &nextSink) : XzSink(nextSink, [this]() { + lzma_mt mt_options = {}; + mt_options.flags = 0; + mt_options.timeout = 300; // Using the same setting as the xz cmd line + mt_options.preset = LZMA_PRESET_DEFAULT; + mt_options.filters = NULL; + mt_options.check = LZMA_CHECK_CRC64; + mt_options.threads = lzma_cputhreads(); + mt_options.block_size = 0; + if (mt_options.threads == 0) + mt_options.threads = 1; + // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the + // number of threads. + return lzma_stream_encoder_mt(&strm, &mt_options); + }) {} +}; +#endif + struct BzipSink : CompressionSink { Sink & nextSink; @@ -475,13 +475,18 @@ struct BrotliSink : CompressionSink ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel) { - if (parallel && method != "xz") + if (parallel) { +#ifdef HAVE_LZMA_MT + if (method == "xz") + return make_ref(nextSink); +#endif printMsg(lvlError, format("Warning: parallel compression requested but not supported for method '%1%', falling back to single-threaded compression") % method); + } if (method == "none") return make_ref(nextSink); else if (method == "xz") - return make_ref(nextSink, parallel); + return make_ref(nextSink); else if (method == "bzip2") return make_ref(nextSink); else if (method == "br") From 35fd31770c589f28c569e7868f868a5b558cf83d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Feb 2018 16:42:32 +0100 Subject: [PATCH 0820/2196] toBuildables -> build --- src/nix/build.cc | 2 +- src/nix/command.hh | 2 +- src/nix/installables.cc | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/nix/build.cc b/src/nix/build.cc index f7c99f12dbb..b4f21b32d78 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -50,7 +50,7 @@ struct CmdBuild : MixDryRun, InstallablesCommand void run(ref store) override { - auto buildables = toBuildables(store, dryRun ? 
DryRun : Build, installables); + auto buildables = build(store, dryRun ? DryRun : Build, installables); for (size_t i = 0; i < buildables.size(); ++i) { auto & b(buildables[i]); diff --git a/src/nix/command.hh b/src/nix/command.hh index a7863c49f37..97a6fee7fd2 100644 --- a/src/nix/command.hh +++ b/src/nix/command.hh @@ -198,7 +198,7 @@ std::shared_ptr parseInstallable( SourceExprCommand & cmd, ref store, const std::string & installable, bool useDefaultInstallables); -Buildables toBuildables(ref store, RealiseMode mode, +Buildables build(ref store, RealiseMode mode, std::vector> installables); PathSet toStorePaths(ref store, RealiseMode mode, diff --git a/src/nix/installables.cc b/src/nix/installables.cc index c3b06c22eba..a3fdd8a2808 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -253,7 +253,7 @@ std::shared_ptr parseInstallable( return installables.front(); } -Buildables toBuildables(ref store, RealiseMode mode, +Buildables build(ref store, RealiseMode mode, std::vector> installables) { if (mode != Build) @@ -291,7 +291,7 @@ PathSet toStorePaths(ref store, RealiseMode mode, { PathSet outPaths; - for (auto & b : toBuildables(store, mode, installables)) + for (auto & b : build(store, mode, installables)) for (auto & output : b.outputs) outPaths.insert(output.second); From 4f09ce7940689887a18d4aa44367d2e6abeaa3cf Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2018 16:56:12 +0100 Subject: [PATCH 0821/2196] Fix 'deadlock: trying to re-acquire self-held lock' This was caused by derivations with 'allowSubstitutes = false'. Such derivations will be built locally. However, if there is another SubstitionGoal that has the output of the first derivation in its closure, then the path will be simultaneously built and substituted. There was a check to catch this situation (via pathIsLockedByMe()), but it no longer worked reliably because substitutions are now done in another thread. (Thus the comment 'It can't happen between here and the lockPaths() call below because we're not allowing multi-threading' was no longer valid.) The fix is to handle the path already being locked in both SubstitutionGoal and DerivationGoal. --- src/libstore/build.cc | 30 +++++++++++++++++------------- src/libstore/local-store.cc | 4 ++-- src/libstore/pathlocks.cc | 6 ++++-- src/libstore/pathlocks.hh | 6 +----- 4 files changed, 24 insertions(+), 22 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 392b494e65e..cc69ff1c74b 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1335,19 +1335,6 @@ void DerivationGoal::tryToBuild() { trace("trying to build"); - /* Check for the possibility that some other goal in this process - has locked the output since we checked in haveDerivation(). - (It can't happen between here and the lockPaths() call below - because we're not allowing multi-threading.) If so, put this - goal to sleep until another goal finishes, then try again. */ - for (auto & i : drv->outputs) - if (pathIsLockedByMe(worker.store.toRealPath(i.second.path))) { - debug(format("putting derivation '%1%' to sleep because '%2%' is locked by another goal") - % drvPath % i.second.path); - worker.waitForAnyGoal(shared_from_this()); - return; - } - /* Obtain locks on all output paths. The locks are automatically released when we exit this function or Nix crashes. 
If we can't acquire the lock, then continue; hopefully some other @@ -3739,6 +3726,17 @@ void SubstitutionGoal::tryToRun() return; } + /* If the store path is already locked (probably by a + DerivationGoal), then put this goal to sleep. Note: we don't + acquire a lock here since that breaks addToStore(), so below we + handle an AlreadyLocked exception from addToStore(). The check + here is just an optimisation to prevent having to redo a + download due to a locked path. */ + if (pathIsLockedByMe(worker.store.toRealPath(storePath))) { + worker.waitForAWhile(shared_from_this()); + return; + } + maintainRunningSubstitutions = std::make_unique>(worker.runningSubstitutions); worker.updateProgress(); @@ -3778,6 +3776,12 @@ void SubstitutionGoal::finished() try { promise.get_future().get(); + } catch (AlreadyLocked & e) { + /* Probably a DerivationGoal is already building this store + path. Sleep for a while and try again. */ + state = &SubstitutionGoal::init; + worker.waitForAWhile(shared_from_this()); + return; } catch (Error & e) { printError(e.msg()); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7afecc1cfc6..8a79fc7235f 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -992,8 +992,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & /* Lock the output path. But don't lock if we're being called from a build hook (whose parent process already acquired a lock on this path). */ - Strings locksHeld = tokenizeString(getEnv("NIX_HELD_LOCKS")); - if (find(locksHeld.begin(), locksHeld.end(), info.path) == locksHeld.end()) + static auto locksHeld = tokenizeString(getEnv("NIX_HELD_LOCKS")); + if (!locksHeld.count(info.path)) outputLock.lockPaths({realPath}); if (repair || !isValidPath(info.path)) { diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 587f2959885..08d1efdbeb0 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -113,8 +113,10 @@ bool PathLocks::lockPaths(const PathSet & _paths, { auto lockedPaths(lockedPaths_.lock()); - if (lockedPaths->count(lockPath)) - throw Error("deadlock: trying to re-acquire self-held lock '%s'", lockPath); + if (lockedPaths->count(lockPath)) { + if (!wait) return false; + throw AlreadyLocked("deadlock: trying to re-acquire self-held lock '%s'", lockPath); + } lockedPaths->insert(lockPath); } diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh index 2a7de611446..db51f950a32 100644 --- a/src/libstore/pathlocks.hh +++ b/src/libstore/pathlocks.hh @@ -2,10 +2,8 @@ #include "util.hh" - namespace nix { - /* Open (possibly create) a lock file and return the file descriptor. -1 is returned if create is false and the lock could not be opened because it doesn't exist. Any other error throws an exception. */ @@ -18,6 +16,7 @@ enum LockType { ltRead, ltWrite, ltNone }; bool lockFile(int fd, LockType lockType, bool wait); +MakeError(AlreadyLocked, Error); class PathLocks { @@ -38,9 +37,6 @@ public: void setDeletion(bool deletePaths); }; - -// FIXME: not thread-safe! 
bool pathIsLockedByMe(const Path & path); - } From 9bcb4d2dd99ba7509c27479eecf1e7ac88244fa8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2018 22:48:55 +0100 Subject: [PATCH 0822/2196] Fix hang in build-remote --- src/build-remote/build-remote.cc | 2 +- src/libstore/local-store.cc | 1 - src/libstore/local-store.hh | 3 +++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index df579729af2..c6e75e8cc70 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -241,7 +241,7 @@ int main (int argc, char * * argv) if (!missing.empty()) { Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri)); - setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */ + store->locksHeld.insert(missing.begin(), missing.end()); /* FIXME: ugly */ copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs, substitute); } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 8a79fc7235f..4afe51ea91e 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -992,7 +992,6 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref & /* Lock the output path. But don't lock if we're being called from a build hook (whose parent process already acquired a lock on this path). */ - static auto locksHeld = tokenizeString(getEnv("NIX_HELD_LOCKS")); if (!locksHeld.count(info.path)) outputLock.lockPaths({realPath}); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 30bef3a799d..bbd50e1c145 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -104,6 +104,9 @@ private: public: + // Hack for build-remote.cc. + PathSet locksHeld = tokenizeString(getEnv("NIX_HELD_LOCKS")); + /* Initialise the local store, upgrading the schema if necessary. */ LocalStore(const Params & params); From 6eb1040e909ab83fbc03983724d9c6ec223c4495 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Tue, 13 Feb 2018 08:16:32 -0500 Subject: [PATCH 0823/2196] Allow includes from nix.conf --- doc/manual/command-ref/conf-file.xml | 7 ++++++- src/libutil/config.cc | 26 +++++++++++++++++++++++++- tests/init.sh | 5 +++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 5c4561f66d8..8ed614963a4 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -40,7 +40,12 @@ The configuration files consist of name = -value pairs, one per line. +value pairs, one per line. Other +files can be included with a line like include +path, where +path is interpreted relative to the current +conf file and a missing file is an error unless +!include is used instead. Comments start with a # character. 
Here is an example configuration file: diff --git a/src/libutil/config.cc b/src/libutil/config.cc index d46ca65a386..0e502769edf 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -80,7 +80,31 @@ void Config::applyConfigFile(const Path & path, bool fatal) vector tokens = tokenizeString >(line); if (tokens.empty()) continue; - if (tokens.size() < 2 || tokens[1] != "=") + if (tokens.size() < 2) + throw UsageError("illegal configuration line '%1%' in '%2%'", line, path); + + auto include = false; + auto ignoreMissing = false; + if (tokens[0] == "include") + include = true; + else if (tokens[0] == "!include") { + include = true; + ignoreMissing = true; + } + + if (include) { + if (tokens.size() != 2) + throw UsageError("illegal configuration line '%1%' in '%2%'", line, path); + auto p = absPath(tokens[1], dirOf(path)); + if (pathExists(p)) { + applyConfigFile(p, fatal); + } else if (!ignoreMissing) { + throw Error("file '%1%' included from '%2%' not found", p, path); + } + continue; + } + + if (tokens[1] != "=") throw UsageError("illegal configuration line '%1%' in '%2%'", line, path); string name = tokens[0]; diff --git a/tests/init.sh b/tests/init.sh index 41cca047d8f..e5353598bcc 100644 --- a/tests/init.sh +++ b/tests/init.sh @@ -16,7 +16,12 @@ mkdir "$NIX_CONF_DIR" cat > "$NIX_CONF_DIR"/nix.conf < "$NIX_CONF_DIR"/nix.conf.extra < Date: Tue, 13 Feb 2018 11:05:25 +0000 Subject: [PATCH 0824/2196] Fix #1762 nix-store --export, nix-store --dump, and nix dump-path would previously fail silently if writing the data out failed, because a) FdSink::write ignored exceptions, and b) the commands relied on FdSink's destructor, which ignores exceptions, to flush the data out. This could cause rather opaque issues with installing nixos, because nix-store --export would happily proceed even if it couldn't write its data out (e.g. if nix-store --import on the other side of the pipe failed). This commit adds tests that expose these issues in the nix-store commands, and fixes them for all three. 
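The trap here is generic to buffered sinks: if a destructor is the only flush point, and destructors must not throw, then write errors simply evaporate. A minimal sketch of the pattern the fix moves towards — an explicit flush() that is allowed to throw, plus a best-effort destructor — might look like this (FdSinkSketch and its members are illustrative names, not the real FdSink interface):

    #include <stdexcept>
    #include <string>
    #include <unistd.h>

    // Illustrative only: a sink that propagates write errors and expects
    // callers to flush explicitly, so failures cannot be silently lost.
    struct FdSinkSketch
    {
        int fd;
        std::string buffer;
        bool good = true;

        explicit FdSinkSketch(int fd) : fd(fd) { }

        void operator()(const std::string & data)
        {
            buffer += data;
            if (buffer.size() > 64 * 1024) flush();
        }

        void flush()
        {
            size_t pos = 0;
            while (pos < buffer.size()) {
                ssize_t n = ::write(fd, buffer.data() + pos, buffer.size() - pos);
                if (n < 0) {
                    good = false;          // remember the failure...
                    throw std::runtime_error("writing to file descriptor failed");
                }                          // ...and propagate it instead of swallowing it
                pos += (size_t) n;
            }
            buffer.clear();
        }

        ~FdSinkSketch()
        {
            // Destructors must not throw, so this flush is best effort only;
            // that is exactly why callers now call flush() themselves.
            try { flush(); } catch (...) { }
        }
    };

The explicit sink.flush() calls added to opDump, opExport and CmdDumpPath play the role of the flush() above: a failed write now surfaces as an exception instead of disappearing in a destructor.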
--- src/libutil/serialise.cc | 3 ++- src/nix-store/nix-store.cc | 2 ++ src/nix/dump-path.cc | 1 + tests/export.sh | 5 +++++ tests/nar-access.sh | 6 ++++++ 5 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 950e6362a24..9e2a502afaf 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -67,7 +67,8 @@ void FdSink::write(const unsigned char * data, size_t len) try { writeFull(fd, data, len); } catch (SysError & e) { - _good = true; + _good = false; + throw; } } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 4fc3421c0dd..4bea1316134 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -631,6 +631,7 @@ static void opDump(Strings opFlags, Strings opArgs) FdSink sink(STDOUT_FILENO); string path = *opArgs.begin(); dumpPath(path, sink); + sink.flush(); } @@ -656,6 +657,7 @@ static void opExport(Strings opFlags, Strings opArgs) FdSink sink(STDOUT_FILENO); store->exportPaths(opArgs, sink); + sink.flush(); } diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc index 1a1866437b0..f411c0cb7c8 100644 --- a/src/nix/dump-path.cc +++ b/src/nix/dump-path.cc @@ -29,6 +29,7 @@ struct CmdDumpPath : StorePathCommand { FdSink sink(STDOUT_FILENO); store->narFromPath(storePath, sink); + sink.flush(); } }; diff --git a/tests/export.sh b/tests/export.sh index ec7560f1972..2238539bcca 100644 --- a/tests/export.sh +++ b/tests/export.sh @@ -8,6 +8,11 @@ nix-store --export $outPath > $TEST_ROOT/exp nix-store --export $(nix-store -qR $outPath) > $TEST_ROOT/exp_all +if nix-store --export $outPath >/dev/full ; then + echo "exporting to a bad file descriptor should fail" + exit 1 +fi + clearStore diff --git a/tests/nar-access.sh b/tests/nar-access.sh index bd849cbfab1..553d6ca89d7 100644 --- a/tests/nar-access.sh +++ b/tests/nar-access.sh @@ -36,3 +36,9 @@ diff -u baz.cat-nar $storePath/foo/baz # Test missing files. nix ls-store --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR' nix ls-store $storePath/xyzzy 2>&1 | grep 'does not exist' + +# Test failure to dump. +if nix-store --dump $storePath >/dev/full ; then + echo "dumping to /dev/full should fail" + exit -1 +fi From b8739f2fb33a28a250cd2053c013b977a3f096e8 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Tue, 13 Feb 2018 07:51:52 -0500 Subject: [PATCH 0825/2196] Enable specifying directories in plugin-files. --- doc/manual/command-ref/conf-file.xml | 4 ++++ src/libstore/globals.cc | 24 ++++++++++++++++++------ 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 5c4561f66d8..2b7a69a0c59 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -764,6 +764,10 @@ builtins.fetchurl { should not be linked to any Nix libs directly, as those will be available already at load time. + + If an entry in the list is a directory, all files in the + directory are loaded as plugins (non-recursively). + diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 21ab0e6296e..c6b508cbe82 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -142,12 +142,24 @@ void MaxBuildJobsSetting::set(const std::string & str) void initPlugins() { for (const auto & pluginFile : settings.pluginFiles.get()) { - /* handle is purposefully leaked as there may be state in the - DSO needed by the action of the plugin. 
*/ - void *handle = - dlopen(pluginFile.c_str(), RTLD_LAZY | RTLD_LOCAL); - if (!handle) - throw Error(format("could not dynamically open plugin file '%1%': %2%") % pluginFile % dlerror()); + Paths pluginFiles; + try { + auto ents = readDirectory(pluginFile); + for (const auto & ent : ents) + pluginFiles.emplace_back(pluginFile + "/" + ent.name); + } catch (SysError & e) { + if (e.errNo != ENOTDIR) + throw; + pluginFiles.emplace_back(pluginFile); + } + for (const auto & file : pluginFiles) { + /* handle is purposefully leaked as there may be state in the + DSO needed by the action of the plugin. */ + void *handle = + dlopen(file.c_str(), RTLD_LAZY | RTLD_LOCAL); + if (!handle) + throw Error("could not dynamically open plugin file '%s%': %s%", file, dlerror()); + } } } From 3fe9767dd33499c2560d209dc13a01f5fcead1f0 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Tue, 13 Feb 2018 12:49:14 -0500 Subject: [PATCH 0826/2196] Fix plugin tests on darwin --- tests/local.mk | 2 +- tests/plugins.sh | 2 +- tests/plugins/local.mk | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/local.mk b/tests/local.mk index 51bc09dd432..2a4832e1304 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -32,4 +32,4 @@ tests-environment = NIX_REMOTE= $(bash) -e clean-files += $(d)/common.sh -installcheck: $(d)/common.sh $(d)/plugins/plugintest.so +installcheck: $(d)/common.sh $(d)/plugins/libplugintest.$(SO_EXT) diff --git a/tests/plugins.sh b/tests/plugins.sh index 23caf04f338..0fad4f773a2 100644 --- a/tests/plugins.sh +++ b/tests/plugins.sh @@ -2,6 +2,6 @@ source common.sh set -o pipefail -res=$(nix eval '(builtins.anotherNull)' --option plugin-files $PWD/plugins/plugintest.so) +res=$(nix eval '(builtins.anotherNull)' --option plugin-files $PWD/plugins/libplugintest*) [ "$res"x = "nullx" ] diff --git a/tests/plugins/local.mk b/tests/plugins/local.mk index a5f19b087c8..1d2bac052fd 100644 --- a/tests/plugins/local.mk +++ b/tests/plugins/local.mk @@ -1,9 +1,9 @@ -libraries += plugintest +libraries += libplugintest -plugintest_DIR := $(d) +libplugintest_DIR := $(d) -plugintest_SOURCES := $(d)/plugintest.cc +libplugintest_SOURCES := $(d)/plugintest.cc -plugintest_ALLOW_UNDEFINED := 1 +libplugintest_ALLOW_UNDEFINED := 1 -plugintest_EXCLUDE_FROM_LIBRARY_LIST := 1 +libplugintest_EXCLUDE_FROM_LIBRARY_LIST := 1 From de4934ab3b26aa851b7044e9884102cc054dc092 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Tue, 13 Feb 2018 14:43:32 -0500 Subject: [PATCH 0827/2196] Allow plugins to define new settings. --- doc/manual/command-ref/conf-file.xml | 7 +++-- src/libstore/globals.cc | 16 +++++++++++ src/libstore/globals.hh | 7 +++++ src/libstore/store-api.cc | 2 +- src/libutil/config.cc | 42 ++++++++++++++-------------- src/libutil/config.hh | 8 +++--- tests/plugins.sh | 2 +- tests/plugins/plugintest.cc | 13 +++++++-- 8 files changed, 65 insertions(+), 32 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 42906ddff5e..c14a4d206b8 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -757,9 +757,10 @@ builtins.fetchurl { plugins may construct static instances of RegisterPrimOp to add new primops or constants to the expression language, RegisterStoreImplementation to add new store implementations, - and RegisterCommand to add new subcommands to the - nix command. See the constructors for those - types for more details. 
+ RegisterCommand to add new subcommands to the + nix command, and RegisterSetting to add new + nix config settings. See the constructors for those types for + more details. Since these files are loaded into the same address space as diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index c6b508cbe82..c5a4536ef2f 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -161,6 +161,22 @@ void initPlugins() throw Error("could not dynamically open plugin file '%s%': %s%", file, dlerror()); } } + /* We handle settings registrations here, since plugins can add settings */ + if (RegisterSetting::settingRegistrations) { + for (auto & registration : *RegisterSetting::settingRegistrations) + settings.addSetting(registration); + delete RegisterSetting::settingRegistrations; + } + settings.handleUnknownSettings(); +} + +RegisterSetting::SettingRegistrations * RegisterSetting::settingRegistrations; + +RegisterSetting::RegisterSetting(AbstractSetting * s) +{ + if (!settingRegistrations) + settingRegistrations = new SettingRegistrations; + settingRegistrations->emplace_back(s); } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 508084d08ac..1d019aab9d2 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -383,5 +383,12 @@ void initPlugins(); extern const string nixVersion; +struct RegisterSetting +{ + typedef std::vector SettingRegistrations; + static SettingRegistrations * settingRegistrations; + RegisterSetting(AbstractSetting * s); +}; + } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 4d43ef082d5..8830edcc344 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -839,7 +839,7 @@ ref openStore(const std::string & uri_, for (auto fun : *RegisterStoreImplementation::implementations) { auto store = fun(uri, params); if (store) { - store->warnUnknownSettings(); + store->handleUnknownSettings(); return ref(store); } } diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 0e502769edf..cdd1e0a15aa 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -7,10 +7,12 @@ namespace nix { void Config::set(const std::string & name, const std::string & value) { auto i = _settings.find(name); - if (i == _settings.end()) - throw UsageError("unknown setting '%s'", name); - i->second.setting->set(value); - i->second.setting->overriden = true; + if (i == _settings.end()) { + extras.emplace(name, value); + } else { + i->second.setting->set(value); + i->second.setting->overriden = true; + } } void Config::addSetting(AbstractSetting * setting) @@ -21,34 +23,37 @@ void Config::addSetting(AbstractSetting * setting) bool set = false; - auto i = initials.find(setting->name); - if (i != initials.end()) { + auto i = extras.find(setting->name); + if (i != extras.end()) { setting->set(i->second); setting->overriden = true; - initials.erase(i); + extras.erase(i); set = true; } for (auto & alias : setting->aliases) { - auto i = initials.find(alias); - if (i != initials.end()) { + auto i = extras.find(alias); + if (i != extras.end()) { if (set) warn("setting '%s' is set, but it's an alias of '%s' which is also set", alias, setting->name); else { setting->set(i->second); setting->overriden = true; - initials.erase(i); + extras.erase(i); set = true; } } } } -void Config::warnUnknownSettings() +void Config::handleUnknownSettings(bool fatal) { - for (auto & i : initials) - warn("unknown setting '%s'", i.first); + for (auto & s : extras) + if (fatal) + throw UsageError("unknown setting '%s%'", s.first); + else 
+ warn("unknown setting '%s'", s.first); } StringMap Config::getSettings(bool overridenOnly) @@ -60,7 +65,7 @@ StringMap Config::getSettings(bool overridenOnly) return res; } -void Config::applyConfigFile(const Path & path, bool fatal) +void Config::applyConfigFile(const Path & path) { try { string contents = readFile(path); @@ -97,7 +102,7 @@ void Config::applyConfigFile(const Path & path, bool fatal) throw UsageError("illegal configuration line '%1%' in '%2%'", line, path); auto p = absPath(tokens[1], dirOf(path)); if (pathExists(p)) { - applyConfigFile(p, fatal); + applyConfigFile(p); } else if (!ignoreMissing) { throw Error("file '%1%' included from '%2%' not found", p, path); } @@ -112,12 +117,7 @@ void Config::applyConfigFile(const Path & path, bool fatal) vector::iterator i = tokens.begin(); advance(i, 2); - try { - set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow - } catch (UsageError & e) { - if (fatal) throw; - warn("in configuration file '%s': %s", path, e.what()); - } + set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow }; } catch (SysError &) { } } diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 9a32af528ec..c6783e13c2b 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -48,25 +48,25 @@ private: Settings _settings; - StringMap initials; + StringMap extras; public: Config(const StringMap & initials) - : initials(initials) + : extras(initials) { } void set(const std::string & name, const std::string & value); void addSetting(AbstractSetting * setting); - void warnUnknownSettings(); + void handleUnknownSettings(bool fatal = false); StringMap getSettings(bool overridenOnly = false); const Settings & _getSettings() { return _settings; } - void applyConfigFile(const Path & path, bool fatal = false); + void applyConfigFile(const Path & path); void resetOverriden(); diff --git a/tests/plugins.sh b/tests/plugins.sh index 0fad4f773a2..4b1baeddce3 100644 --- a/tests/plugins.sh +++ b/tests/plugins.sh @@ -2,6 +2,6 @@ source common.sh set -o pipefail -res=$(nix eval '(builtins.anotherNull)' --option plugin-files $PWD/plugins/libplugintest*) +res=$(nix eval '(builtins.anotherNull)' --option setting-set true --option plugin-files $PWD/plugins/libplugintest*) [ "$res"x = "nullx" ] diff --git a/tests/plugins/plugintest.cc b/tests/plugins/plugintest.cc index 6b5e6d7cde2..8da15ebabd7 100644 --- a/tests/plugins/plugintest.cc +++ b/tests/plugins/plugintest.cc @@ -1,10 +1,19 @@ +#include "globals.hh" #include "primops.hh" using namespace nix; +static BaseSetting settingSet{false, "setting-set", + "Whether the plugin-defined setting was set"}; + +static RegisterSetting rs(&settingSet); + static void prim_anotherNull (EvalState & state, const Pos & pos, Value ** args, Value & v) { - mkNull(v); + if (settingSet) + mkNull(v); + else + mkBool(v, false); } -static RegisterPrimOp r("anotherNull", 0, prim_anotherNull); +static RegisterPrimOp rp("anotherNull", 0, prim_anotherNull); From b095c06139fa267e6050e4c95208c627cc6251b8 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Tue, 13 Feb 2018 18:28:27 -0500 Subject: [PATCH 0828/2196] Add splitVersion primop. Fixes #1868. 
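To make the splitting behaviour concrete outside the evaluator, here is a rough standalone approximation of the component logic (it imitates names.cc's nextComponent rather than reusing it, so treat the details as a sketch):

    #include <cctype>
    #include <iostream>
    #include <string>
    #include <vector>

    // Approximation of Nix's version splitting: skip '.' and '-' separators,
    // then take either a maximal run of digits or a maximal run of non-digits.
    std::vector<std::string> splitVersionSketch(const std::string & version)
    {
        std::vector<std::string> components;
        auto p = version.begin(), end = version.end();
        while (p != end) {
            while (p != end && (*p == '.' || *p == '-')) ++p;   // skip separators
            if (p == end) break;
            auto start = p;
            bool digits = std::isdigit((unsigned char) *p);
            while (p != end && *p != '.' && *p != '-'
                   && (bool) std::isdigit((unsigned char) *p) == digits)
                ++p;
            components.emplace_back(start, p);
        }
        return components;
    }

    int main()
    {
        for (auto & c : splitVersionSketch("2.0pre5891-abc"))
            std::cout << c << '\n';   // 2, 0, pre, 5891, abc
    }

Fed "1.2.3" it yields the three components shown in the eval-okay-splitversion test below.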
--- doc/manual/expressions/builtins.xml | 11 +++++++++++ src/libexpr/names.cc | 2 +- src/libexpr/names.hh | 2 ++ src/libexpr/primops.cc | 21 +++++++++++++++++++++ tests/lang/eval-okay-splitversion.exp | 1 + tests/lang/eval-okay-splitversion.nix | 1 + 6 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 tests/lang/eval-okay-splitversion.exp create mode 100644 tests/lang/eval-okay-splitversion.nix diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 81770bcf629..8a32ed8b5c9 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -126,6 +126,17 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" + builtins.splitVersion + s + + Split a string representing a version into its + components, by the same version splitting logic underlying the + version comparison in + nix-env -u. + + + + builtins.concatLists lists diff --git a/src/libexpr/names.cc b/src/libexpr/names.cc index 6d78d211612..382088c7887 100644 --- a/src/libexpr/names.cc +++ b/src/libexpr/names.cc @@ -41,7 +41,7 @@ bool DrvName::matches(DrvName & n) } -static string nextComponent(string::const_iterator & p, +string nextComponent(string::const_iterator & p, const string::const_iterator end) { /* Skip any dots and dashes (component separators). */ diff --git a/src/libexpr/names.hh b/src/libexpr/names.hh index 9667fc96fd0..13c3093e77b 100644 --- a/src/libexpr/names.hh +++ b/src/libexpr/names.hh @@ -24,6 +24,8 @@ private: typedef list DrvNames; +string nextComponent(string::const_iterator & p, + const string::const_iterator end); int compareVersions(const string & v1, const string & v2); DrvNames drvNamesFromArgs(const Strings & opArgs); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 466fd13e869..ca97b2b28bb 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1961,6 +1961,26 @@ static void prim_compareVersions(EvalState & state, const Pos & pos, Value * * a } +static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + string version = state.forceStringNoCtx(*args[0], pos); + auto iter = version.cbegin(); + Strings components; + while (iter != version.cend()) { + auto component = nextComponent(iter, version.cend()); + if (component.empty()) + break; + components.emplace_back(std::move(component)); + } + state.mkList(v, components.size()); + unsigned int n = 0; + for (auto & component : components) { + auto listElem = v.listElems()[n++] = state.allocValue(); + mkString(*listElem, std::move(component)); + } +} + + /************************************************************* * Networking *************************************************************/ @@ -2196,6 +2216,7 @@ void EvalState::createBaseEnv() // Versions addPrimOp("__parseDrvName", 1, prim_parseDrvName); addPrimOp("__compareVersions", 2, prim_compareVersions); + addPrimOp("__splitVersion", 1, prim_splitVersion); // Derivations addPrimOp("derivationStrict", 1, prim_derivationStrict); diff --git a/tests/lang/eval-okay-splitversion.exp b/tests/lang/eval-okay-splitversion.exp new file mode 100644 index 00000000000..153ceb8186a --- /dev/null +++ b/tests/lang/eval-okay-splitversion.exp @@ -0,0 +1 @@ +[ "1" "2" "3" ] diff --git a/tests/lang/eval-okay-splitversion.nix b/tests/lang/eval-okay-splitversion.nix new file mode 100644 index 00000000000..9e5c99d2e7f --- /dev/null +++ b/tests/lang/eval-okay-splitversion.nix @@ -0,0 +1 @@ +builtins.splitVersion "1.2.3" From ac973a6d3c0ff2f505dece8e9f1508c6f77553a5 Mon Sep 17 
00:00:00 2001 From: Shea Levy Date: Wed, 14 Feb 2018 12:35:07 -0500 Subject: [PATCH 0829/2196] monitor-fds: Fix on macOS. Fixes #1871. --- src/libutil/monitor-fd.hh | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/src/libutil/monitor-fd.hh b/src/libutil/monitor-fd.hh index e0ec66c0180..5ee0b88ef50 100644 --- a/src/libutil/monitor-fd.hh +++ b/src/libutil/monitor-fd.hh @@ -21,13 +21,29 @@ public: MonitorFdHup(int fd) { thread = std::thread([fd]() { - /* Wait indefinitely until a POLLHUP occurs. */ - struct pollfd fds[1]; - fds[0].fd = fd; - fds[0].events = 0; - if (poll(fds, 1, -1) == -1) abort(); // can't happen - assert(fds[0].revents & POLLHUP); - triggerInterrupt(); + while (true) { + /* Wait indefinitely until a POLLHUP occurs. */ + struct pollfd fds[1]; + fds[0].fd = fd; + /* This shouldn't be necessary, but macOS doesn't seem to + like a zeroed out events field. + See rdar://37537852. + */ + fds[0].events = POLLHUP; + auto count = poll(fds, 1, -1); + if (count == -1) abort(); // can't happen + /* This shouldn't happen, but can on macOS due to a bug. + See rdar://37550628. + + This may eventually need a delay or further + coordination with the main thread if spinning proves + too harmful. + */ + if (count == 0) continue; + assert(fds[0].revents & POLLHUP); + triggerInterrupt(); + break; + } }); }; From 8f186722a97882ba41330684f37bfe8b8637eba8 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Wed, 14 Feb 2018 16:05:55 -0600 Subject: [PATCH 0830/2196] =?UTF-8?q?Set=20backup=20MANPATH=20in=20case=20?= =?UTF-8?q?man=20path=20isn=E2=80=99t=20set=20correctly.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously, this would fail at startup for non-NixOS installs: nix-env --help The fix for this is to just use "nixManDir" as the value for MANPATH when spawning "man". To test this, I’m using the following: $ nix-build release.nix -A build $ MANPATH= ./result/bin/nix-env --help Fixes #1627 --- src/libmain/shared.cc | 1 + src/libstore/globals.cc | 1 + src/libstore/globals.hh | 3 +++ src/libstore/local.mk | 1 + 4 files changed, 6 insertions(+) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 90a4867163d..7d888202bbf 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -262,6 +262,7 @@ void printVersion(const string & programName) void showManPage(const string & name) { restoreSignals(); + setenv("MANPATH", settings.nixManDir.c_str(), 1); execlp("man", "man", name.c_str(), NULL); throw SysError(format("command 'man %1%' failed") % name.c_str()); } diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index c6b508cbe82..247040606fb 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -38,6 +38,7 @@ Settings::Settings() , nixConfDir(canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR))) , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR))) , nixBinDir(canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR))) + , nixManDir(canonPath(NIX_MAN_DIR)) , nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH)) { buildUsersGroup = getuid() == 0 ? "nixbld" : ""; diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 508084d08ac..fd3fbec9f01 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -82,6 +82,9 @@ public: /* The directory where the main programs are stored. */ Path nixBinDir; + /* The directory where the man pages are stored. */ + Path nixManDir; + /* File name of the socket the daemon listens to. 
*/ Path nixDaemonSocketFile; diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 239356aee8d..c7ac534e238 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -38,6 +38,7 @@ libstore_CXXFLAGS = \ -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ -DNIX_BIN_DIR=\"$(bindir)\" \ + -DNIX_MAN_DIR=\"$(mandir)\" \ -DSANDBOX_SHELL="\"$(sandbox_shell)\"" \ -DLSOF=\"$(lsof)\" From f67a7007a230c84015793794277c0449e682ab54 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Tue, 13 Feb 2018 01:32:29 +0200 Subject: [PATCH 0831/2196] libexpr: Pre-reserve space in string in unescapeStr() Avoids some malloc() traffic. --- src/libexpr/lexer.l | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 28a0a6a8789..e5e01fb5831 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -49,9 +49,10 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) } -static Expr * unescapeStr(SymbolTable & symbols, const char * s) +static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length) { string t; + t.reserve(length); char c; while ((c = *s++)) { if (c == '\\') { @@ -150,7 +151,7 @@ or { return OR_KW; } /* It is impossible to match strings ending with '$' with one regex because trailing contexts are only valid at the end of a rule. (A sane but undocumented limitation.) */ - yylval->e = unescapeStr(data->symbols, yytext); + yylval->e = unescapeStr(data->symbols, yytext, yyleng); return STR; } \$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; } @@ -178,7 +179,7 @@ or { return OR_KW; } return IND_STR; } \'\'\\. { - yylval->e = unescapeStr(data->symbols, yytext + 2); + yylval->e = unescapeStr(data->symbols, yytext + 2, yyleng - 2); return IND_STR; } \$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; } From b8bed7da14b26dcc328075522842dd16aa71b434 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Fri, 16 Feb 2018 05:14:35 +0200 Subject: [PATCH 0832/2196] libexpr: Optimize prim_attrNames a bit Instead of having lexicographicOrder() create a temporary sorted array of Attr*:s and copying attr names from that, copy the attr names first and then sort that. --- src/libexpr/primops.cc | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ca97b2b28bb..89e984d2e58 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1138,8 +1138,11 @@ static void prim_attrNames(EvalState & state, const Pos & pos, Value * * args, V state.mkList(v, args[0]->attrs->size()); size_t n = 0; - for (auto & i : args[0]->attrs->lexicographicOrder()) - mkString(*(v.listElems()[n++] = state.allocValue()), i->name); + for (auto & i : *args[0]->attrs) + mkString(*(v.listElems()[n++] = state.allocValue()), i.name); + + std::sort(v.listElems(), v.listElems() + n, + [](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; }); } From 0845cdf9443a6b304c1bcec304a462ae4995c744 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Tue, 13 Feb 2018 05:00:17 +0200 Subject: [PATCH 0833/2196] libexpr: Rely on Boehm returning zeroed memory in EvalState::allocEnv() Boehm guarantees that memory returned by GC_malloc() is zeroed, so take advantage of that. 
--- src/libexpr/attr-set.cc | 3 ++- src/libexpr/eval.cc | 7 +++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index 910428c0268..b284daa3c2f 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -7,13 +7,14 @@ namespace nix { +/* Note: Various places expect the allocated memory to be zeroed. */ static void * allocBytes(size_t n) { void * p; #if HAVE_BOEHMGC p = GC_malloc(n); #else - p = malloc(n); + p = calloc(n, 1); #endif if (!p) throw std::bad_alloc(); return p; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b94bc597b01..48542d8e500 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -43,13 +43,14 @@ static char * dupString(const char * s) } +/* Note: Various places expect the allocated memory to be zeroed. */ static void * allocBytes(size_t n) { void * p; #if HAVE_BOEHMGC p = GC_malloc(n); #else - p = malloc(n); + p = calloc(n, 1); #endif if (!p) throw std::bad_alloc(); return p; @@ -582,9 +583,7 @@ Env & EvalState::allocEnv(unsigned int size) Env * env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *)); env->size = size; - /* Clear the values because maybeThunk() and lookupVar fromWith expect this. */ - for (unsigned i = 0; i < size; ++i) - env->values[i] = 0; + /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. */ return *env; } From 7e0360504d1a964ad5bd0da996045bc3868d0d7d Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Sat, 4 Mar 2017 15:24:06 +0200 Subject: [PATCH 0834/2196] libexpr: Optimize prim_derivationStrict by using more symbol comparisons --- src/libexpr/eval.cc | 4 ++++ src/libexpr/eval.hh | 3 ++- src/libexpr/primops.cc | 18 +++++++++--------- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 48542d8e500..2144d345257 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -294,6 +294,10 @@ EvalState::EvalState(const Strings & _searchPath, ref store) , sWrong(symbols.create("wrong")) , sStructuredAttrs(symbols.create("__structuredAttrs")) , sBuilder(symbols.create("builder")) + , sArgs(symbols.create("args")) + , sOutputHash(symbols.create("outputHash")) + , sOutputHashAlgo(symbols.create("outputHashAlgo")) + , sOutputHashMode(symbols.create("outputHashMode")) , repair(NoRepair) , store(store) , baseEnv(allocEnv(128)) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 51905d7e1c6..9d8799b7906 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -69,7 +69,8 @@ public: const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, - sRight, sWrong, sStructuredAttrs, sBuilder; + sRight, sWrong, sStructuredAttrs, sBuilder, sArgs, + sOutputHash, sOutputHashAlgo, sOutputHashMode; Symbol sDerivationNix; /* If set, force copying files to the Nix store even if they diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 89e984d2e58..317623b22f7 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -589,7 +589,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * /* The `args' attribute is special: it supplies the command-line arguments to the builder. 
*/ - if (key == "args") { + if (i->name == state.sArgs) { state.forceList(*i->value, pos); for (unsigned int n = 0; n < i->value->listSize(); ++n) { string s = state.coerceToString(posDrvName, *i->value->listElems()[n], context, true); @@ -614,13 +614,13 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * drv.platform = state.forceStringNoCtx(*i->value, posDrvName); else if (i->name == state.sName) drvName = state.forceStringNoCtx(*i->value, posDrvName); - else if (key == "outputHash") + else if (i->name == state.sOutputHash) outputHash = state.forceStringNoCtx(*i->value, posDrvName); - else if (key == "outputHashAlgo") + else if (i->name == state.sOutputHashAlgo) outputHashAlgo = state.forceStringNoCtx(*i->value, posDrvName); - else if (key == "outputHashMode") + else if (i->name == state.sOutputHashMode) handleHashMode(state.forceStringNoCtx(*i->value, posDrvName)); - else if (key == "outputs") { + else if (i->name == state.sOutputs) { /* Require ‘outputs’ to be a list of strings. */ state.forceList(*i->value, posDrvName); Strings ss; @@ -638,10 +638,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * drvName = s; printMsg(lvlVomit, format("derivation name is '%1%'") % drvName); } - else if (key == "outputHash") outputHash = s; - else if (key == "outputHashAlgo") outputHashAlgo = s; - else if (key == "outputHashMode") handleHashMode(s); - else if (key == "outputs") + else if (i->name == state.sOutputHash) outputHash = s; + else if (i->name == state.sOutputHashAlgo) outputHashAlgo = s; + else if (i->name == state.sOutputHashMode) handleHashMode(s); + else if (i->name == state.sOutputs) handleOutputs(tokenizeString(s)); } From 66eeff33456d544e5852e580d8cac21f0c38c11f Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Sat, 17 Feb 2018 16:51:10 +0200 Subject: [PATCH 0835/2196] libexpr: Remove unnecessary drvName assignment in prim_derivationStrict drvName is already assigned to the same value right at the start of the function. 
--- src/libexpr/primops.cc | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 317623b22f7..f6c34d52576 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -612,8 +612,6 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * drv.builder = state.forceString(*i->value, context, posDrvName); else if (i->name == state.sSystem) drv.platform = state.forceStringNoCtx(*i->value, posDrvName); - else if (i->name == state.sName) - drvName = state.forceStringNoCtx(*i->value, posDrvName); else if (i->name == state.sOutputHash) outputHash = state.forceStringNoCtx(*i->value, posDrvName); else if (i->name == state.sOutputHashAlgo) @@ -634,10 +632,6 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * drv.env.emplace(key, s); if (i->name == state.sBuilder) drv.builder = s; else if (i->name == state.sSystem) drv.platform = s; - else if (i->name == state.sName) { - drvName = s; - printMsg(lvlVomit, format("derivation name is '%1%'") % drvName); - } else if (i->name == state.sOutputHash) outputHash = s; else if (i->name == state.sOutputHashAlgo) outputHashAlgo = s; else if (i->name == state.sOutputHashMode) handleHashMode(s); From 37264ed0ad898cdd6880de8ce6e5dda7977eed5f Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Fri, 16 Feb 2018 05:13:39 +0200 Subject: [PATCH 0836/2196] libexpr: Avoid an unnecessary string copy in prim_derivationStrict --- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index f6c34d52576..a800d24290a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -553,7 +553,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * for (auto & i : args[0]->attrs->lexicographicOrder()) { if (i->name == state.sIgnoreNulls) continue; - string key = i->name; + const string & key = i->name; vomit("processing attribute '%1%'", key); auto handleHashMode = [&](const std::string & s) { From 690ac7c90b5bf3c599e210c53365c7d229c8b0ff Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sun, 18 Feb 2018 02:35:01 -0500 Subject: [PATCH 0837/2196] configure: Add a flag to disable seccomp. This is needed for new arches where libseccomp support doesn't exist yet. Fixes #1878. --- Makefile.config.in | 1 + configure.ac | 16 ++++++++++++++-- src/libstore/build.cc | 4 +++- src/libstore/local.mk | 2 +- 4 files changed, 19 insertions(+), 4 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index fab82194656..a9785dc7395 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -7,6 +7,7 @@ ENABLE_S3 = @ENABLE_S3@ HAVE_SODIUM = @HAVE_SODIUM@ HAVE_READLINE = @HAVE_READLINE@ HAVE_BROTLI = @HAVE_BROTLI@ +HAVE_SECCOMP = @HAVE_SECCOMP@ LIBCURL_LIBS = @LIBCURL_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ diff --git a/configure.ac b/configure.ac index 83b2346d065..14f742cf3ff 100644 --- a/configure.ac +++ b/configure.ac @@ -186,9 +186,21 @@ AC_SUBST(HAVE_BROTLI, [$have_brotli]) # Look for libseccomp, required for Linux sandboxing. 
if test "$sys_name" = linux; then - PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp], - [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"]) + AC_ARG_ENABLE([seccomp-sandboxing], + AC_HELP_STRING([--disable-seccomp-sandboxing], + [Don't build support for seccomp sandboxing (only recommended if your arch doesn't support libseccomp yet!)] + )) + if test "x$enable_seccomp_sandboxing" != "xno"; then + PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp], + [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"]) + have_seccomp=1 + else + have_seccomp= + fi +else + have_seccomp= fi +AC_SUBST(HAVE_SECCOMP, [$have_seccomp]) # Look for aws-cpp-sdk-s3. diff --git a/src/libstore/build.cc b/src/libstore/build.cc index cc69ff1c74b..9b7abaa3d1e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -49,7 +49,9 @@ #include #include #include +#if HAVE_SECCOMP #include +#endif #define pivot_root(new_root, put_old) (syscall(SYS_pivot_root, new_root, put_old)) #endif @@ -2469,7 +2471,7 @@ void DerivationGoal::chownToBuilder(const Path & path) void setupSeccomp() { -#if __linux__ +#if __linux__ && HAVE_SECCOMP if (!settings.filterSyscalls) return; scmp_filter_ctx ctx; diff --git a/src/libstore/local.mk b/src/libstore/local.mk index c7ac534e238..e11efa5c2b5 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -25,7 +25,7 @@ ifeq ($(OS), SunOS) libstore_LDFLAGS += -lsocket endif -ifeq ($(OS), Linux) +ifeq ($(HAVE_SECCOMP), 1) libstore_LDFLAGS += -lseccomp endif From ed73d40c3b19dc0581bbf28ef29aad50cab3aaf2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Feb 2018 14:00:34 +0100 Subject: [PATCH 0838/2196] Config::handleUnknownSettings(): Remove unused 'fatal' argument --- src/libutil/config.cc | 7 ++----- src/libutil/config.hh | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index cdd1e0a15aa..ce6858f0d65 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -47,13 +47,10 @@ void Config::addSetting(AbstractSetting * setting) } } -void Config::handleUnknownSettings(bool fatal) +void Config::handleUnknownSettings() { for (auto & s : extras) - if (fatal) - throw UsageError("unknown setting '%s%'", s.first); - else - warn("unknown setting '%s'", s.first); + warn("unknown setting '%s'", s.first); } StringMap Config::getSettings(bool overridenOnly) diff --git a/src/libutil/config.hh b/src/libutil/config.hh index c6783e13c2b..d2e7faf1743 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -60,7 +60,7 @@ public: void addSetting(AbstractSetting * setting); - void handleUnknownSettings(bool fatal = false); + void handleUnknownSettings(); StringMap getSettings(bool overridenOnly = false); From e59a8a63e10992834a48e2b43c854c2e4f990dbe Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Mon, 19 Feb 2018 09:56:24 -0500 Subject: [PATCH 0839/2196] Don't silently succeed seccomp setup when !HAVE_SECCOMP. Running Nix with build users without seccomp on Linux is dangerous, and administrators should very explicitly opt-in to it. 
--- src/libstore/build.cc | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 9b7abaa3d1e..30f22833fa7 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2471,9 +2471,9 @@ void DerivationGoal::chownToBuilder(const Path & path) void setupSeccomp() { -#if __linux__ && HAVE_SECCOMP +#if __linux__ if (!settings.filterSyscalls) return; - +#if HAVE_SECCOMP scmp_filter_ctx ctx; if (!(ctx = seccomp_init(SCMP_ACT_ALLOW))) @@ -2519,6 +2519,11 @@ void setupSeccomp() if (seccomp_load(ctx) != 0) throw SysError("unable to load seccomp BPF program"); +#else + throw Error("%s\n%s", + "seccomp is not supported on this platform" + "you can avoid this by setting the filter-syscalls option to false, but note that untrusted builds can then create setuid binaries!"); +#endif #endif } From a6c0b773b72d4e30690e01f1f1dcffc28f2d9ea1 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 19 Feb 2018 12:13:51 -0600 Subject: [PATCH 0840/2196] configure.ac: define HAVE_SECCOMP macro when using seccomp, fix build/tests Happily the failing tests should prevent anyone from using such a Nix in situations where they expect sandboxing to be on, which would otherwise be a risk. --- configure.ac | 1 + 1 file changed, 1 insertion(+) diff --git a/configure.ac b/configure.ac index 4102f32166f..54322d463ae 100644 --- a/configure.ac +++ b/configure.ac @@ -196,6 +196,7 @@ if test "$sys_name" = linux; then PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp], [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"]) have_seccomp=1 + AC_DEFINE([HAVE_SECCOMP], [1], [Whether seccomp is available and should be used for sandboxing.]) else have_seccomp= fi From 70eb64147e073a0a7cf1d44af6815cfd4f8d507f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Feb 2018 20:38:06 +0100 Subject: [PATCH 0841/2196] Update release notes Also add some examples to nix --help. --- doc/manual/release-notes/rl-2.0.xml | 983 +++++++++++++++++++++------- doc/manual/style.css | 12 +- src/nix/ls.cc | 20 + src/nix/run.cc | 4 + 4 files changed, 755 insertions(+), 264 deletions(-) diff --git a/doc/manual/release-notes/rl-2.0.xml b/doc/manual/release-notes/rl-2.0.xml index effd2e39d30..0ad17373ef4 100644 --- a/doc/manual/release-notes/rl-2.0.xml +++ b/doc/manual/release-notes/rl-2.0.xml @@ -6,165 +6,572 @@ Release 2.0 (2018-02-??) -This release has the following new features: +The following incompatible changes have been made: - Start of new nix command line - interface. This is a work in progress and the interface is subject - to change. + The manifest-based substituter mechanism + (download-using-manifests) has been removed. It + has been superseded by the binary cache substituter mechanism + since several years. As a result, the following programs have been + removed: + nix-pull + nix-generate-patches + bsdiff + bspatch + + + - Self-documenting: shows - all available command-line arguments. - - shows all - configuration options. + + The “copy from other stores” substituter mechanism + (copy-from-other-stores and the + NIX_OTHER_STORES environment variable) has been + removed. It was primarily used by the NixOS installer to copy + available paths from the installation medium. The replacement is + to use a chroot store as a substituter + (e.g. --substituters /mnt), or to build into a + chroot store (e.g. --store /mnt --substituter /). + - nix build: Replacement for - nix-build. + + The command nix-push has been removed as + part of the effort to eliminate Nix's dependency on Perl. 
You can + use nix copy instead, e.g. nix copy + --to /tmp/my-binary-cache paths… + - nix ls-store and nix - ls-nar allow listing the contents of a store path or - NAR file. + + The “nested” log output feature () has been removed. As a result, + nix-log2xml was also removed. + - nix cat-store and - nix cat-nar allow extracting a file from a - store path or NAR file. + + OpenSSL-based signing has been removed. This + feature was never well-supported. A better alternative is provided + by the and + options. + - nix verify checks whether a - store path is unmodified and/or is trusted. + + Failed build caching has been removed. This + feature was introduced to support the Hydra continuous build + system, but Hydra no longer uses it. + - nix copy-sigs copies - signatures from one store to another. + + nix-mode.el has been removed from + Nix. It is now a separate + repository and can be installed through the MELPA package + repository. + - nix sign-paths signs store - paths. + - nix copy copies paths between - arbitrary Nix stores, generalising - nix-copy-closure and - nix-push. +This release has the following new features: - nix path-info shows - information about store paths. + - nix run starts a shell in - which the specified packages are available. + + It introduces a new command named nix, + which is intended to eventually replace all + nix-* commands with a more consistent and + better designed user interface. It currently provides replacements + for some (but not all) of the functionality provided by + nix-store, nix-build, + nix-shell -p, nix-env -qa, + nix-instantiate --eval, + nix-push and + nix-copy-closure. It has the following major + features: - nix log shows the build log - of a package or path. If the build log is not available locally, - it will try to obtain it from a binary cache. + - nix eval replaces - nix-instantiate --eval. + + Unlike the legacy commands, it has a consistent way to + refer to packages and package-like argumements (like store + paths). For example, the following commands all copy the GNU + Hello package to a remote machine: + + nix copy --to ssh://machine nixpkgs.hello + nix copy --to ssh://machine /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10 + nix copy --to ssh://machine '(with import <nixpkgs> {}; hello)' + + By contrast, nix-copy-closure only accepted + store paths as arguments. + + + + It is self-documenting: shows + all available command-line arguments. If + is given after a subcommand, it shows + examples for that subcommand. nix + --help-config shows all configuration + options. + + + + It is much less verbose. By default, it displays a + single-line progress indicator that shows how many packages + are left to be built or downloaded, and (if there are running + builds) the most recent line of builder output. If a build + fails, it shows the last few lines of builder output. The full + build log can be retrieved using nix + log. + + + + It provides + all nix.conf configuration options as + command line flags. For example, instead of --option + http-connections 100 you can write + --http-connections 100. Boolean options can + be written as + --foo or + --no-foo + (e.g. ). + + + + Many subcommands have a flag to + write results to stdout in JSON format. + - nix dump-path to get a NAR - from a store path. + - nix edit opens the source - code of a package in an editor. + Please note that the nix command + is a work in progress and the interface is subject to + change. - nix search replaces - nix-env -qa. 
It searches the available - packages for occurences of a search string in the attribute - name, package name or description. It caches available packages - to speed up searches. + It provides the following high-level (“porcelain”) + subcommands: - nix why-depends (d41c5eb13f4f3a37d80dbc6d3888644170c3b44a). + - nix show-derivation (e8d6ee7c1b90a2fe6d824f1a875acc56799ae6e2). + + nix build is a replacement for + nix-build. + + + + nix run executes a command in an + environment in which the specified packages are available. It + is (roughly) a replacement for nix-shell + -p. Unlike that command, it does not execute the + command in a shell, and has a flag (-c) + that specifies the unquoted command line to be + executed. + + It is particularly useful in conjunction with chroot + stores, allowing Linux users who do not have permission to + install Nix in /nix/store to still use + binary substitutes that assume + /nix/store. For example, + + nix run --store ~/my-nix nixpkgs.hello -c hello --greeting 'Hi everybody!' + + downloads (or if not substitutes are available, builds) the + GNU Hello package into + ~/my-nix/nix/store, then runs + hello in a mount namespace where + ~/my-nix/nix/store is mounted onto + /nix/store. + + + + nix search replaces nix-env + -qa. It searches the available packages for + occurences of a search string in the attribute name, package + name or description. Unlike nix-env -qa, it + has a cache to speed up subsequent searches. + + + + nix copy copies paths between + arbitrary Nix stores, generalising + nix-copy-closure and + nix-push. + + + + nix repl replaces the external + program nix-repl. It provides an + interactive environment for evaluating and building Nix + expressions. Note that it uses linenoise-ng + instead of GNU Readline. + + + + nix upgrade-nix upgrades Nix to the + latest stable version. This requires that Nix is installed in + a profile. (Thus it won’t work on NixOS, or if it’s installed + outside of the Nix store.) + + + + nix verify checks whether store paths + are unmodified and/or “trusted” (see below). It replaces + nix-store --verify and nix-store + --verify-path. + + + + nix log shows the build log of a + package or path. If the build log is not available locally, it + will try to obtain it from the configured substituters (such + as cache.nixos.org, which now provides build + logs). + + + + nix edit opens the source code of a + package in your editor. + + + + nix eval replaces + nix-instantiate --eval. + + + + nix + why-depends shows why one store path has another in + its closure. This is primarily useful to finding the causes of + closure bloat. For example, + + nix why-depends nixpkgs.vlc nixpkgs.libdrm.dev + + shows a chain of files and fragments of file contents that + cause the VLC package to have the “dev” output of + libdrm in its closure — an undesirable + situation. + + + + nix path-info shows information about + store paths, replacing nix-store -q. A + useful feature is the option + (). For example, the following command show + the closure sizes of every path in the current NixOS system + closure, sorted by size: + + nix path-info -rS /run/current-system | sort -nk2 + + + + + + nix optimise-store replaces + nix-store --optimise. The main difference + is that it has a progress indicator. + - nix add-to-store (970366266b8df712f5f9cedb45af183ef5a8357f). + - nix upgrade-nix upgrades Nix - to the latest stable version. This requires that Nix is - installed in a profile. 
(Thus it won’t work on NixOS, or if it’s - installed outside of the Nix store.) + A number of low-level (“plumbing”) commands are also + available: - Progress indicator. + - All options are available as flags now - (b8283773bd64d7da6859ed520ee19867742a03ba). + + nix ls-store and nix + ls-nar list the contents of a store path or NAR + file. The former is primarily useful in conjunction with + remote stores, e.g. + + nix ls-store --store https://cache.nixos.org/ -lR /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10 + + lists the contents of path in a binary cache. + + + + nix cat-store and nix + cat-nar allow extracting a file from a store path or + NAR file. + + + + nix dump-path writes the contents of + a store path to stdout in NAR format. This replaces + nix-store --dump. + + + + nix + show-derivation displays a store derivation in JSON + format. This is an alternative to + pp-aterm. + + + + nix + add-to-store replaces nix-store + --add. + + + + nix sign-paths signs store + paths. (TODO: add examples) + + + + nix copy-sigs copies signatures from + one store to another. (TODO: add examples and + tests) + + + + nix show-config shows all + configuration options and their current values. + - The external program nix-repl has been - integrated into Nix as nix repl. - + The store abstraction that Nix has had for a long time to + support store access via the Nix daemon has been extended + significantly. In particular, substituters (which used to be + external programs such as + download-from-binary-cache) are now subclasses + of the abstract Store class. This allows + many Nix commands to operate on such store types. For example, + nix path-info shows information about paths in + your local Nix store, while nix path-info --store + https://cache.nixos.org/ shows information about paths + in the specified binary cache. Similarly, + nix-copy-closure, nix-push + and substitution are all instances of the general notion of + copying paths between different kinds of Nix stores. - - If a fixed-output derivation produces a result with an - incorrect hash, the output path will be moved to the location - corresponding to the actual hash and registered as valid. Thus, a - subsequent build of the fixed-output derivation with the correct - hash is unnecessary. - + Stores are specified using an URI-like syntax, + e.g. https://cache.nixos.org/ or + ssh://machine. The following store types are supported: + + + + + + LocalStore (stori URI + local or an absolute path) and the misnamed + RemoteStore (daemon) + provide access to a local Nix store, the latter via the Nix + daemon. You can use auto or the empty + string to auto-select a local or daemon store depending on + whether you have write permission to the Nix store. It is no + longer necessary to set the NIX_REMOTE + environment variable to use the Nix daemon. + + As noted above, LocalStore now + supports chroot builds, allowing the “physical” location of + the Nix store + (e.g. /home/alice/nix/store) to differ + from its “logical” location (typically + /nix/store). This allows non-root users + to use Nix while still getting the benefits from prebuilt + binaries from cache.nixos.org. + + + + + + BinaryCacheStore is the abstract + superclass of all binary cache stores. It supports writing + build logs and NAR content listings in JSON format. + + + + + + HttpBinaryCacheStore + (http://, https://) + supports binary caches via HTTP or HTTPS. If the server + supports PUT requests, it supports + uploading store paths via commands such as nix + copy. 
+ + + + + + LocalBinaryCacheStore + (file://) supports binary caches in the + local filesystem. + + + + + + S3BinaryCacheStore + (s3://) supports binary caches stored in + Amazon S3, if enabled at compile time. + + + + + + LegacySSHStore (ssh://) + is used to implement remote builds and + nix-copy-closure. + + + + + + SSHStore + (ssh-ng://) supports arbitrary Nix + operations on a remote machine via the same protocol used by + nix-daemon. + + + + + + - - It is no longer necessary to set the - NIX_REMOTE environment variable if you need to use - the Nix daemon. Nix will use the daemon automatically if you don’t - have write access to the Nix database. - The Nix language now supports floating point numbers. They are - based on regular C++ float and compatible with - existing integers and number-related operations. Export and import to and - from JSON and XML works, too. + + Security has been improved in various ways: + + + + + Nix now stores signatures for local store + paths. When paths are copied between stores (e.g., copied from + a binary cache to a local store), signatures are + propagated. + + Locally-built paths are signed automatically using the + secret keys specified by the + store option. Secret/public key pairs can be generated using + nix-store + --generate-binary-cache-key. (TODO: rename) + + In addition, locally-built store paths are marked as + “ultimately trusted”, but this bit is not propagated when + paths are copied between stores. + + + + Content-addressable store paths no longer require + signatures — they can be imported into a store by unprivileged + users even if they lack signatures. + + + + The command nix verify checks whether + the specified paths are trusted, i.e., have a certain number + of trusted signatures, are ultimately trusted, or are + content-addressed. + + + + Substitutions from binary caches now + require signatures by default. This was already the case on + NixOS. + + + + In Linux sandbox builds, we now + use /build instead of + /tmp as the temporary build + directory. This fixes potential security problems when a build + accidentally stores its TMPDIR in some + security-sensitive place, such as an RPATH. + + + + + + - nix-shell now sets the - IN_NIX_SHELL environment variable during - evaluation and in the shell itself. This can be used to perform - different actions depending on whether you’re in a Nix shell or in - a regular build. Nixpkgs provides - lib.inNixShell to check this variable during - evaluation. (bb36a1a3cf3fbe6bc9d0afcc5fa0f928bed03170) + Pure evaluation mode. This is a variant + of the existing restricted evaluation mode. In pure mode, the Nix + evaluator forbids access to anything that could cause different + evaluations of the same command line arguments to produce a + different result. This includes builtin functions such as + builtins.getEnv, but more importantly, + all filesystem or network access unless a + content hash or commit hash is specified. For example, calls to + builtins.fetchGit are only allowed if a + rev attribute is specified. + + The goal of this feature is to enable true reproducibility + and traceability of builds (including NixOS system configurations) + at the evaluation level. For example, in the future, + nixos-rebuild might build configurations from a + Nix expression in a Git repository in pure mode. That expression + might fetch other repositories such as Nixpkgs via + builtins.fetchGit. 
The commit hash of the + top-level repository then uniquely identifies a running system, + and, in conjunction with that repository, allows it to be + reproduced or modified. + - Internal: all Store classes are now - thread-safe. RemoteStore supports multiple - concurrent connections to the daemon. This is primarily useful in - multi-threaded programs such as - hydra-queue-runner. + There are several new features to support binary + reproducibility (i.e. to help ensure that multiple builds of the + same derivation produce exactly the same output). When + is set to + false, it’s no + longer a fatal error if build rounds produce different + output. Also, a hook named is provided + to allow you to run tools such as diffoscope + when build rounds produce different output. - The dependency on Perl has been removed. As a result, some - (obsolete) programs have been removed: nix-push - (replaced by nix copy), - nix-pull (obsoleted by binary caches), - nix-generate-patches, - bsdiff, bspatch. + Configuring remote builds is a lot easier now. Provided you + are not using the Nix daemon, you can now just specify a remote + build machine on the command line, e.g. --option builders + 'ssh://my-mac x86_64-darwin'. The environment variable + NIX_BUILD_HOOK has been removed and is no longer + needed. The environment variable NIX_REMOTE_SYSTEMS + is still supported for compatibility, but it is also possible to + specify builders in nix.conf by setting the + option builders = + @path. - Improved store abstraction. Substituters - eliminated. BinaryCacheStore, LocalBinaryCacheStore, - HttpBinaryCacheStore, S3BinaryCacheStore (compile-time - optional), SSHStore. Add docs + examples? - + If a fixed-output derivation produces a result with an + incorrect hash, the output path is moved to the location + corresponding to the actual hash and registered as valid. Thus, a + subsequent build of the fixed-output derivation with the correct + hash is unnecessary. - Nix now stores signatures for local store - paths. Locally-built paths are now signed automatically using the - secret keys specified by the - store option. - - In addition, store paths that have been built locally are - marked as “ultimately trusted”, and content-addressable store - paths carry a “content-addressability assertion” that allow them - to be trusted without any signatures. + nix-shell now + sets the IN_NIX_SHELL environment variable + during evaluation and in the shell itself. This can be used to + perform different actions depending on whether you’re in a Nix + shell or in a regular build. Nixpkgs provides + lib.inNixShell to check this variable during + evaluation. @@ -179,7 +586,8 @@ https://nixos.org/channels/channel-name/nixexprs.tar.xz. For example, nix-build channel:nixos-15.09 -A hello will build the GNU Hello package from the - nixos-15.09 channel. + nixos-15.09 channel. In the future, this may + use Git to fetch updates more efficiently. @@ -189,45 +597,119 @@ - builtins.fetchGit. - (38539b943a060d9cdfc24d6e5d997c0885b8aa2f) + Networking has been improved: + + + + + HTTP/2 is now supported. This makes binary cache lookups + much + more efficient. + + + + We now retry downloads on many HTTP errors, making + binary caches substituters more resilient to temporary + failures. + + + + HTTP credentials can now be configured via the standard + netrc mechanism. + + + + If S3 support is enabled at compile time, + s3:// URIs are supported + in all places where Nix allows URIs. + + + + Brotli compression is now supported. 
In particular, + cache.nixos.org build logs are now compressed using + Brotli. + + + + + + - <nix/fetchurl.nix> now uses the - content-addressable tarball cache at - http://tarballs.nixos.org/, just like - fetchurl in - Nixpkgs. (f2682e6e18a76ecbfb8a12c17e3a0ca15c084197) + nix-env now + ignores packages with bad derivation names (in particular those + starting with a digit or containing a dot). - Chroot Nix stores: allow the “physical” location of the Nix - store (e.g. /home/alice/nix/store) to differ - from its “logical” location (typically - /nix/store). This allows non-root users to - use Nix while still getting the benefits from prebuilt binaries - from - cache.nixos.org. (4494000e04122f24558e1436e66d20d89028b4bd, - 3eb621750848e0e6b30e5a79f76afbb096bb6c8a) + Many configuration options have been renamed, either because + they were unnecessarily verbose + (e.g. is now just + ) or to reflect generalised behaviour + (e.g. is now + because it allows arbitrary store + URIs). The old names are still supported for compatibility. - On Linux, builds are now executed in a user - namespace with uid 1000 and gid 100. + The option can now + be set to auto to use the number of CPUs in the + system. - builtins.fetchurl and - builtins.fetchTarball now support - sha256 and name - attributes. + Hashes can now + be specified in base-64 format, in addition to base-16 and the + non-standard base-32. - HttpBinaryCacheStore (the replacement of - download-from-binary-cache) now retries - automatically on certain HTTP error codes. + nix-shell now uses + bashInteractive from Nixpkgs, rather than the + bash command that happens to be in the caller’s + PATH. This is especially important on macOS where + the bash provided by the system is seriously + outdated and cannot execute stdenv’s setup + script. + + + + Nix can now automatically trigger a garbage collection if + free disk space drops below a certain level during a build. This + is configured using the and + options. + + + + nix-store -q --roots and + nix-store --gc --print-roots now show temporary + and in-memory roots. + + + + + Nix can now be extended with plugins. See the documentation of + the option for more details. + + + + + +The Nix language has the following new features: + + + + + It supports floating point numbers. They are based on the + C++ float type and are supported by the + existing numerical operators. Export and import to and from JSON + and XML works, too. @@ -245,194 +727,187 @@ configureFlags = "--prefix=${placeholder "out"} --includedir=${placeholder "dev" add docs. - - Support for HTTP/2. This makes binary cache lookups much - more efficient. (90ad02bf626b885a5dd8967894e2eafc953bdf92) - + - - The configuration - option can now specify optional paths by appending a - ?, e.g. /dev/nvidiactl? will - bind-mount /dev/nvidiactl only if it - exists. - + - - More support for testing build reproducibility: when - is set to - false, it’s no longer a fatal error build - rounds produce different output - (8bdf83f936adae6f2c907a6d2541e80d4120f051); add a hook to run - diffoscope when build rounds produce different output - (9a313469a4bdea2d1e8df24d16289dc2a172a169w). - +The following builtin functions are new or extended: - - Kill builds as soon as stdout/stderr is closed. This fixes a - bug that allowed builds to hang Nix indefinitely (regardless of - timeouts). (21948deed99a3295e4d5666e027a6ca42dc00b40) - + - Add support for passing structured data to builders. TODO: - document. 
(6de33a9c675b187437a2e1abbcb290981a89ecb1) - + builtins.fetchGit + allows Git repositories to be fetched at evaluation time. Thus it + differs from the fetchgit function in + Nixpkgs, which fetches at build time and cannot be used to fetch + Nix expressions during evaluation. A typical use case is to import + external NixOS modules from your configuration, e.g. - - exportReferencesGraph: Export more - complete info in JSON - format. (c2b0d8749f7e77afc1c4b3e8dd36b7ee9720af4a) - + imports = [ (builtins.fetchGit https://github.com/edolstra/dwarffs + "/module.nix") ]; - - Support for - netrc. (e6e74f987f0fa284d220432d426eb965269a97d6, - 302386f775eea309679654e5ea7c972fb6e7b9af) + - Support s3:// URIs in all places where Nix allows - URIs. (9ff9c3f2f80ba4108e9c945bbfda2c64735f987b) + Similarly, builtins.fetchMercurial + allows you to fetch Mercurial repositories. - The option can be set to - auto to use the number of CPUs in the - system. (7251d048fa812d2551b7003bc9f13a8f5d4c95a5) + builtins.path generalises + builtins.filterSource and path literals + (e.g. ./foo). It allows specifying a store path + name that differs from the source path name + (e.g. builtins.path { path = ./foo; name = "bar"; + }) and also supports filtering out unwanted + files. - Add support for Brotli compression. - cache.nixos.org compresses build logs using - Brotli. + builtins.fetchurl and + builtins.fetchTarball now support + sha256 and name + attributes. - Substitutions from binary caches now require signatures by - default. This was already the case on - NixOS. (ecbc3fedd3d5bdc5a0e1a0a51b29062f2874ac8b) + builtins.split + splits a string using a POSIX extended regular expression as the + separator. - nix-env now ignores packages with bad - derivation names (in particular those starting with a digit or - containing a - dot). (b0cb11722626e906a73f10dd9a0c9eea29faf43a) + builtins.partition + partitions the elements of a list into two lists, depending on a + Boolean predicate. - Renamed various configuration options. (TODO: in progress) + <nix/fetchurl.nix> now uses the + content-addressable tarball cache at + http://tarballs.nixos.org/, just like + fetchurl in + Nixpkgs. (f2682e6e18a76ecbfb8a12c17e3a0ca15c084197) - Remote machines can now be specified on the command - line. TODO: - document. (1a68710d4dff609bbaf61db3e17a2573f0aadf17) + In restricted and pure evaluation mode, builtin functions + that download from the network (such as + fetchGit) are permitted to fetch underneath a + list of URI prefixes specified in the option + . - - In Linux sandbox builds, we now use - /build instead of /tmp - as the temporary build directory. This fixes potential security - problems when a build accidentally stores its - TMPDIR in some critical place, such as an - RPATH. (eba840c8a13b465ace90172ff76a0db2899ab11b) - + - - In Linux sandbox builds, we now provide a default - /bin/sh (namely ash from - BusyBox). (a2d92bb20e82a0957067ede60e91fab256948b41) - + - - Make all configuration options available as command line - flags (b8283773bd64d7da6859ed520ee19867742a03ba). - +The Nix build environment has the following changes: - - Support base-64 - hashes. (c0015e87af70f539f24d2aa2bc224a9d8b84276b) - + - nix-shell now uses - bashInteractive from Nixpkgs, rather than the - bash command that happens to be in the caller’s - PATH. This is especially important on macOS where - the bash provided by the system is seriously - outdated and cannot execute stdenv’s setup - script. 
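As a rough illustration of the new builtins.split and builtins.partition primitives described above, the following command-line sketch shows the kind of values they return (the inputs are illustrative, and the output rendering follows the nix-instantiate --eval --strict format used by the language tests in this series):

  $ nix-instantiate --eval --strict -E 'builtins.split "(a)b" "abc"'
  [ "" [ "a" ] "c" ]

  $ nix-instantiate --eval --strict -E 'builtins.partition (x: x > 2) [ 1 2 3 4 ]'
  { right = [ 3 4 ]; wrong = [ 1 2 ]; }

builtins.split interleaves the non-matching substrings with lists of the regex capture groups, while builtins.partition returns an attribute set whose right and wrong lists hold the elements that do and do not satisfy the predicate.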
+ Values such as Booleans, integers, (nested) lists and + attribute sets can now + be passed to builders in a non-lossy way. If the special attribute + __structuredAttrs is set to + true, the other derivation attributes are + serialised in JSON format and made available to the builder via + the file .attrs.json in the builder’s temporary + directory. This obviates the need for + passAsFile since JSON files have no size + restrictions, unlike process environments. + + As + a convenience to Bash builders, Nix writes a script named + .attrs.sh to the builder’s directory that + initialises shell variables corresponding to all attributes that + are representable in Bash. This includes non-nested (associative) + arrays. For example, the attribute hardening.format = + true ends up as the Bash associative array element + ${hardening[format]}. + + + + Builders can now + communicate what build phase they are in by writing messages to + the file descriptor specified in NIX_LOG_FD. The + current phase is shown by the nix progress + indicator. + - New builtin functions: builtins.split - (b8867a0239b1930a16f9ef3f7f3e864b01416dff), - builtins.partition. + In Linux sandbox builds, we now + provide a default /bin/sh (namely + ash from BusyBox). - Automatic garbage collection. + In structured attribute mode, + exportReferencesGraph exports + extended information about closures in JSON format. In particular, + it includes the sizes and hashes of paths. This is primarily + useful for NixOS image builders. - nix-store -q --roots and - nix-store --gc --print-roots now show temporary - and in-memory roots. + Builds are now + killed as soon as Nix receives EOF on the builder’s stdout or + stderr. This fixes a bug that allowed builds to hang Nix + indefinitely, regardless of + timeouts. - Builders can now communicate what build phase they are in by - writing messages to the file descriptor specified in - NIX_LOG_FD. (88e6bb76de5564b3217be9688677d1c89101b2a3) - + The configuration + option can now specify optional paths by appending a + ?, e.g. /dev/nvidiactl? will + bind-mount /dev/nvidiactl only if it + exists. - - Nix can now be extended with plugins. See the documentation of - the 'plugin-files' option for more details. - + On Linux, builds are now executed in a user + namespace with uid 1000 and gid 100. -Some features were removed: - - - - - “Nested” log output. As a result, - nix-log2xml was also removed. - + - - OpenSSL-based signing. (f435f8247553656774dd1b2c88e9de5d59cab203) - +A number of significant internal changes were made: - - Caching of failed - builds. (8cffec84859cec8b610a2a22ab0c4d462a9351ff) - + - nix-mode.el has been removed from - Nix. It is now a separate repository in - https://github.com/NixOS/nix-mode and can be installed - through the MELPA package repository. + Nix no longer depends on Perl and all Perl components have + been rewritten in C++ or removed. The Perl bindings that used to + be part of Nix have been moved to a separate package, + nix-perl. - In restricted evaluation mode - (), builtin functions that - download from the network (such as fetchGit) - are permitted to fetch underneath the list of URI prefixes - specified in the option . + All Store classes are now + thread-safe. RemoteStore supports multiple + concurrent connections to the daemon. This is primarily useful in + multi-threaded programs such as + hydra-queue-runner. + + This release has contributions from TBD. 
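To make the pure-evaluation-mode rule described in these notes concrete: a network fetch is only accepted when it is pinned by a commit hash, so an expression along the following lines would be allowed, whereas the same call without rev would be rejected. The URL and the all-zero rev are placeholders, not values taken from this patch:

  builtins.fetchGit {
    url = "https://github.com/NixOS/nixpkgs.git";
    rev = "0000000000000000000000000000000000000000";  # placeholder commit hash
  }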
diff --git a/doc/manual/style.css b/doc/manual/style.css index 53fd9d5709c..592583ab086 100644 --- a/doc/manual/style.css +++ b/doc/manual/style.css @@ -96,7 +96,6 @@ div.example margin-right: 1.5em; background: #f4f4f8; border-radius: 0.4em; - box-shadow: 0.4em 0.4em 0.5em #e0e0e0; } div.example p.title @@ -106,7 +105,6 @@ div.example p.title div.example pre { - box-shadow: none; } @@ -116,15 +114,12 @@ div.example pre pre.screen, pre.programlisting { - border: 1px solid #b0b0b0; - padding: 3px 3px; + padding: 6px 6px; margin-left: 1.5em; margin-right: 1.5em; color: #600000; background: #f4f4f8; font-family: monospace; - border-radius: 0.4em; - box-shadow: 0.4em 0.4em 0.5em #e0e0e0; } div.example pre.programlisting @@ -149,7 +144,6 @@ div.example pre.programlisting padding: 0.3em 0.3em 0.3em 0.3em; background: #fffff5; border-radius: 0.4em; - box-shadow: 0.4em 0.4em 0.5em #e0e0e0; } div.note, div.warning @@ -256,16 +250,14 @@ span.command strong div.calloutlist table { - box-shadow: none; } table { border-collapse: collapse; - box-shadow: 0.4em 0.4em 0.5em #e0e0e0; } div.affiliation { font-style: italic; -} \ No newline at end of file +} diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 69620595d8c..e99622faf47 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -90,6 +90,16 @@ struct CmdLsStore : StoreCommand, MixLs expectArg("path", &path); } + Examples examples() override + { + return { + Example{ + "To list the contents of a store path in a binary cache:", + "nix ls-store --store https://cache.nixos.org/ -lR /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10" + }, + }; + } + std::string name() override { return "ls-store"; @@ -116,6 +126,16 @@ struct CmdLsNar : Command, MixLs expectArg("path", &path); } + Examples examples() override + { + return { + Example{ + "To list a specific file in a NAR:", + "nix ls-nar -l hello.nar /bin/hello" + }, + }; + } + std::string name() override { return "ls-nar"; diff --git a/src/nix/run.cc b/src/nix/run.cc index ade87e63a49..822654daf48 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -85,6 +85,10 @@ struct CmdRun : InstallablesCommand "To run GNU Hello:", "nix run nixpkgs.hello -c hello --greeting 'Hi everybody!'" }, + Example{ + "To run GNU Hello in a chroot store:", + "nix run --store ~/my-nix nixpkgs.hello -c hello" + }, }; } From d7fdfe322b81f4b8578e55d95978484ec84cc46c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Feb 2018 20:40:25 +0100 Subject: [PATCH 0842/2196] Remove macOS multi-user instructions This is already handled by the installer. --- doc/manual/installation/multi-user.xml | 28 -------------------------- 1 file changed, 28 deletions(-) diff --git a/doc/manual/installation/multi-user.xml b/doc/manual/installation/multi-user.xml index a13e3c89be7..69ae1ef2704 100644 --- a/doc/manual/installation/multi-user.xml +++ b/doc/manual/installation/multi-user.xml @@ -52,34 +52,6 @@ This creates 10 build users. There can never be more concurrent builds than the number of build users, so you may want to increase this if you expect to do many builds at the same time. -On macOS, you can create the required group and users by -running the following script: - - -#! /bin/bash -e - -dseditgroup -o create nixbld -q - -gid=$(dscl . -read /Groups/nixbld | awk '($1 == "PrimaryGroupID:") {print $2 }') - -echo "created nixbld group with gid $gid" - -for i in $(seq 1 10); do - user=/Users/nixbld$i - uid="$((30000 + $i))" - dscl . create $user - dscl . create $user RealName "Nix build user $i" - dscl . 
create $user PrimaryGroupID "$gid" - dscl . create $user UserShell /usr/bin/false - dscl . create $user NFSHomeDirectory /var/empty - dscl . create $user UniqueID "$uid" - dseditgroup -o edit -a nixbld$i -t user nixbld - echo "created nixbld$i user with uid $uid" -done - - - - From d4e93532e2695fa7204937a96573b686a3b1a1ea Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Feb 2018 20:46:39 +0100 Subject: [PATCH 0843/2196] Fix incorrect (and unnecessary) format string https://hydra.nixos.org/eval/1434547#tabs-now-fail --- src/libstore/build.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 30f22833fa7..1d611ffbaba 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2520,9 +2520,9 @@ void setupSeccomp() if (seccomp_load(ctx) != 0) throw SysError("unable to load seccomp BPF program"); #else - throw Error("%s\n%s", - "seccomp is not supported on this platform" - "you can avoid this by setting the filter-syscalls option to false, but note that untrusted builds can then create setuid binaries!"); + throw Error( + "seccomp is not supported on this platform; " + "you can bypass this error by setting the option 'filter-syscalls' to false, but note that untrusted builds can then create setuid binaries!"); #endif #endif } From 056d28a60110975332d8cb0c08990a1d3d8060a7 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 19 Feb 2018 22:47:25 +0200 Subject: [PATCH 0844/2196] libexpr: Don't create lots of temporary strings in Bindings::lexicographicOrder Avoids ~180,000 string temporaries created when evaluating a headless NixOS system. --- src/libexpr/attr-set.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh index e1fc2bf6d79..3119a1848af 100644 --- a/src/libexpr/attr-set.hh +++ b/src/libexpr/attr-set.hh @@ -83,7 +83,7 @@ public: for (size_t n = 0; n < size_; n++) res.emplace_back(&attrs[n]); std::sort(res.begin(), res.end(), [](const Attr * a, const Attr * b) { - return (string) a->name < (string) b->name; + return (const string &) a->name < (const string &) b->name; }); return res; } From 1d0e42879fa687a7b6856b1a63070e44bd8ed5c4 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 19 Feb 2018 17:32:11 +0200 Subject: [PATCH 0845/2196] libutil: Fix infinite loop in filterANSIEscapes on '\r' E.g. nix-instantiate --eval -E 'abort "\r"' hangs. Found by afl-fuzz. --- src/libutil/util.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index f7a12d21b24..341dedfdf03 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1216,7 +1216,7 @@ std::string filterANSIEscapes(const std::string & s, unsigned int width) else if (*i == '\r') // do nothing for now - ; + i++; else { t += *i++; w++; From 4ea9707591beceacf9988b3c185faf50da238403 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 19 Feb 2018 17:52:33 +0200 Subject: [PATCH 0846/2196] libexpr: Fix prim_replaceStrings() to work on an empty source string Otherwise, running e.g. nix-instantiate --eval -E --strict 'builtins.replaceStrings [""] ["X"] "abc"' would just hang in an infinite loop. Found by afl-fuzz. 
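With this change applied, the empty-pattern case terminates and matches the updated test expectation below; roughly:

  $ nix-instantiate --eval --strict -E 'builtins.replaceStrings [""] ["X"] "abc"'
  "XaXbXcX"

that is, the replacement string is inserted before every character and once more at the end of the input.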
--- src/libexpr/primops.cc | 11 ++++++++--- tests/lang/eval-okay-replacestrings.exp | 2 +- tests/lang/eval-okay-replacestrings.nix | 2 ++ 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a800d24290a..ab9351f1185 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1913,21 +1913,26 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar auto s = state.forceString(*args[2], context, pos); string res; - for (size_t p = 0; p < s.size(); ) { + // Loops one past last character to handle the case where 'from' contains an empty string. + for (size_t p = 0; p <= s.size(); ) { bool found = false; auto i = from.begin(); auto j = to.begin(); for (; i != from.end(); ++i, ++j) if (s.compare(p, i->size(), *i) == 0) { found = true; - p += i->size(); res += j->first; + if (i->empty()) { + res += s[p++]; + } else { + p += i->size(); + } for (auto& path : j->second) context.insert(path); j->second.clear(); break; } - if (!found) res += s[p++]; + if (!found && p < s.size()) res += s[p++]; } mkString(v, res, context); diff --git a/tests/lang/eval-okay-replacestrings.exp b/tests/lang/eval-okay-replacestrings.exp index a2add1b7b14..590c281ac86 100644 --- a/tests/lang/eval-okay-replacestrings.exp +++ b/tests/lang/eval-okay-replacestrings.exp @@ -1 +1 @@ -[ "faabar" "fbar" "fubar" "faboor" "fubar" ] +[ "faabar" "fbar" "fubar" "faboor" "fubar" "XaXbXcX" "X" ] diff --git a/tests/lang/eval-okay-replacestrings.nix b/tests/lang/eval-okay-replacestrings.nix index 6284a0e660a..c84e9269ff3 100644 --- a/tests/lang/eval-okay-replacestrings.nix +++ b/tests/lang/eval-okay-replacestrings.nix @@ -5,4 +5,6 @@ with builtins; (replaceStrings ["oo"] ["u"] "foobar") (replaceStrings ["oo" "a"] ["a" "oo"] "foobar") (replaceStrings ["oo" "oo"] ["u" "i"] "foobar") + (replaceStrings [""] ["X"] "abc") + (replaceStrings [""] ["X"] "") ] From 546f98dace5c3569211caf392c9dde06a20aa7b0 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 19 Feb 2018 18:44:30 +0200 Subject: [PATCH 0847/2196] libutil: Fix invalid assert on decoding base64 hashes The assertion is broken because there is no one-to-one mapping from length of a base64 string to the length of the output. E.g. "1q69lz7Empb06nzfkj651413n9icx0njmyr3xzq1j9q=" results in a 32-byte output. "1q69lz7Empb06nzfkj651413n9icx0njmyr3xzq1j9qy" results in a 33-byte output. To reproduce, evaluate: builtins.derivationStrict { name = "0"; builder = "0"; system = "0"; outputHashAlgo = "sha256"; outputHash = "1q69lz7Empb06nzfkj651413n9icx0njmyr3xzq1j9qy"; } Found by afl-fuzz. 
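The ambiguity follows from the encoding arithmetic: a 32-byte SHA-256 hash encodes to 44 base64 characters including one '=' of padding, whereas a 44-character base64 string without padding decodes to 33 bytes. A quick way to see this for the hash given above, assuming GNU coreutils:

  $ echo -n 1q69lz7Empb06nzfkj651413n9icx0njmyr3xzq1j9qy | base64 --decode | wc -c
  33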
--- src/libutil/hash.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 11e3c9dca58..75e4767550f 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -189,7 +189,8 @@ Hash::Hash(const std::string & s, HashType type) else if (size == base64Len()) { auto d = base64Decode(std::string(s, pos)); - assert(d.size() == hashSize); + if (d.size() != hashSize) + throw BadHash("invalid base-64 hash '%s'", s); memcpy(hash, d.data(), hashSize); } From cea4fb3a31de8ae04faf8437c7b0ac6b964a1aad Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 20 Feb 2018 12:33:32 +0100 Subject: [PATCH 0848/2196] Fix evaluation of binaryTarball.aarch64-linux --- release.nix | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/release.nix b/release.nix index 9e04f0b67f9..3f8d5da4721 100644 --- a/release.nix +++ b/release.nix @@ -1,5 +1,5 @@ { nix ? builtins.fetchGit ./. -, nixpkgs ? fetchTarball channel:nixos-17.09 +, nixpkgs ? builtins.fetchGit { url = https://github.com/NixOS/nixpkgs.git; ref = "nix-2.0"; } , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] }: @@ -127,7 +127,6 @@ let binaryTarball = pkgs.lib.genAttrs systems (system: - # FIXME: temporarily use a different branch for the Darwin build. with import nixpkgs { inherit system; }; let @@ -137,7 +136,7 @@ let runCommand "nix-binary-tarball-${version}" { exportReferencesGraph = [ "closure1" toplevel "closure2" cacert ]; - buildInputs = [ perl shellcheck ]; + buildInputs = [ perl ] ++ lib.optional (system != "aarch64-linux") shellcheck; meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; } '' @@ -150,8 +149,10 @@ let --subst-var-by nix ${toplevel} \ --subst-var-by cacert ${cacert} - shellcheck -e SC1090 $TMPDIR/install - shellcheck -e SC1091,SC2002 $TMPDIR/install-darwin-multi-user + if type -p shellcheck; then + shellcheck -e SC1090 $TMPDIR/install + shellcheck -e SC1091,SC2002 $TMPDIR/install-darwin-multi-user + fi chmod +x $TMPDIR/install chmod +x $TMPDIR/install-darwin-multi-user From 4e44025ac5e280c50641d18f1e394c9f120f8bf7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 20 Feb 2018 15:19:46 +0100 Subject: [PATCH 0849/2196] Release notes: Add contributors --- doc/manual/release-notes/rl-2.0.xml | 109 +++++++++++++++++++++++++++- 1 file changed, 105 insertions(+), 4 deletions(-) diff --git a/doc/manual/release-notes/rl-2.0.xml b/doc/manual/release-notes/rl-2.0.xml index 0ad17373ef4..0d5296cc900 100644 --- a/doc/manual/release-notes/rl-2.0.xml +++ b/doc/manual/release-notes/rl-2.0.xml @@ -97,7 +97,7 @@ Unlike the legacy commands, it has a consistent way to - refer to packages and package-like argumements (like store + refer to packages and package-like arguments (like store paths). For example, the following commands all copy the GNU Hello package to a remote machine: @@ -190,7 +190,7 @@ nix search replaces nix-env -qa. It searches the available packages for - occurences of a search string in the attribute name, package + occurrences of a search string in the attribute name, package name or description. Unlike nix-env -qa, it has a cache to speed up subsequent searches. @@ -878,7 +878,7 @@ configureFlags = "--prefix=${placeholder "out"} --includedir=${placeholder "dev" On Linux, builds are now executed in a user - namespace with uid 1000 and gid 100. + namespace with UID 1000 and GID 100. 
@@ -908,6 +908,107 @@ configureFlags = "--prefix=${placeholder "out"} --includedir=${placeholder "dev" -This release has contributions from TBD. +This release has contributions from + +Adrien Devresse, +Alexander Ried, +Alex Cruice, +Alexey Shmalko, +AmineChikhaoui, +Andy Wingo, +Aneesh Agrawal, +Anthony Cowley, +Armijn Hemel, +aszlig, +Ben Gamari, +Benjamin Hipple, +Benjamin Staffin, +Benno Fünfstück, +Bjørn Forsman, +Brian McKenna, +Charles Strahan, +Chase Adams, +Chris Martin, +Christian Theune, +Chris Warburton, +Daiderd Jordan, +Dan Connolly, +Daniel Peebles, +Dan Peebles, +davidak, +David McFarland, +Dmitry Kalinkin, +Domen Kožar, +Eelco Dolstra, +Emery Hemingway, +Eric Litak, +Eric Wolf, +Fabian Schmitthenner, +Frederik Rietdijk, +Gabriel Gonzalez, +Giorgio Gallo, +Graham Christensen, +Guillaume Maudoux, +Harmen, +Iavael, +James Broadhead, +James Earl Douglas, +Janus Troelsen, +Jeremy Shaw, +Joachim Schiele, +Joe Hermaszewski, +Joel Moberg, +Johannes 'fish' Ziemke, +Jörg Thalheim, +Jude Taylor, +kballou, +Keshav Kini, +Kjetil Orbekk, +Langston Barrett, +Linus Heckemann, +Ludovic Courtès, +Manav Rathi, +Marc Scholten, +Markus Hauck, +Matt Audesse, +Matthew Bauer, +Matthias Beyer, +Matthieu Coudron, +N1X, +Nathan Zadoks, +Neil Mayhew, +Nicolas B. Pierron, +Niklas Hambüchen, +Nikolay Amiantov, +Ole Jørgen Brønner, +Orivej Desh, +Peter Simons, +Peter Stuart, +Pyry Jahkola, +regnat, +Renzo Carbonara, +Rhys, +Robert Vollmert, +Scott Olson, +Scott R. Parish, +Sergei Trofimovich, +Shea Levy, +Sheena Artrip, +Spencer Baugh, +Stefan Junker, +Susan Potter, +Thomas Tuegel, +Timothy Allen, +Tristan Hume, +Tuomas Tynkkynen, +tv, +Tyson Whitehead, +Vladimír Čunát, +Will Dietz, +wmertens, +Wout Mertens, +zimbatm and +Zoran Plesivčak. + From fa7fd76c5e8edf3b6888ec0f689e8130448987e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Wed, 21 Feb 2018 01:12:47 +0000 Subject: [PATCH 0850/2196] nix-copy: fix examples maybe a left-over from nix-store -r ? --- src/nix/copy.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 2ddea9e70a6..f29429c1ac4 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -57,15 +57,15 @@ struct CmdCopy : StorePathsCommand return { Example{ "To copy Firefox from the local store to a binary cache in file:///tmp/cache:", - "nix copy --to file:///tmp/cache -r $(type -p firefox)" + "nix copy --to file:///tmp/cache $(type -p firefox)" }, Example{ "To copy the entire current NixOS system closure to another machine via SSH:", - "nix copy --to ssh://server -r /run/current-system" + "nix copy --to ssh://server /run/current-system" }, Example{ "To copy a closure from another machine via SSH:", - "nix copy --from ssh://server -r /nix/store/a6cnl93nk1wxnq84brbbwr6hxw9gp2w9-blender-2.79-rc2" + "nix copy --from ssh://server /nix/store/a6cnl93nk1wxnq84brbbwr6hxw9gp2w9-blender-2.79-rc2" }, }; } From e2d71bd1862cdda65f957c6a6c41016258d9e003 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 21 Feb 2018 15:34:40 +0100 Subject: [PATCH 0851/2196] Revert "libexpr: Fix prim_replaceStrings() to work on an empty source string" This reverts commit 4ea9707591beceacf9988b3c185faf50da238403. It causes an infinite loop in Nixpkgs evaluation, e.g. "nix-instantiate -A hello" hung. PR #1886. 
--- src/libexpr/primops.cc | 11 +++-------- tests/lang/eval-okay-replacestrings.exp | 2 +- tests/lang/eval-okay-replacestrings.nix | 2 -- 3 files changed, 4 insertions(+), 11 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ab9351f1185..a800d24290a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1913,26 +1913,21 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar auto s = state.forceString(*args[2], context, pos); string res; - // Loops one past last character to handle the case where 'from' contains an empty string. - for (size_t p = 0; p <= s.size(); ) { + for (size_t p = 0; p < s.size(); ) { bool found = false; auto i = from.begin(); auto j = to.begin(); for (; i != from.end(); ++i, ++j) if (s.compare(p, i->size(), *i) == 0) { found = true; + p += i->size(); res += j->first; - if (i->empty()) { - res += s[p++]; - } else { - p += i->size(); - } for (auto& path : j->second) context.insert(path); j->second.clear(); break; } - if (!found && p < s.size()) res += s[p++]; + if (!found) res += s[p++]; } mkString(v, res, context); diff --git a/tests/lang/eval-okay-replacestrings.exp b/tests/lang/eval-okay-replacestrings.exp index 590c281ac86..a2add1b7b14 100644 --- a/tests/lang/eval-okay-replacestrings.exp +++ b/tests/lang/eval-okay-replacestrings.exp @@ -1 +1 @@ -[ "faabar" "fbar" "fubar" "faboor" "fubar" "XaXbXcX" "X" ] +[ "faabar" "fbar" "fubar" "faboor" "fubar" ] diff --git a/tests/lang/eval-okay-replacestrings.nix b/tests/lang/eval-okay-replacestrings.nix index c84e9269ff3..6284a0e660a 100644 --- a/tests/lang/eval-okay-replacestrings.nix +++ b/tests/lang/eval-okay-replacestrings.nix @@ -5,6 +5,4 @@ with builtins; (replaceStrings ["oo"] ["u"] "foobar") (replaceStrings ["oo" "a"] ["a" "oo"] "foobar") (replaceStrings ["oo" "oo"] ["u" "i"] "foobar") - (replaceStrings [""] ["X"] "abc") - (replaceStrings [""] ["X"] "") ] From 0d54671b7b3aa96ab45347e65352979d874346ea Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 21 Feb 2018 16:22:49 +0100 Subject: [PATCH 0852/2196] Manual: Update chapter on remote builds Alos add a command "nix ping-store" to make it easier to see if Nix can connect to a remote builder (e.g. 'nix ping-store --store ssh://mac'). --- .../advanced-topics/distributed-builds.xml | 211 ++++++++++++------ doc/manual/command-ref/conf-file.xml | 15 +- src/nix/ping-store.cc | 35 +++ 3 files changed, 191 insertions(+), 70 deletions(-) create mode 100644 src/nix/ping-store.cc diff --git a/doc/manual/advanced-topics/distributed-builds.xml b/doc/manual/advanced-topics/distributed-builds.xml index 1957e1105e6..20fd6a0cfb0 100644 --- a/doc/manual/advanced-topics/distributed-builds.xml +++ b/doc/manual/advanced-topics/distributed-builds.xml @@ -4,71 +4,109 @@ version="5.0" xml:id='chap-distributed-builds'> -Distributed Builds - -Nix supports distributed builds, where a local Nix installation can -forward Nix builds to other machines over the network. This allows -multiple builds to be performed in parallel (thus improving -performance) and allows Nix to perform multi-platform builds in a -semi-transparent way. For instance, if you perform a build for a -x86_64-darwin on an i686-linux -machine, Nix can automatically forward the build to a -x86_64-darwin machine, if available. - -You can enable distributed builds by setting the environment -variable NIX_BUILD_HOOK to point to a program that Nix -will call whenever it wants to build a derivation. 
The build hook -(typically a shell or Perl script) can decline the build, in which Nix -will perform it in the usual way if possible, or it can accept it, in -which case it is responsible for somehow getting the inputs of the -build to another machine, doing the build there, and getting the -results back. - -Remote machine configuration: -<filename>remote-systems.conf</filename> - -nix@mcflurry.labs.cs.uu.nl x86_64-darwin /home/nix/.ssh/id_quarterpounder_auto 2 -nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm -nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2 -nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2 kvm perf - - - -Nix ships with a build hook that should be suitable for most -purposes. It uses ssh and -nix-copy-closure to copy the build inputs and -outputs and perform the remote build. To use it, you should set -NIX_BUILD_HOOK to -prefix/libexec/nix/build-remote. -You should also define a list of available build machines and point -the environment variable NIX_REMOTE_SYSTEMS to -it. NIX_REMOTE_SYSTEMS must be an absolute path. An -example configuration is shown in . Each line in the file specifies a machine, with the following -bits of information: +Remote Builds + +Nix supports remote builds, where a local Nix installation can +forward Nix builds to other machines. This allows multiple builds to +be performed in parallel and allows Nix to perform multi-platform +builds in a semi-transparent way. For instance, if you perform a +build for a x86_64-darwin on an +i686-linux machine, Nix can automatically forward +the build to a x86_64-darwin machine, if +available. + +To forward a build to a remote machine, it’s required that the +remote machine is accessible via SSH and that it has Nix +installed. You can test whether connecting to the remote Nix instance +works, e.g. + + +$ nix ping-store --store ssh://mac + + +will try to connect to the machine named mac. It is +possible to specify an SSH identity file as part of the remote store +URI, e.g. + + +$ nix ping-store --store ssh://mac?ssh-key=/home/alice/my-key + + +Since builds should be non-interactive, the key should not have a +passphrase. Alternatively, you can load identities ahead of time into +ssh-agent or gpg-agent. + +If you get the error + + +bash: nix-store: command not found +error: cannot connect to 'mac' + + +then you need to ensure that the PATH of +non-interactive login shells contains Nix. + +If you are building via the Nix daemon, it is the Nix +daemon user account (that is, root) that should +have SSH access to the remote machine. If you can’t or don’t want to +configure root to be able to access to remote +machine, you can use a private Nix store instead by passing +e.g. --store ~/my-nix. + +The list of remote machines can be specified on the command line +or in the Nix configuration file. The former is convenient for +testing. For example, the following command allows you to build a +derivation for x86_64-darwin on a Linux machine: + + +$ uname +Linux + +$ nix build \ + '(with import <nixpkgs> { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \ + --builders 'ssh://mac x86_64-darwin' +[1/0/1 built, 0.0 MiB DL] building foo on ssh://mac + +$ cat ./result +Darwin + + +It is possible to specify multiple builders separated by a semicolon +or a newline, e.g. + + + --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd' + + + +Each machine specification consists of the following elements, +separated by spaces. 
Only the first element is required. - The name of the remote machine, with optionally the - user under which the remote build should be performed. This is - actually passed as an argument to ssh, so it can - be an alias defined in your + The URI of the remote store in the format + ssh://[username@]hostname, + e.g. ssh://nix@mac or + ssh://mac. For backward compatibility, + ssh:// may be omitted. The hostname may be an + alias defined in your ~/.ssh/config. A comma-separated list of Nix platform type identifiers, such as x86_64-darwin. It is possible for a machine to support multiple platform types, e.g., - i686-linux,x86_64-linux. + i686-linux,x86_64-linux. If omitted, this + defaults to the local platform type. - The SSH private key to be used to log in to the - remote machine. Since builds should be non-interactive, this key - should not have a passphrase! + The SSH identity file to be used to log in to the + remote machine. If omitted, SSH will use its regular + identities. - The maximum number of builds that - build-remote will execute in parallel on the - machine. Typically this should be equal to the number of CPU cores. - For instance, the machine itchy in the example - will execute up to 8 builds in parallel. + The maximum number of builds that Nix will execute + in parallel on the machine. Typically this should be equal to the + number of CPU cores. For instance, the machine + itchy in the example will execute up to 8 builds + in parallel. The “speed factor”, indicating the relative speed of the machine. If there are multiple machines of the right type, Nix @@ -76,30 +114,69 @@ bits of information: A comma-separated list of supported features. If a derivation has the - requiredSystemFeatures attribute, then - build-remote will only perform the - derivation on a machine that has the specified features. For - instance, the attribute + requiredSystemFeatures attribute, then Nix will + only perform the derivation on a machine that has the specified + features. For instance, the attribute requiredSystemFeatures = [ "kvm" ]; will cause the build to be performed on a machine that has the - kvm feature (i.e., scratchy in - the example above). + kvm feature. A comma-separated list of mandatory features. A machine will only be used to build a derivation if all of the machine’s mandatory features appear in the - derivation’s requiredSystemFeatures attribute. - Thus, in the example, the machine poochie will - only do derivations that have - requiredSystemFeatures set to ["kvm" - "perf"] or ["perf"]. + derivation’s requiredSystemFeatures + attribute.. - +For example, the machine specification + + +nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm +nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2 +nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 1 2 kvm benchmark + + +specifies several machines that can perform +i686-linux builds. However, +poochie will only do builds that have the attribute + + +requiredSystemFeatures = [ "benchmark" ]; + + +or + + +requiredSystemFeatures = [ "benchmark" "kvm" ]; + + +itchy cannot do builds that require +kvm, but scratchy does support +such builds. For regular builds, itchy will be +preferred over scratchy because it has a higher +speed factor. + +Remote builders can also be configured in +nix.conf, e.g. + + +builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd + + +Finally, remote builders can be configured in a separate configuration +file included in via the syntax +@file. 
For example, + + +builders = @/etc/nix/machines + + +causes the list of machines in /etc/nix/machines +to be included. (This is the default.) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index c14a4d206b8..f658058994c 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -763,7 +763,7 @@ builtins.fetchurl { more details. - Since these files are loaded into the same address space as + Since these files are loaded into the same address space as Nix itself, they must be DSOs compatible with the instance of Nix running at the time (i.e. compiled against the same headers, not linked to any incompatible libraries). They @@ -771,14 +771,23 @@ builtins.fetchurl { be available already at load time. - If an entry in the list is a directory, all files in the - directory are loaded as plugins (non-recursively). + If an entry in the list is a directory, all files in the + directory are loaded as plugins (non-recursively). + + builders + + A list of machines on which to perform builds. See for details. + + + diff --git a/src/nix/ping-store.cc b/src/nix/ping-store.cc new file mode 100644 index 00000000000..310942574a2 --- /dev/null +++ b/src/nix/ping-store.cc @@ -0,0 +1,35 @@ +#include "command.hh" +#include "shared.hh" +#include "store-api.hh" + +using namespace nix; + +struct CmdPingStore : StoreCommand +{ + std::string name() override + { + return "ping-store"; + } + + std::string description() override + { + return "test whether a store can be opened"; + } + + Examples examples() override + { + return { + Example{ + "To test whether connecting to a remote Nix store via SSH works:", + "nix ping-store --store ssh://mac1" + }, + }; + } + + void run(ref store) override + { + store->connect(); + } +}; + +static RegisterCommand r1(make_ref()); From 88c90d5e6d3865dffedef8a83f24473aefc08646 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 21 Feb 2018 18:08:47 +0100 Subject: [PATCH 0853/2196] Manual: Put configuration options in sorted order --- doc/manual/command-ref/conf-file.xml | 853 +++++++++++++-------------- 1 file changed, 419 insertions(+), 434 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index f658058994c..c76640c97e7 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -63,147 +63,99 @@ false. - keep-outputs - - If true, the garbage collector - will keep the outputs of non-garbage derivations. If - false (default), outputs will be deleted unless - they are GC roots themselves (or reachable from other roots). - - In general, outputs must be registered as roots separately. - However, even if the output of a derivation is registered as a - root, the collector will still delete store paths that are used - only at build time (e.g., the C compiler, or source tarballs - downloaded from the network). To prevent it from doing so, set - this option to true. - - - - - keep-derivations - - If true (default), the garbage - collector will keep the derivations from which non-garbage store - paths were built. If false, they will be - deleted unless explicitly registered as a root (or reachable from - other roots). - - Keeping derivation around is useful for querying and - traceability (e.g., it allows you to ask with what dependencies or - options a store path was built), so by default this option is on. - Turn it off to save a bit of disk space (or a lot if - keep-outputs is also turned on). 
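On development machines where build-time dependencies should survive garbage collection, these two options are typically enabled together; a sketch of the corresponding nix.conf lines, using the option names as they appear in this file:

  keep-outputs = true        # also keep store paths used only at build time
  keep-derivations = true    # keep derivations for querying and traceability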
- - - - - keep-env-derivations + allowed-uris - If false (default), derivations - are not stored in Nix user environments. That is, the derivation - any build-time-only dependencies may be garbage-collected. + - If true, when you add a Nix derivation to - a user environment, the path of the derivation is stored in the - user environment. Thus, the derivation will not be - garbage-collected until the user environment generation is deleted - (nix-env --delete-generations). To prevent - build-time-only dependencies from being collected, you should also - turn on keep-outputs. + A list of URI prefixes to which access is allowed in + restricted evaluation mode. For example, when set to + https://github.com/NixOS, builtin functions + such as fetchGit are allowed to access + https://github.com/NixOS/patchelf.git. - The difference between this option and - keep-derivations is that this one is - “sticky”: it applies to any user environment created while this - option was enabled, while keep-derivations - only applies at the moment the garbage collector is - run. + - max-jobs + allow-import-from-derivation - This option defines the maximum number of jobs - that Nix will try to build in parallel. The default is - 1. The special value auto - causes Nix to use the number of CPUs in your system. It can be - overridden using the () - command line switch. + By default, Nix allows you to import from a derivation, + allowing building at evaluation time. With this option set to false, Nix will throw an error + when evaluating an expression that uses this feature, allowing users to ensure their evaluation + will not require any builds to take place. - cores + allow-new-privileges - Sets the value of the - NIX_BUILD_CORES environment variable in the - invocation of builders. Builders can use this variable at their - discretion to control the maximum amount of parallelism. For - instance, in Nixpkgs, if the derivation attribute - enableParallelBuilding is set to - true, the builder passes the - flag to GNU Make. - It can be overridden using the command line switch and - defaults to 1. The value 0 - means that the builder should use all available CPU cores in the - system. + (Linux-specific.) By default, builders on Linux + cannot acquire new privileges by calling setuid/setgid programs or + programs that have file capabilities. For example, programs such + as sudo or ping will + fail. (Note that in sandbox builds, no such programs are available + unless you bind-mount them into the sandbox via the + option.) You can allow the + use of such programs by enabling this option. This is impure and + usually undesirable, but may be useful in certain scenarios + (e.g. to spin up containers or set up userspace network interfaces + in tests). - max-silent-time + allowed-users - This option defines the maximum number of seconds that a - builder can go without producing any data on standard output or - standard error. This is useful (for instance in an automated - build system) to catch builds that are stuck in an infinite - loop, or to catch remote builds that are hanging due to network - problems. It can be overridden using the command - line switch. + A list of names of users (separated by whitespace) that + are allowed to connect to the Nix daemon. As with the + option, you can specify groups by + prefixing them with @. Also, you can allow + all users by specifying *. The default is + *. - The value 0 means that there is no - timeout. This is also the default. + Note that trusted users are always allowed to connect. 
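A sketch of how these daemon access controls might be combined in nix.conf (the user and group names are illustrative):

  allowed-users = @nix-users alice   # members of the nix-users group, plus alice, may connect
  trusted-users = root               # trusted users are always allowed to connect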
- timeout + auto-optimise-store - + If set to true, Nix + automatically detects files in the store that have identical + contents, and replaces them with hard links to a single copy. + This saves disk space. If set to false (the + default), you can still run nix-store + --optimise to get rid of duplicate + files. - This option defines the maximum number of seconds that a - builder can run. This is useful (for instance in an automated - build system) to catch builds that are stuck in an infinite loop - but keep writing to their standard output or standard error. It - can be overridden using the command line - switch. + - The value 0 means that there is no - timeout. This is also the default. + + builders + + A list of machines on which to perform builds. See for details. - - max-build-log-size - - - - This option defines the maximum number of bytes that a - builder can write to its stdout/stderr. If the builder exceeds - this limit, it’s killed. A value of 0 (the - default) means that there is no limit. + builders-use-substitutes - + If set to true, Nix will instruct + remote build machines to use their own binary substitutes if available. In + practical terms, this means that remote hosts will fetch as many build + dependencies as possible from their own substitutes (e.g, from + cache.nixos.org), instead of waiting for this host to + upload them all. This can drastically reduce build times if the network + connection between this computer and the remote build host is slow. Defaults + to false. @@ -249,66 +201,51 @@ false. - sandbox + compress-build-log - If set to true, builds will be - performed in a sandboxed environment, i.e., - they’re isolated from the normal file system hierarchy and will - only see their dependencies in the Nix store, the temporary build - directory, private versions of /proc, - /dev, /dev/shm and - /dev/pts (on Linux), and the paths configured with the - sandbox-paths - option. This is useful to prevent undeclared dependencies - on files in directories such as /usr/bin. In - addition, on Linux, builds run in private PID, mount, network, IPC - and UTS namespaces to isolate them from other processes in the - system (except that fixed-output derivations do not run in private - network namespace to ensure they can access the network). + If set to true (the default), + build logs written to /nix/var/log/nix/drvs + will be compressed on the fly using bzip2. Otherwise, they will + not be compressed. - Currently, sandboxing only work on Linux and macOS. The use - of a sandbox requires that Nix is run as root (so you should use - the “build users” - feature to perform the actual builds under different users - than root). + - If this option is set to relaxed, then - fixed-output derivations and derivations that have the - __noChroot attribute set to - true do not run in sandboxes. - The default is false. + connect-timeout + + + + The timeout (in seconds) for establishing connections in + the binary cache substituter. It corresponds to + curl’s + option. - - sandbox-paths - - A list of paths bind-mounted into Nix sandbox - environments. You can use the syntax - target=source - to mount a path in a different location in the sandbox; for - instance, /bin=/nix-bin will mount the path - /nix-bin as /bin inside the - sandbox. If source is followed by - ?, then it is not an error if - source does not exist; for example, - /dev/nvidiactl? specifies that - /dev/nvidiactl will only be mounted in the - sandbox if it exists in the host filesystem. 
+ cores - Depending on how Nix was built, the default value for this option - may be empty or provide /bin/sh as a - bind-mount of bash. + Sets the value of the + NIX_BUILD_CORES environment variable in the + invocation of builders. Builders can use this variable at their + discretion to control the maximum amount of parallelism. For + instance, in Nixpkgs, if the derivation attribute + enableParallelBuilding is set to + true, the builder passes the + flag to GNU Make. + It can be overridden using the command line switch and + defaults to 1. The value 0 + means that the builder should use all available CPU cores in the + system. - build-extra-sandbox-paths + extra-sandbox-paths A list of additional paths appended to . Useful if you want to extend @@ -317,30 +254,18 @@ false. - substitute + extra-substituters - If set to true (default), Nix - will use binary substitutes if available. This option can be - disabled to force building from source. + Additional binary caches appended to those + specified in . When used by + unprivileged users, untrusted substituters (i.e. those not listed + in ) are silently + ignored. - builders-use-substitutes - - If set to true, Nix will instruct - remote build machines to use their own binary substitutes if available. In - practical terms, this means that remote hosts will fetch as many build - dependencies as possible from their own substitutes (e.g, from - cache.nixos.org), instead of waiting for this host to - upload them all. This can drastically reduce build times if the network - connection between this computer and the remote build host is slow. Defaults - to false. - - - - - fallback + fallback If set to true, Nix will fall back to building from source if a binary substitute fails. This @@ -350,110 +275,41 @@ false. - keep-build-log - - If set to true (the default), - Nix will write the build log of a derivation (i.e. the standard - output and error of its builder) to the directory - /nix/var/log/nix/drvs. The build log can be - retrieved using the command nix-store -l - path. - - - - - compress-build-log - - If set to true (the default), - build logs written to /nix/var/log/nix/drvs - will be compressed on the fly using bzip2. Otherwise, they will - not be compressed. - - - - - substituters - - A list of URLs of substituters, separated by - whitespace. The default is - https://cache.nixos.org. - - - - - - - - trusted-substituters - - A list of URLs of substituters, separated by - whitespace. These are not used by default, but can be enabled by - users of the Nix daemon by specifying --option - substituters urls on the - command line. Unprivileged users are only allowed to pass a - subset of the URLs listed in substituters and - trusted-substituters. - - - - - extra-substituters - - Additional binary caches appended to those - specified in . When used by - unprivileged users, untrusted substituters (i.e. those not listed - in ) are silently - ignored. - - - - - require-sigs + fsync-metadata - If set to true (the default), - any non-content-addressed path added or copied to the Nix store - (e.g. when substituting from a binary cache) must have a valid - signature, that is, be signed using one of the keys listed in - or - . Set to false - to disable signature checking. + If set to true, changes to the + Nix store metadata (in /nix/var/nix/db) are + synchronously flushed to disk. This improves robustness in case + of system crashes, but reduces performance. The default is + true. - trusted-public-keys - - A whitespace-separated list of public keys. 
When - paths are copied from another Nix store (such as a binary cache), - they must be signed with one of these keys. For example: - cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= - hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=. - - + hashed-mirrors + A list of web servers used by + builtins.fetchurl to obtain files by + hash. The default is + http://tarballs.nixos.org/. Given a hash type + ht and a base-16 hash + h, Nix will try to download the file + from + hashed-mirror/ht/h. + This allows files to be downloaded even if they have disappeared + from their original URI. For example, given the default mirror + http://tarballs.nixos.org/, when building the derivation - secret-key-files + +builtins.fetchurl { + url = https://example.org/foo-1.2.3.tar.xz; + sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"; +} + - A whitespace-separated list of files containing - secret (private) keys. These are used to sign locally-built - paths. They can be generated using nix-store - --generate-binary-cache-key. The corresponding public - key can be distributed to other users, who can add it to - in their - nix.conf. + Nix will attempt to download this file from + http://tarballs.nixos.org/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae + first. If it is not available there, if will try the original URI. @@ -467,157 +323,174 @@ false. - netrc-file - - If set to an absolute path to a netrc - file, Nix will use the HTTP authentication credentials in this file when - trying to download from a remote host through HTTP or HTTPS. Defaults to - $NIX_CONF_DIR/netrc. - - The netrc file consists of a list of - accounts in the following format: - - -machine my-machine -login my-username -password my-password - + keep-build-log - For the exact syntax, see the - curl documentation. + If set to true (the default), + Nix will write the build log of a derivation (i.e. the standard + output and error of its builder) to the directory + /nix/var/log/nix/drvs. The build log can be + retrieved using the command nix-store -l + path. - system + keep-derivations - This option specifies the canonical Nix system - name of the current installation, such as - i686-linux or - x86_64-darwin. Nix can only build derivations - whose system attribute equals the value - specified here. In general, it never makes sense to modify this - value from its default, since you can use it to ‘lie’ about the - platform you are building on (e.g., perform a Mac OS build on a - Linux machine; the result would obviously be wrong). It only - makes sense if the Nix binaries can run on multiple platforms, - e.g., ‘universal binaries’ that run on x86_64-linux and - i686-linux. + If true (default), the garbage + collector will keep the derivations from which non-garbage store + paths were built. If false, they will be + deleted unless explicitly registered as a root (or reachable from + other roots). - It defaults to the canonical Nix system name detected by - configure at build time. + Keeping derivation around is useful for querying and + traceability (e.g., it allows you to ask with what dependencies or + options a store path was built), so by default this option is on. + Turn it off to save a bit of disk space (or a lot if + keep-outputs is also turned on). - fsync-metadata + keep-env-derivations - If set to true, changes to the - Nix store metadata (in /nix/var/nix/db) are - synchronously flushed to disk. 
This improves robustness in case - of system crashes, but reduces performance. The default is - true. + If false (default), derivations + are not stored in Nix user environments. That is, the derivation + any build-time-only dependencies may be garbage-collected. + + If true, when you add a Nix derivation to + a user environment, the path of the derivation is stored in the + user environment. Thus, the derivation will not be + garbage-collected until the user environment generation is deleted + (nix-env --delete-generations). To prevent + build-time-only dependencies from being collected, you should also + turn on keep-outputs. + + The difference between this option and + keep-derivations is that this one is + “sticky”: it applies to any user environment created while this + option was enabled, while keep-derivations + only applies at the moment the garbage collector is + run. - auto-optimise-store + keep-outputs - If set to true, Nix - automatically detects files in the store that have identical - contents, and replaces them with hard links to a single copy. - This saves disk space. If set to false (the - default), you can still run nix-store - --optimise to get rid of duplicate - files. + If true, the garbage collector + will keep the outputs of non-garbage derivations. If + false (default), outputs will be deleted unless + they are GC roots themselves (or reachable from other roots). + + In general, outputs must be registered as roots separately. + However, even if the output of a derivation is registered as a + root, the collector will still delete store paths that are used + only at build time (e.g., the C compiler, or source tarballs + downloaded from the network). To prevent it from doing so, set + this option to true. - connect-timeout + max-build-log-size - The timeout (in seconds) for establishing connections in - the binary cache substituter. It corresponds to - curl’s - option. + This option defines the maximum number of bytes that a + builder can write to its stdout/stderr. If the builder exceeds + this limit, it’s killed. A value of 0 (the + default) means that there is no limit. - trusted-users - - - - A list of names of users (separated by whitespace) that - have additional rights when connecting to the Nix daemon, such - as the ability to specify additional binary caches, or to import - unsigned NARs. You can also specify groups by prefixing them - with @; for instance, - @wheel means all users in the - wheel group. The default is - root. - - Adding a user to - is essentially equivalent to giving that user root access to the - system. For example, the user can set - and thereby obtain read access to - directories that are otherwise inacessible to - them. + max-jobs - + This option defines the maximum number of jobs + that Nix will try to build in parallel. The default is + 1. The special value auto + causes Nix to use the number of CPUs in your system. It can be + overridden using the () + command line switch. - allowed-users + max-silent-time - A list of names of users (separated by whitespace) that - are allowed to connect to the Nix daemon. As with the - option, you can specify groups by - prefixing them with @. Also, you can allow - all users by specifying *. The default is - *. + This option defines the maximum number of seconds that a + builder can go without producing any data on standard output or + standard error. 
This is useful (for instance in an automated + build system) to catch builds that are stuck in an infinite + loop, or to catch remote builds that are hanging due to network + problems. It can be overridden using the command + line switch. - Note that trusted users are always allowed to connect. + The value 0 means that there is no + timeout. This is also the default. - restrict-eval + netrc-file - + If set to an absolute path to a netrc + file, Nix will use the HTTP authentication credentials in this file when + trying to download from a remote host through HTTP or HTTPS. Defaults to + $NIX_CONF_DIR/netrc. - If set to true, the Nix evaluator will - not allow access to any files outside of the Nix search path (as - set via the NIX_PATH environment variable or the - option), or to URIs outside of - . The default is - false. + The netrc file consists of a list of + accounts in the following format: - + +machine my-machine +login my-username +password my-password + - + For the exact syntax, see the + curl documentation. + - allowed-uris + + plugin-files - - A list of URI prefixes to which access is allowed in - restricted evaluation mode. For example, when set to - https://github.com/NixOS, builtin functions - such as fetchGit are allowed to access - https://github.com/NixOS/patchelf.git. - + + A list of plugin files to be loaded by Nix. Each of these + files will be dlopened by Nix, allowing them to affect + execution through static initialization. In particular, these + plugins may construct static instances of RegisterPrimOp to + add new primops or constants to the expression language, + RegisterStoreImplementation to add new store implementations, + RegisterCommand to add new subcommands to the + nix command, and RegisterSetting to add new + nix config settings. See the constructors for those types for + more details. + + + Since these files are loaded into the same address space as + Nix itself, they must be DSOs compatible with the instance of + Nix running at the time (i.e. compiled against the same + headers, not linked to any incompatible libraries). They + should not be linked to any Nix libs directly, as those will + be available already at load time. + + + If an entry in the list is a directory, all files in the + directory are loaded as plugins (non-recursively). + @@ -671,6 +544,70 @@ password my-password + require-sigs + + If set to true (the default), + any non-content-addressed path added or copied to the Nix store + (e.g. when substituting from a binary cache) must have a valid + signature, that is, be signed using one of the keys listed in + or + . Set to false + to disable signature checking. + + + + + restrict-eval + + + + If set to true, the Nix evaluator will + not allow access to any files outside of the Nix search path (as + set via the NIX_PATH environment variable or the + option), or to URIs outside of + . The default is + false. + + + + + + + sandbox + + If set to true, builds will be + performed in a sandboxed environment, i.e., + they’re isolated from the normal file system hierarchy and will + only see their dependencies in the Nix store, the temporary build + directory, private versions of /proc, + /dev, /dev/shm and + /dev/pts (on Linux), and the paths configured with the + sandbox-paths + option. This is useful to prevent undeclared dependencies + on files in directories such as /usr/bin. 
In + addition, on Linux, builds run in private PID, mount, network, IPC + and UTS namespaces to isolate them from other processes in the + system (except that fixed-output derivations do not run in private + network namespace to ensure they can access the network). + + Currently, sandboxing only work on Linux and macOS. The use + of a sandbox requires that Nix is run as root (so you should use + the “build users” + feature to perform the actual builds under different users + than root). + + If this option is set to relaxed, then + fixed-output derivations and derivations that have the + __noChroot attribute set to + true do not run in sandboxes. + + The default is false. + + + + + + sandbox-dev-shm-size This option determines the maximum size of the @@ -684,110 +621,158 @@ password my-password - allow-import-from-derivation + + sandbox-paths - By default, Nix allows you to import from a derivation, - allowing building at evaluation time. With this option set to false, Nix will throw an error - when evaluating an expression that uses this feature, allowing users to ensure their evaluation - will not require any builds to take place. + A list of paths bind-mounted into Nix sandbox + environments. You can use the syntax + target=source + to mount a path in a different location in the sandbox; for + instance, /bin=/nix-bin will mount the path + /nix-bin as /bin inside the + sandbox. If source is followed by + ?, then it is not an error if + source does not exist; for example, + /dev/nvidiactl? specifies that + /dev/nvidiactl will only be mounted in the + sandbox if it exists in the host filesystem. + + Depending on how Nix was built, the default value for this option + may be empty or provide /bin/sh as a + bind-mount of bash. - allow-new-privileges + secret-key-files - (Linux-specific.) By default, builders on Linux - cannot acquire new privileges by calling setuid/setgid programs or - programs that have file capabilities. For example, programs such - as sudo or ping will - fail. (Note that in sandbox builds, no such programs are available - unless you bind-mount them into the sandbox via the - option.) You can allow the - use of such programs by enabling this option. This is impure and - usually undesirable, but may be useful in certain scenarios - (e.g. to spin up containers or set up userspace network interfaces - in tests). + A whitespace-separated list of files containing + secret (private) keys. These are used to sign locally-built + paths. They can be generated using nix-store + --generate-binary-cache-key. The corresponding public + key can be distributed to other users, who can add it to + in their + nix.conf. - hashed-mirrors + show-trace - A list of web servers used by - builtins.fetchurl to obtain files by - hash. The default is - http://tarballs.nixos.org/. Given a hash type - ht and a base-16 hash - h, Nix will try to download the file - from - hashed-mirror/ht/h. - This allows files to be downloaded even if they have disappeared - from their original URI. For example, given the default mirror - http://tarballs.nixos.org/, when building the derivation + Causes Nix to print out a stack trace in case of Nix + expression evaluation errors. - -builtins.fetchurl { - url = https://example.org/foo-1.2.3.tar.xz; - sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"; -} - + - Nix will attempt to download this file from - http://tarballs.nixos.org/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae - first. 
If it is not available there, if will try the original URI. + + substitute + + If set to true (default), Nix + will use binary substitutes if available. This option can be + disabled to force building from source. - show-trace + substituters - Causes Nix to print out a stack trace in case of Nix - expression evaluation errors. + A list of URLs of substituters, separated by + whitespace. The default is + https://cache.nixos.org. - - plugin-files + system + + This option specifies the canonical Nix system + name of the current installation, such as + i686-linux or + x86_64-darwin. Nix can only build derivations + whose system attribute equals the value + specified here. In general, it never makes sense to modify this + value from its default, since you can use it to ‘lie’ about the + platform you are building on (e.g., perform a Mac OS build on a + Linux machine; the result would obviously be wrong). It only + makes sense if the Nix binaries can run on multiple platforms, + e.g., ‘universal binaries’ that run on x86_64-linux and + i686-linux. + + It defaults to the canonical Nix system name detected by + configure at build time. + + + + + timeout + - - A list of plugin files to be loaded by Nix. Each of these - files will be dlopened by Nix, allowing them to affect - execution through static initialization. In particular, these - plugins may construct static instances of RegisterPrimOp to - add new primops or constants to the expression language, - RegisterStoreImplementation to add new store implementations, - RegisterCommand to add new subcommands to the - nix command, and RegisterSetting to add new - nix config settings. See the constructors for those types for - more details. - - - Since these files are loaded into the same address space as - Nix itself, they must be DSOs compatible with the instance of - Nix running at the time (i.e. compiled against the same - headers, not linked to any incompatible libraries). They - should not be linked to any Nix libs directly, as those will - be available already at load time. - - - If an entry in the list is a directory, all files in the - directory are loaded as plugins (non-recursively). - + + This option defines the maximum number of seconds that a + builder can run. This is useful (for instance in an automated + build system) to catch builds that are stuck in an infinite loop + but keep writing to their standard output or standard error. It + can be overridden using the command line + switch. + + The value 0 means that there is no + timeout. This is also the default. + - - builders + trusted-public-keys + + A whitespace-separated list of public keys. When + paths are copied from another Nix store (such as a binary cache), + they must be signed with one of these keys. For example: + cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= + hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=. + + + + + trusted-substituters + + A list of URLs of substituters, separated by + whitespace. These are not used by default, but can be enabled by + users of the Nix daemon by specifying --option + substituters urls on the + command line. Unprivileged users are only allowed to pass a + subset of the URLs listed in substituters and + trusted-substituters. + + + + + trusted-users + - A list of machines on which to perform builds. See for details. 
+ + A list of names of users (separated by whitespace) that + have additional rights when connecting to the Nix daemon, such + as the ability to specify additional binary caches, or to import + unsigned NARs. You can also specify groups by prefixing them + with @; for instance, + @wheel means all users in the + wheel group. The default is + root. + + Adding a user to + is essentially equivalent to giving that user root access to the + system. For example, the user can set + and thereby obtain read access to + directories that are otherwise inacessible to + them. + + + From ddc58e789636e1b94149c342575d92583251fbf6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 22 Feb 2018 12:27:25 +0100 Subject: [PATCH 0854/2196] Enable sandbox builds on Linux by default The overhead of sandbox builds is a problem on NixOS (since building a NixOS configuration involves a lot of small derivations) but not for typical non-NixOS use cases. So outside of NixOS we can enable it. Issue #179. --- doc/manual/manual.xml | 4 +--- doc/manual/release-notes/rl-2.0.xml | 4 ++++ src/libstore/globals.hh | 8 +++++++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/doc/manual/manual.xml b/doc/manual/manual.xml index b5a6af7d0c3..b408b681772 100644 --- a/doc/manual/manual.xml +++ b/doc/manual/manual.xml @@ -16,12 +16,10 @@ - 2004-2017 + 2004-2018 Eelco Dolstra - November 2014 - + diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml new file mode 100644 index 00000000000..5f37083eefc --- /dev/null +++ b/doc/manual/release-notes/rl-2.1.xml @@ -0,0 +1,47 @@ +
+ +Release 2.1 (2018-08-??) + +This is primarily a bug fix release. It also reduces memory +consumption in certain situations. In addition, it has the following +new features: + + + + + New builtin functions: + builtins.bitAnd, + builtins.bitOr, + builtins.bitXor, + builtins.fromTOML, + builtins.concatMap, + builtins.mapAttrs. + + + + + The S3 binary cache store now supports uploading NARs larger + than 5 GiB. + + + + The flag is no longer required + to recover from disappeared NARs in binary caches. + + + + nix-daemon now respects + , so it can be run as a non-root + user. + + + + +This release has contributions from TODO. + + +
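The new builtins listed in these release notes can be exercised with a small expression such as the following sketch; the values in the comments are what evaluation is expected to produce.

    {
      a = builtins.bitAnd 5 3;                                    # 1
      b = builtins.bitOr  5 3;                                    # 7
      c = builtins.bitXor 5 3;                                    # 6
      d = builtins.fromTOML "answer = 42";                        # { answer = 42; }
      e = builtins.concatMap (x: [ x x ]) [ 1 2 ];                # [ 1 1 2 2 ]
      f = builtins.mapAttrs (name: value: value * 2) { x = 1; };  # { x = 2; }
    }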
From 06080e4abce5f8ffc4a658802692f7464ec359b5 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 3 Aug 2018 11:09:31 -0400 Subject: [PATCH 1133/2196] 2.1 release notes: Add note about s3-compatible stores --- doc/manual/release-notes/rl-2.1.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml index 5f37083eefc..c628d04a707 100644 --- a/doc/manual/release-notes/rl-2.1.xml +++ b/doc/manual/release-notes/rl-2.1.xml @@ -28,6 +28,12 @@ new features: than 5 GiB. + + The S3 binary cache store now supports uploading to + S3-compatible services with the endpoint + option. + + The flag is no longer required to recover from disappeared NARs in binary caches. From e268bbc05435d8121275136934a594fc70a73da9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Aug 2018 18:07:46 +0200 Subject: [PATCH 1134/2196] LegacySSHStore: Add remote-store option This is primarily useful for testing, e.g. $ nix copy --to 'ssh://localhost?remote-store=/tmp/nix' ... --- src/libstore/legacy-ssh-store.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 02d91ded04c..f9b0b42c83d 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -17,6 +17,7 @@ struct LegacySSHStore : public Store const Setting sshKey{this, "", "ssh-key", "path to an SSH private key"}; const Setting compress{this, false, "compress", "whether to compress the connection"}; const Setting remoteProgram{this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"}; + const Setting remoteStore{this, "", "remote-store", "URI of the store on the remote system"}; // Hack for getting remote build log output. const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; @@ -56,7 +57,9 @@ struct LegacySSHStore : public Store ref openConnection() { auto conn = make_ref(); - conn->sshConn = master.startCommand(fmt("%s --serve --write", remoteProgram)); + conn->sshConn = master.startCommand( + fmt("%s --serve --write", remoteProgram) + + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); From 4e7d5f660cec6da3a432c726dfe7a579c7581c79 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Aug 2018 18:12:28 +0200 Subject: [PATCH 1135/2196] SSHMaster: Bypass SSH when connecting to localhost This is primarily useful for testing since it removes the need to have SSH working. 
--- src/libstore/ssh.cc | 22 +++++++++++++++------- src/libstore/ssh.hh | 1 + 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 033c580936a..5e0e44935cc 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -4,8 +4,9 @@ namespace nix { SSHMaster::SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress, int logFD) : host(host) + , fakeSSH(host == "localhost") , keyFile(keyFile) - , useMaster(useMaster) + , useMaster(useMaster && !fakeSSH) , compress(compress) , logFD(logFD) { @@ -45,12 +46,19 @@ std::unique_ptr SSHMaster::startCommand(const std::string if (logFD != -1 && dup2(logFD, STDERR_FILENO) == -1) throw SysError("duping over stderr"); - Strings args = { "ssh", host.c_str(), "-x", "-a" }; - addCommonSSHOpts(args); - if (socketPath != "") - args.insert(args.end(), {"-S", socketPath}); - if (verbosity >= lvlChatty) - args.push_back("-v"); + Strings args; + + if (fakeSSH) { + args = { "bash", "-c" }; + } else { + args = { "ssh", host.c_str(), "-x", "-a" }; + addCommonSSHOpts(args); + if (socketPath != "") + args.insert(args.end(), {"-S", socketPath}); + if (verbosity >= lvlChatty) + args.push_back("-v"); + } + args.push_back(command); execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index 1268e6d0005..4f0f0bd29f9 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -10,6 +10,7 @@ class SSHMaster private: const std::string host; + bool fakeSSH; const std::string keyFile; const bool useMaster; const bool compress; From 848a9375c3753202bf53defda469ca5d7538e135 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Aug 2018 20:36:25 +0200 Subject: [PATCH 1136/2196] Support escaping in store URIs --- src/libstore/store-api.cc | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 1854353828d..7a4a5f5eb85 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -844,8 +844,24 @@ ref openStore(const std::string & uri_, if (q != std::string::npos) { for (auto s : tokenizeString(uri.substr(q + 1), "&")) { auto e = s.find('='); - if (e != std::string::npos) - params[s.substr(0, e)] = s.substr(e + 1); + if (e != std::string::npos) { + auto value = s.substr(e + 1); + std::string decoded; + for (size_t i = 0; i < value.size(); ) { + if (value[i] == '%') { + if (i + 2 >= value.size()) + throw Error("invalid URI parameter '%s'", value); + try { + decoded += std::stoul(std::string(value, i + 1, 2), 0, 16); + i += 3; + } catch (...) 
{ + throw Error("invalid URI parameter '%s'", value); + } + } else + decoded += value[i++]; + } + params[s.substr(0, e)] = decoded; + } } uri = uri_.substr(0, q); } From eeebe4cdc509aaaefb70d4af55e5174a75c0281f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Aug 2018 20:39:07 +0200 Subject: [PATCH 1137/2196] cmdDumpStorePath: Support chroot stores --- src/nix-store/nix-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index e1e27ceef94..942fe50e86c 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -860,7 +860,7 @@ static void opServe(Strings opFlags, Strings opArgs) } case cmdDumpStorePath: - dumpPath(readStorePath(*store, in), out); + store->narFromPath(readStorePath(*store, in), out); break; case cmdImportPaths: { From 34c17fdae535d0128d178dde676f56f1bc8439e3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Aug 2018 20:41:56 +0200 Subject: [PATCH 1138/2196] Add a test for LegacySSHStore that doesn't require a VM --- tests/local.mk | 3 ++- tests/nix-copy-ssh.sh | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 tests/nix-copy-ssh.sh diff --git a/tests/local.mk b/tests/local.mk index 9df0adf1bfd..1ff68348b3c 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -25,7 +25,8 @@ nix_tests = \ pure-eval.sh \ check.sh \ plugins.sh \ - search.sh + search.sh \ + nix-copy-ssh.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) diff --git a/tests/nix-copy-ssh.sh b/tests/nix-copy-ssh.sh new file mode 100644 index 00000000000..6aba667a45a --- /dev/null +++ b/tests/nix-copy-ssh.sh @@ -0,0 +1,20 @@ +source common.sh + +clearStore +clearCache + +remoteRoot=$TEST_ROOT/store2 +chmod -R u+w "$remoteRoot" || true +rm -rf "$remoteRoot" + +outPath=$(nix-build dependencies.nix) + +nix copy --to "ssh://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath + +[ -f $remoteRoot$outPath/foobar ] + +clearStore + +nix copy --no-check-sigs --from "ssh://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath + +[ -f $outPath/foobar ] From 2825e05d21ecabc8b8524836baf0b9b05da993c6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 3 Aug 2018 21:10:03 +0200 Subject: [PATCH 1139/2196] Make adding paths via nix-store --serve run in constant memory It adds a new operation, cmdAddToStoreNar, that does the same thing as the corresponding nix-daemon operation, i.e. call addToStore(). This replaces cmdImportPaths, which has the major issue that it sends the NAR first and the store path second, thus requiring us to store the incoming NAR either in memory or on disk until we decide what to do with it. For example, this reduces the memory usage of $ nix copy --to 'ssh://localhost?remote-store=/tmp/nix' /nix/store/95cwv4q54dc6giaqv6q6p4r02ia2km35-blender-2.79 from 267 MiB to 12 MiB. Probably fixes #1988. 
--- src/libstore/legacy-ssh-store.cc | 59 ++++++++++++++++++++++++-------- src/libstore/serve-protocol.hh | 3 +- src/nix-daemon/nix-daemon.cc | 1 + src/nix-store/nix-store.cc | 22 ++++++++++++ 4 files changed, 70 insertions(+), 15 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index f9b0b42c83d..ddfd17d641b 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -28,6 +28,7 @@ struct LegacySSHStore : public Store FdSink to; FdSource from; int remoteVersion; + bool good = true; }; std::string host; @@ -42,7 +43,7 @@ struct LegacySSHStore : public Store , connections(make_ref>( std::max(1, (int) maxConnections), [this]() { return openConnection(); }, - [](const ref & r) { return true; } + [](const ref & r) { return r->good; } )) , master( host, @@ -58,7 +59,7 @@ struct LegacySSHStore : public Store { auto conn = make_ref(); conn->sshConn = master.startCommand( - fmt("%s --serve --write", remoteProgram) + fmt("command time %s --serve --write", remoteProgram) + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); @@ -130,18 +131,48 @@ struct LegacySSHStore : public Store auto conn(connections->get()); - conn->to - << cmdImportPaths - << 1; - copyNAR(source, conn->to); - conn->to - << exportMagic - << info.path - << info.references - << info.deriver - << 0 - << 0; - conn->to.flush(); + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) { + + conn->to + << cmdAddToStoreNar + << info.path + << info.deriver + << info.narHash.to_string(Base16, false) + << info.references + << info.registrationTime + << info.narSize + << info.ultimate + << info.sigs + << info.ca; + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to.flush(); + + } else { + + conn->to + << cmdImportPaths + << 1; + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to + << exportMagic + << info.path + << info.references + << info.deriver + << 0 + << 0; + conn->to.flush(); + + } if (readInt(conn->from) != 1) throw Error("failed to add path '%s' to remote host '%s', info.path, host"); diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index f67d1e2580a..9fae6d5349f 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -5,7 +5,7 @@ namespace nix { #define SERVE_MAGIC_1 0x390c9deb #define SERVE_MAGIC_2 0x5452eecb -#define SERVE_PROTOCOL_VERSION 0x204 +#define SERVE_PROTOCOL_VERSION 0x205 #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) @@ -18,6 +18,7 @@ typedef enum { cmdBuildPaths = 6, cmdQueryClosure = 7, cmdBuildDerivation = 8, + cmdAddToStoreNar = 9, } ServeCommand; } diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index a2e54b93c05..644fa6681de 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -707,6 +707,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); + // FIXME: race if addToStore doesn't read source? store.cast()->addToStore(info, *source, (RepairFlag) repair, dontCheckSigs ? 
NoCheckSigs : CheckSigs, nullptr); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 942fe50e86c..fe68f681ae2 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -924,6 +924,28 @@ static void opServe(Strings opFlags, Strings opArgs) break; } + case cmdAddToStoreNar: { + if (!writeAllowed) throw Error("importing paths is not allowed"); + + ValidPathInfo info; + info.path = readStorePath(*store, in); + in >> info.deriver; + if (!info.deriver.empty()) + store->assertStorePath(info.deriver); + info.narHash = Hash(readString(in), htSHA256); + info.references = readStorePaths(*store, in); + in >> info.registrationTime >> info.narSize >> info.ultimate; + info.sigs = readStrings(in); + in >> info.ca; + + // FIXME: race if addToStore doesn't read source? + store->addToStore(info, in, NoRepair, NoCheckSigs); + + out << 1; // indicate success + + break; + } + default: throw Error(format("unknown serve command %1%") % cmd); } From 4361a4331f1b2eb3fcfd954c96de353c9d516508 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 6 Aug 2018 11:31:14 +0200 Subject: [PATCH 1140/2196] Fix reporting of HTTP body size when a result callback is used --- src/libstore/download.cc | 5 ++++- src/libstore/download.hh | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index f80c9e45b82..f0ea1995ae7 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -140,6 +140,7 @@ struct CurlDownloader : public Downloader size_t writeCallback(void * contents, size_t size, size_t nmemb) { size_t realSize = size * nmemb; + result.bodySize += realSize; if (request.dataCallback) request.dataCallback((char *) contents, realSize); else @@ -162,6 +163,7 @@ struct CurlDownloader : public Downloader auto ss = tokenizeString>(line, " "); status = ss.size() >= 2 ? ss[1] : ""; result.data = std::make_shared(); + result.bodySize = 0; encoding = ""; } else { auto i = line.find(':'); @@ -296,6 +298,7 @@ struct CurlDownloader : public Downloader curl_easy_setopt(req, CURLOPT_NETRC, CURL_NETRC_OPTIONAL); result.data = std::make_shared(); + result.bodySize = 0; } void finish(CURLcode code) @@ -309,7 +312,7 @@ struct CurlDownloader : public Downloader result.effectiveUrl = effectiveUrlCStr; debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes", - request.verb(), request.uri, code, httpStatus, result.data ? 
result.data->size() : 0); + request.verb(), request.uri, code, httpStatus, result.bodySize); if (code == CURLE_WRITE_ERROR && result.etag == request.expectedETag) { code = CURLE_OK; diff --git a/src/libstore/download.hh b/src/libstore/download.hh index da55df7a6e7..ff38a2870cc 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -38,6 +38,7 @@ struct DownloadResult std::string etag; std::string effectiveUrl; std::shared_ptr data; + uint64_t bodySize = 0; }; class Store; From fa4def3d4675c8b2d6aacb56959dbbf9e52df66a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 6 Aug 2018 11:48:35 +0200 Subject: [PATCH 1141/2196] Require libbrotli --- Makefile.config.in | 1 - configure.ac | 9 +++---- src/libutil/compression.cc | 50 -------------------------------------- tests/brotli.sh | 7 ------ tests/common.sh.in | 1 - 5 files changed, 3 insertions(+), 65 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index a9785dc7395..08edcb863ef 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -6,7 +6,6 @@ CXXFLAGS = @CXXFLAGS@ ENABLE_S3 = @ENABLE_S3@ HAVE_SODIUM = @HAVE_SODIUM@ HAVE_READLINE = @HAVE_READLINE@ -HAVE_BROTLI = @HAVE_BROTLI@ HAVE_SECCOMP = @HAVE_SECCOMP@ LIBCURL_LIBS = @LIBCURL_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ diff --git a/configure.ac b/configure.ac index c41a83c9764..6aeeacdba13 100644 --- a/configure.ac +++ b/configure.ac @@ -179,12 +179,9 @@ AC_CHECK_LIB([lzma], [lzma_stream_encoder_mt], [AC_DEFINE([HAVE_LZMA_MT], [1], [xz multithreaded compression support])]) -# Look for libbrotli{enc,dec}, optional dependencies -PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], - [AC_DEFINE([HAVE_BROTLI], [1], [Whether to use libbrotli.]) - CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"] - have_brotli=1], [have_brotli=]) -AC_SUBST(HAVE_BROTLI, [$have_brotli]) +# Look for libbrotli{enc,dec}. +PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"]) + # Look for libseccomp, required for Linux sandboxing. 
if test "$sys_name" = linux; then diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index e1782f8c4bd..5de85ede150 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -8,10 +8,8 @@ #include #include -#if HAVE_BROTLI #include #include -#endif // HAVE_BROTLI #include @@ -132,12 +130,6 @@ static void decompressBzip2(Source & source, Sink & sink) static void decompressBrotli(Source & source, Sink & sink) { -#if !HAVE_BROTLI - RunOptions options(BROTLI, {"-d"}); - options.standardIn = &source; - options.standardOut = &sink; - runProgram2(options); -#else auto *s = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr); if (!s) throw CompressionError("unable to initialize brotli decoder"); @@ -193,7 +185,6 @@ static void decompressBrotli(Source & source, Sink & sink) if (ret == BROTLI_DECODER_RESULT_SUCCESS) return; } -#endif // HAVE_BROTLI } ref decompress(const std::string & method, const std::string & in) @@ -403,42 +394,6 @@ struct BzipSink : CompressionSink } }; -struct LambdaCompressionSink : CompressionSink -{ - Sink & nextSink; - std::string data; - using CompressFnTy = std::function; - CompressFnTy compressFn; - LambdaCompressionSink(Sink& nextSink, CompressFnTy compressFn) - : nextSink(nextSink) - , compressFn(std::move(compressFn)) - { - }; - - void finish() override - { - flush(); - nextSink(compressFn(data)); - } - - void write(const unsigned char * data, size_t len) override - { - checkInterrupt(); - this->data.append((const char *) data, len); - } -}; - -struct BrotliCmdSink : LambdaCompressionSink -{ - BrotliCmdSink(Sink& nextSink) - : LambdaCompressionSink(nextSink, [](const std::string& data) { - return runProgram(BROTLI, true, {}, data); - }) - { - } -}; - -#if HAVE_BROTLI struct BrotliSink : CompressionSink { Sink & nextSink; @@ -525,7 +480,6 @@ struct BrotliSink : CompressionSink } } }; -#endif // HAVE_BROTLI ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel) { @@ -544,11 +498,7 @@ ref makeCompressionSink(const std::string & method, Sink & next else if (method == "bzip2") return make_ref(nextSink); else if (method == "br") -#if HAVE_BROTLI return make_ref(nextSink); -#else - return make_ref(nextSink); -#endif else throw UnknownCompressionMethod(format("unknown compression method '%s'") % method); } diff --git a/tests/brotli.sh b/tests/brotli.sh index 645dd4214ec..a3c6e55a8fa 100644 --- a/tests/brotli.sh +++ b/tests/brotli.sh @@ -1,10 +1,5 @@ source common.sh - -# Only test if we found brotli libraries -# (CLI tool is likely unavailable if libraries are missing) -if [ -n "$HAVE_BROTLI" ]; then - clearStore clearCache @@ -24,5 +19,3 @@ nix copy --from $cacheURI $outPath --no-check-sigs HASH2=$(nix hash-path $outPath) [[ $HASH = $HASH2 ]] - -fi # HAVE_BROTLI diff --git a/tests/common.sh.in b/tests/common.sh.in index fddd25b366b..2ee2f589dae 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -31,7 +31,6 @@ export xmllint="@xmllint@" export SHELL="@bash@" export PAGER=cat export HAVE_SODIUM="@HAVE_SODIUM@" -export HAVE_BROTLI="@HAVE_BROTLI@" export version=@PACKAGE_VERSION@ export system=@system@ From d3761f5f8bce1e4c8dcfdff3fa77c173157c0346 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 6 Aug 2018 15:40:29 +0200 Subject: [PATCH 1142/2196] Fix Brotli decompression in 'nix log' This didn't work anymore since decompression was only done in the non-coroutine case. Decompressors are now sinks, just like compressors. 
Also fixed a bug in bzip2 API handling (we have to handle BZ_RUN_OK rather than BZ_OK), which we didn't notice because there was a missing 'throw': if (ret != BZ_OK) CompressionError("error while compressing bzip2 file"); --- src/libstore/binary-cache-store.cc | 23 +- src/libstore/builtins/fetchurl.cc | 21 +- src/libstore/download.cc | 68 ++-- src/libstore/download.hh | 3 - src/libstore/s3-binary-cache-store.cc | 6 +- src/libutil/compression.cc | 557 +++++++++++--------------- src/libutil/compression.hh | 12 +- 7 files changed, 309 insertions(+), 381 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 76c0a1a891b..9c75c85993f 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -217,17 +217,6 @@ void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink) { auto info = queryPathInfo(storePath).cast(); - auto source = sinkToSource([this, url{info->url}](Sink & sink) { - try { - getFile(url, sink); - } catch (NoSuchBinaryCacheFile & e) { - throw SubstituteGone(e.what()); - } - }); - - stats.narRead++; - //stats.narReadCompressedBytes += nar->size(); // FIXME - uint64_t narSize = 0; LambdaSink wrapperSink([&](const unsigned char * data, size_t len) { @@ -235,8 +224,18 @@ void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink) narSize += len; }); - decompress(info->compression, *source, wrapperSink); + auto decompressor = makeDecompressionSink(info->compression, wrapperSink); + try { + getFile(info->url, *decompressor); + } catch (NoSuchBinaryCacheFile & e) { + throw SubstituteGone(e.what()); + } + + decompressor->flush(); + + stats.narRead++; + //stats.narReadCompressedBytes += nar->size(); // FIXME stats.narReadBytes += narSize; } diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 1f4abd374f5..b4dcb35f951 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -39,21 +39,16 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) request.verifyTLS = false; request.decompress = false; - downloader->download(std::move(request), sink); + auto decompressor = makeDecompressionSink( + hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink); + downloader->download(std::move(request), *decompressor); + decompressor->finish(); }); - if (get(drv.env, "unpack", "") == "1") { - - if (hasSuffix(mainUrl, ".xz")) { - auto source2 = sinkToSource([&](Sink & sink) { - decompress("xz", *source, sink); - }); - restorePath(storePath, *source2); - } else - restorePath(storePath, *source); - - } else - writeFile(storePath, *source); + if (get(drv.env, "unpack", "") == "1") + restorePath(storePath, *source); + else + writeFile(storePath, *source); auto executable = drv.env.find("executable"); if (executable != drv.env.end() && executable->second == "1") { diff --git a/src/libstore/download.cc b/src/libstore/download.cc index f0ea1995ae7..973fca0b130 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -58,16 +58,6 @@ std::string resolveUri(const std::string & uri) return uri; } -ref decodeContent(const std::string & encoding, ref data) -{ - if (encoding == "") - return data; - else if (encoding == "br") - return decompress(encoding, *data); - else - throw Error("unsupported Content-Encoding '%s'", encoding); -} - struct CurlDownloader : public Downloader { CURLM * curlm = 0; @@ -106,6 +96,12 @@ struct CurlDownloader : public Downloader fmt(request.data ? 
"uploading '%s'" : "downloading '%s'", request.uri), {request.uri}, request.parentAct) , callback(callback) + , finalSink([this](const unsigned char * data, size_t len) { + if (this->request.dataCallback) + this->request.dataCallback((char *) data, len); + else + this->result.data->append((char *) data, len); + }) { if (!request.expectedETag.empty()) requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); @@ -129,23 +125,40 @@ struct CurlDownloader : public Downloader } } - template - void fail(const T & e) + void failEx(std::exception_ptr ex) { assert(!done); done = true; - callback.rethrow(std::make_exception_ptr(e)); + callback.rethrow(ex); } + template + void fail(const T & e) + { + failEx(std::make_exception_ptr(e)); + } + + LambdaSink finalSink; + std::shared_ptr decompressionSink; + + std::exception_ptr writeException; + size_t writeCallback(void * contents, size_t size, size_t nmemb) { - size_t realSize = size * nmemb; - result.bodySize += realSize; - if (request.dataCallback) - request.dataCallback((char *) contents, realSize); - else - result.data->append((char *) contents, realSize); - return realSize; + try { + size_t realSize = size * nmemb; + result.bodySize += realSize; + + if (!decompressionSink) + decompressionSink = makeDecompressionSink(encoding, finalSink); + + (*decompressionSink)((unsigned char *) contents, realSize); + + return realSize; + } catch (...) { + writeException = std::current_exception(); + return 0; + } } static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp) @@ -314,27 +327,33 @@ struct CurlDownloader : public Downloader debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes", request.verb(), request.uri, code, httpStatus, result.bodySize); + if (decompressionSink) + decompressionSink->finish(); + if (code == CURLE_WRITE_ERROR && result.etag == request.expectedETag) { code = CURLE_OK; httpStatus = 304; } - if (code == CURLE_OK && + if (writeException) + failEx(writeException); + + else if (code == CURLE_OK && (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */)) { result.cached = httpStatus == 304; done = true; try { - if (request.decompress) - result.data = decodeContent(encoding, ref(result.data)); act.progress(result.data->size(), result.data->size()); callback(std::move(result)); } catch (...) { done = true; callback.rethrow(); } - } else { + } + + else { // We treat most errors as transient, but won't retry when hopeless Error err = Transient; @@ -369,6 +388,7 @@ struct CurlDownloader : public Downloader case CURLE_UNKNOWN_OPTION: case CURLE_SSL_CACERT_BADFILE: case CURLE_TOO_MANY_REDIRECTS: + case CURLE_WRITE_ERROR: err = Misc; break; default: // Shut up warnings diff --git a/src/libstore/download.hh b/src/libstore/download.hh index ff38a2870cc..f0228f7d053 100644 --- a/src/libstore/download.hh +++ b/src/libstore/download.hh @@ -88,7 +88,4 @@ public: bool isUri(const string & s); -/* Decode data according to the Content-Encoding header. 
*/ -ref decodeContent(const std::string & encoding, ref data); - } diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 2f18e3f38c6..660583d31fe 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -153,10 +153,8 @@ S3Helper::DownloadResult S3Helper::getObject( auto result = checkAws(fmt("AWS error fetching '%s'", key), client->GetObject(request)); - res.data = decodeContent( - result.GetContentEncoding(), - make_ref( - dynamic_cast(result.GetBody()).str())); + res.data = decompress(result.GetContentEncoding(), + dynamic_cast(result.GetBody()).str()); } catch (S3Error & e) { if (e.err != Aws::S3::S3Errors::NO_SUCH_KEY) throw; diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 5de85ede150..53b62f62a76 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -17,228 +17,258 @@ namespace nix { static const size_t bufSize = 32 * 1024; -static void decompressNone(Source & source, Sink & sink) +// Don't feed brotli too much at once. +struct ChunkedCompressionSink : CompressionSink { - std::vector buf(bufSize); - while (true) { - size_t n; - try { - n = source.read(buf.data(), buf.size()); - } catch (EndOfFile &) { - break; + uint8_t outbuf[BUFSIZ]; + + void write(const unsigned char * data, size_t len) override + { + const size_t CHUNK_SIZE = sizeof(outbuf) << 2; + while (len) { + size_t n = std::min(CHUNK_SIZE, len); + writeInternal(data, n); + data += n; + len -= n; } - sink(buf.data(), n); } -} -static void decompressXZ(Source & source, Sink & sink) + virtual void writeInternal(const unsigned char * data, size_t len) = 0; +}; + +struct NoneSink : CompressionSink { - lzma_stream strm(LZMA_STREAM_INIT); - - lzma_ret ret = lzma_stream_decoder( - &strm, UINT64_MAX, LZMA_CONCATENATED); - if (ret != LZMA_OK) - throw CompressionError("unable to initialise lzma decoder"); - - Finally free([&]() { lzma_end(&strm); }); - - lzma_action action = LZMA_RUN; - std::vector inbuf(bufSize), outbuf(bufSize); - strm.next_in = nullptr; - strm.avail_in = 0; - strm.next_out = outbuf.data(); - strm.avail_out = outbuf.size(); - bool eof = false; - - while (true) { - checkInterrupt(); - - if (strm.avail_in == 0 && !eof) { - strm.next_in = inbuf.data(); - try { - strm.avail_in = source.read((unsigned char *) strm.next_in, inbuf.size()); - } catch (EndOfFile &) { - eof = true; - } - } + Sink & nextSink; + NoneSink(Sink & nextSink) : nextSink(nextSink) { } + void finish() override { flush(); } + void write(const unsigned char * data, size_t len) override { nextSink(data, len); } +}; - if (strm.avail_in == 0) - action = LZMA_FINISH; +struct XzDecompressionSink : CompressionSink +{ + Sink & nextSink; + uint8_t outbuf[BUFSIZ]; + lzma_stream strm = LZMA_STREAM_INIT; + bool finished = false; - lzma_ret ret = lzma_code(&strm, action); + XzDecompressionSink(Sink & nextSink) : nextSink(nextSink) + { + lzma_ret ret = lzma_stream_decoder( + &strm, UINT64_MAX, LZMA_CONCATENATED); + if (ret != LZMA_OK) + throw CompressionError("unable to initialise lzma decoder"); - if (strm.avail_out < outbuf.size()) { - sink((unsigned char *) outbuf.data(), outbuf.size() - strm.avail_out); - strm.next_out = outbuf.data(); - strm.avail_out = outbuf.size(); - } + strm.next_out = outbuf; + strm.avail_out = sizeof(outbuf); + } - if (ret == LZMA_STREAM_END) return; + ~XzDecompressionSink() + { + lzma_end(&strm); + } - if (ret != LZMA_OK) - throw CompressionError("error %d while decompressing xz file", ret); + void finish() 
override + { + CompressionSink::flush(); + write(nullptr, 0); } -} -static void decompressBzip2(Source & source, Sink & sink) -{ - bz_stream strm; - memset(&strm, 0, sizeof(strm)); - - int ret = BZ2_bzDecompressInit(&strm, 0, 0); - if (ret != BZ_OK) - throw CompressionError("unable to initialise bzip2 decoder"); - - Finally free([&]() { BZ2_bzDecompressEnd(&strm); }); - - std::vector inbuf(bufSize), outbuf(bufSize); - strm.next_in = nullptr; - strm.avail_in = 0; - strm.next_out = outbuf.data(); - strm.avail_out = outbuf.size(); - bool eof = false; - - while (true) { - checkInterrupt(); - - if (strm.avail_in == 0 && !eof) { - strm.next_in = inbuf.data(); - try { - strm.avail_in = source.read((unsigned char *) strm.next_in, inbuf.size()); - } catch (EndOfFile &) { - eof = true; - } - } + void write(const unsigned char * data, size_t len) override + { + strm.next_in = data; + strm.avail_in = len; - int ret = BZ2_bzDecompress(&strm); + while (!finished && (!data || strm.avail_in)) { + checkInterrupt(); - if (strm.avail_in == 0 && strm.avail_out == outbuf.size() && eof) - throw CompressionError("bzip2 data ends prematurely"); + lzma_ret ret = lzma_code(&strm, data ? LZMA_RUN : LZMA_FINISH); + if (ret != LZMA_OK && ret != LZMA_STREAM_END) + throw CompressionError("error %d while decompressing xz file", ret); + + finished = ret == LZMA_STREAM_END; - if (strm.avail_out < outbuf.size()) { - sink((unsigned char *) outbuf.data(), outbuf.size() - strm.avail_out); - strm.next_out = outbuf.data(); - strm.avail_out = outbuf.size(); + if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - strm.avail_out); + strm.next_out = outbuf; + strm.avail_out = sizeof(outbuf); + } } + } +}; - if (ret == BZ_STREAM_END) return; +struct BzipDecompressionSink : ChunkedCompressionSink +{ + Sink & nextSink; + bz_stream strm; + bool finished = false; + BzipDecompressionSink(Sink & nextSink) : nextSink(nextSink) + { + memset(&strm, 0, sizeof(strm)); + int ret = BZ2_bzDecompressInit(&strm, 0, 0); if (ret != BZ_OK) - throw CompressionError("error while decompressing bzip2 file"); + throw CompressionError("unable to initialise bzip2 decoder"); + + strm.next_out = (char *) outbuf; + strm.avail_out = sizeof(outbuf); } -} -static void decompressBrotli(Source & source, Sink & sink) -{ - auto *s = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr); - if (!s) - throw CompressionError("unable to initialize brotli decoder"); - - Finally free([s]() { BrotliDecoderDestroyInstance(s); }); - - std::vector inbuf(bufSize), outbuf(bufSize); - const uint8_t * next_in = nullptr; - size_t avail_in = 0; - bool eof = false; - - while (true) { - checkInterrupt(); - - if (avail_in == 0 && !eof) { - next_in = inbuf.data(); - try { - avail_in = source.read((unsigned char *) next_in, inbuf.size()); - } catch (EndOfFile &) { - eof = true; + ~BzipDecompressionSink() + { + BZ2_bzDecompressEnd(&strm); + } + + void finish() override + { + flush(); + write(nullptr, 0); + } + + void writeInternal(const unsigned char * data, size_t len) + { + assert(len <= std::numeric_limits::max()); + + strm.next_in = (char *) data; + strm.avail_in = len; + + while (strm.avail_in) { + checkInterrupt(); + + int ret = BZ2_bzDecompress(&strm); + if (ret != BZ_OK && ret != BZ_STREAM_END) + throw CompressionError("error while decompressing bzip2 file"); + + finished = ret == BZ_STREAM_END; + + if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - strm.avail_out); + strm.next_out = (char *) 
outbuf; + strm.avail_out = sizeof(outbuf); } } + } +}; - uint8_t * next_out = outbuf.data(); - size_t avail_out = outbuf.size(); - - auto ret = BrotliDecoderDecompressStream(s, - &avail_in, &next_in, - &avail_out, &next_out, - nullptr); - - switch (ret) { - case BROTLI_DECODER_RESULT_ERROR: - throw CompressionError("error while decompressing brotli file"); - case BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: - if (eof) - throw CompressionError("incomplete or corrupt brotli file"); - break; - case BROTLI_DECODER_RESULT_SUCCESS: - if (avail_in != 0) - throw CompressionError("unexpected input after brotli decompression"); - break; - case BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: - // I'm not sure if this can happen, but abort if this happens with empty buffer - if (avail_out == outbuf.size()) - throw CompressionError("brotli decompression requires larger buffer"); - break; - } +struct BrotliDecompressionSink : ChunkedCompressionSink +{ + Sink & nextSink; + BrotliDecoderState * state; + bool finished = false; - // Always ensure we have full buffer for next invocation - if (avail_out < outbuf.size()) - sink((unsigned char *) outbuf.data(), outbuf.size() - avail_out); + BrotliDecompressionSink(Sink & nextSink) : nextSink(nextSink) + { + state = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr); + if (!state) + throw CompressionError("unable to initialize brotli decoder"); + } - if (ret == BROTLI_DECODER_RESULT_SUCCESS) return; + ~BrotliDecompressionSink() + { + BrotliDecoderDestroyInstance(state); } -} + + void finish() override + { + flush(); + writeInternal(nullptr, 0); + } + + void writeInternal(const unsigned char * data, size_t len) + { + const uint8_t * next_in = data; + size_t avail_in = len; + uint8_t * next_out = outbuf; + size_t avail_out = sizeof(outbuf); + + while (!finished && (!data || avail_in)) { + checkInterrupt(); + + if (!BrotliDecoderDecompressStream(state, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr)) + throw CompressionError("error while decompressing brotli file"); + + if (avail_out < sizeof(outbuf) || avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - avail_out); + next_out = outbuf; + avail_out = sizeof(outbuf); + } + + finished = BrotliDecoderIsFinished(state); + } + } +}; ref decompress(const std::string & method, const std::string & in) { - StringSource source(in); - StringSink sink; - decompress(method, source, sink); - return sink.s; + StringSink ssink; + auto sink = makeDecompressionSink(method, ssink); + (*sink)(in); + sink->finish(); + return ssink.s; } -void decompress(const std::string & method, Source & source, Sink & sink) +ref makeDecompressionSink(const std::string & method, Sink & nextSink) { - if (method == "none") - return decompressNone(source, sink); + if (method == "none" || method == "") + return make_ref(nextSink); else if (method == "xz") - return decompressXZ(source, sink); + return make_ref(nextSink); else if (method == "bzip2") - return decompressBzip2(source, sink); + return make_ref(nextSink); else if (method == "br") - return decompressBrotli(source, sink); + return make_ref(nextSink); else throw UnknownCompressionMethod("unknown compression method '%s'", method); } -struct NoneSink : CompressionSink -{ - Sink & nextSink; - NoneSink(Sink & nextSink) : nextSink(nextSink) { } - void finish() override { flush(); } - void write(const unsigned char * data, size_t len) override { nextSink(data, len); } -}; - -struct XzSink : CompressionSink +struct XzCompressionSink : CompressionSink { Sink & nextSink; uint8_t outbuf[BUFSIZ]; 
lzma_stream strm = LZMA_STREAM_INIT; bool finished = false; - template - XzSink(Sink & nextSink, F&& initEncoder) : nextSink(nextSink) { - lzma_ret ret = initEncoder(); + XzCompressionSink(Sink & nextSink, bool parallel) : nextSink(nextSink) + { + lzma_ret ret; + bool done = false; + + if (parallel) { +#ifdef HAVE_LZMA_MT + lzma_mt mt_options = {}; + mt_options.flags = 0; + mt_options.timeout = 300; // Using the same setting as the xz cmd line + mt_options.preset = LZMA_PRESET_DEFAULT; + mt_options.filters = NULL; + mt_options.check = LZMA_CHECK_CRC64; + mt_options.threads = lzma_cputhreads(); + mt_options.block_size = 0; + if (mt_options.threads == 0) + mt_options.threads = 1; + // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the + // number of threads. + ret = lzma_stream_encoder_mt(&strm, &mt_options); + done = true; +#else + printMsg(lvlError, "warning: parallel compression requested but not supported for metho d '%1%', falling back to single-threaded compression", method); +#endif + } + + if (!done) + ret = lzma_easy_encoder(&strm, 6, LZMA_CHECK_CRC64); + if (ret != LZMA_OK) throw CompressionError("unable to initialise lzma encoder"); + // FIXME: apply the x86 BCJ filter? strm.next_out = outbuf; strm.avail_out = sizeof(outbuf); } - XzSink(Sink & nextSink) : XzSink(nextSink, [this]() { - return lzma_easy_encoder(&strm, 6, LZMA_CHECK_CRC64); - }) {} - ~XzSink() + ~XzCompressionSink() { lzma_end(&strm); } @@ -246,43 +276,25 @@ struct XzSink : CompressionSink void finish() override { CompressionSink::flush(); - - assert(!finished); - finished = true; - - while (true) { - checkInterrupt(); - - lzma_ret ret = lzma_code(&strm, LZMA_FINISH); - if (ret != LZMA_OK && ret != LZMA_STREAM_END) - throw CompressionError("error while flushing xz file"); - - if (strm.avail_out == 0 || ret == LZMA_STREAM_END) { - nextSink(outbuf, sizeof(outbuf) - strm.avail_out); - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - } - - if (ret == LZMA_STREAM_END) break; - } + write(nullptr, 0); } void write(const unsigned char * data, size_t len) override { - assert(!finished); - strm.next_in = data; strm.avail_in = len; - while (strm.avail_in) { + while (!finished && (!data || strm.avail_in)) { checkInterrupt(); - lzma_ret ret = lzma_code(&strm, LZMA_RUN); - if (ret != LZMA_OK) - throw CompressionError("error while compressing xz file"); + lzma_ret ret = lzma_code(&strm, data ? LZMA_RUN : LZMA_FINISH); + if (ret != LZMA_OK && ret != LZMA_STREAM_END) + throw CompressionError("error %d while compressing xz file", ret); + + finished = ret == LZMA_STREAM_END; - if (strm.avail_out == 0) { - nextSink(outbuf, sizeof(outbuf)); + if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - strm.avail_out); strm.next_out = outbuf; strm.avail_out = sizeof(outbuf); } @@ -290,46 +302,24 @@ struct XzSink : CompressionSink } }; -#ifdef HAVE_LZMA_MT -struct ParallelXzSink : public XzSink -{ - ParallelXzSink(Sink &nextSink) : XzSink(nextSink, [this]() { - lzma_mt mt_options = {}; - mt_options.flags = 0; - mt_options.timeout = 300; // Using the same setting as the xz cmd line - mt_options.preset = LZMA_PRESET_DEFAULT; - mt_options.filters = NULL; - mt_options.check = LZMA_CHECK_CRC64; - mt_options.threads = lzma_cputhreads(); - mt_options.block_size = 0; - if (mt_options.threads == 0) - mt_options.threads = 1; - // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the - // number of threads. 
- return lzma_stream_encoder_mt(&strm, &mt_options); - }) {} -}; -#endif - -struct BzipSink : CompressionSink +struct BzipCompressionSink : ChunkedCompressionSink { Sink & nextSink; - char outbuf[BUFSIZ]; bz_stream strm; bool finished = false; - BzipSink(Sink & nextSink) : nextSink(nextSink) + BzipCompressionSink(Sink & nextSink) : nextSink(nextSink) { memset(&strm, 0, sizeof(strm)); int ret = BZ2_bzCompressInit(&strm, 9, 0, 30); if (ret != BZ_OK) throw CompressionError("unable to initialise bzip2 encoder"); - strm.next_out = outbuf; + strm.next_out = (char *) outbuf; strm.avail_out = sizeof(outbuf); } - ~BzipSink() + ~BzipCompressionSink() { BZ2_bzCompressEnd(&strm); } @@ -337,78 +327,49 @@ struct BzipSink : CompressionSink void finish() override { flush(); - - assert(!finished); - finished = true; - - while (true) { - checkInterrupt(); - - int ret = BZ2_bzCompress(&strm, BZ_FINISH); - if (ret != BZ_FINISH_OK && ret != BZ_STREAM_END) - throw CompressionError("error while flushing bzip2 file"); - - if (strm.avail_out == 0 || ret == BZ_STREAM_END) { - nextSink((unsigned char *) outbuf, sizeof(outbuf) - strm.avail_out); - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - } - - if (ret == BZ_STREAM_END) break; - } - } - - void write(const unsigned char * data, size_t len) override - { - /* Bzip2's 'avail_in' parameter is an unsigned int, so we need - to split the input into chunks of at most 4 GiB. */ - while (len) { - auto n = std::min((size_t) std::numeric_limits::max(), len); - writeInternal(data, n); - data += n; - len -= n; - } + writeInternal(nullptr, 0); } void writeInternal(const unsigned char * data, size_t len) { - assert(!finished); assert(len <= std::numeric_limits::max()); strm.next_in = (char *) data; strm.avail_in = len; - while (strm.avail_in) { + while (!finished && (!data || strm.avail_in)) { checkInterrupt(); - int ret = BZ2_bzCompress(&strm, BZ_RUN); - if (ret != BZ_OK) - CompressionError("error while compressing bzip2 file"); + int ret = BZ2_bzCompress(&strm, data ? 
BZ_RUN : BZ_FINISH); + if (ret != BZ_RUN_OK && ret != BZ_FINISH_OK && ret != BZ_STREAM_END) + throw CompressionError("error %d while compressing bzip2 file", ret); - if (strm.avail_out == 0) { - nextSink((unsigned char *) outbuf, sizeof(outbuf)); - strm.next_out = outbuf; + finished = ret == BZ_STREAM_END; + + if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { + nextSink(outbuf, sizeof(outbuf) - strm.avail_out); + strm.next_out = (char *) outbuf; strm.avail_out = sizeof(outbuf); } } } }; -struct BrotliSink : CompressionSink +struct BrotliCompressionSink : ChunkedCompressionSink { Sink & nextSink; uint8_t outbuf[BUFSIZ]; BrotliEncoderState *state; bool finished = false; - BrotliSink(Sink & nextSink) : nextSink(nextSink) + BrotliCompressionSink(Sink & nextSink) : nextSink(nextSink) { state = BrotliEncoderCreateInstance(nullptr, nullptr, nullptr); if (!state) throw CompressionError("unable to initialise brotli encoder"); } - ~BrotliSink() + ~BrotliCompressionSink() { BrotliEncoderDestroyInstance(state); } @@ -416,89 +377,47 @@ struct BrotliSink : CompressionSink void finish() override { flush(); - assert(!finished); - - const uint8_t *next_in = nullptr; - size_t avail_in = 0; - uint8_t *next_out = outbuf; - size_t avail_out = sizeof(outbuf); - while (!finished) { - checkInterrupt(); - - if (!BrotliEncoderCompressStream(state, - BROTLI_OPERATION_FINISH, - &avail_in, &next_in, - &avail_out, &next_out, - nullptr)) - throw CompressionError("error while finishing brotli file"); - - finished = BrotliEncoderIsFinished(state); - if (avail_out == 0 || finished) { - nextSink(outbuf, sizeof(outbuf) - avail_out); - next_out = outbuf; - avail_out = sizeof(outbuf); - } - } - } - - void write(const unsigned char * data, size_t len) override - { - // Don't feed brotli too much at once - const size_t CHUNK_SIZE = sizeof(outbuf) << 2; - while (len) { - size_t n = std::min(CHUNK_SIZE, len); - writeInternal(data, n); - data += n; - len -= n; - } + writeInternal(nullptr, 0); } void writeInternal(const unsigned char * data, size_t len) { - assert(!finished); - - const uint8_t *next_in = data; + const uint8_t * next_in = data; size_t avail_in = len; - uint8_t *next_out = outbuf; + uint8_t * next_out = outbuf; size_t avail_out = sizeof(outbuf); - while (avail_in > 0) { + while (!finished && (!data || avail_in)) { checkInterrupt(); if (!BrotliEncoderCompressStream(state, - BROTLI_OPERATION_PROCESS, - &avail_in, &next_in, - &avail_out, &next_out, - nullptr)) - throw CompressionError("error while compressing brotli file"); + data ? 
BROTLI_OPERATION_PROCESS : BROTLI_OPERATION_FINISH, + &avail_in, &next_in, + &avail_out, &next_out, + nullptr)) + throw CompressionError("error while compressing brotli compression"); if (avail_out < sizeof(outbuf) || avail_in == 0) { nextSink(outbuf, sizeof(outbuf) - avail_out); next_out = outbuf; avail_out = sizeof(outbuf); } + + finished = BrotliEncoderIsFinished(state); } } }; ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel) { - if (parallel) { -#ifdef HAVE_LZMA_MT - if (method == "xz") - return make_ref(nextSink); -#endif - printMsg(lvlError, format("Warning: parallel compression requested but not supported for method '%1%', falling back to single-threaded compression") % method); - } - if (method == "none") return make_ref(nextSink); else if (method == "xz") - return make_ref(nextSink); + return make_ref(nextSink, parallel); else if (method == "bzip2") - return make_ref(nextSink); + return make_ref(nextSink); else if (method == "br") - return make_ref(nextSink); + return make_ref(nextSink); else throw UnknownCompressionMethod(format("unknown compression method '%s'") % method); } diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh index f7a3e3fbd32..dd666a4e19f 100644 --- a/src/libutil/compression.hh +++ b/src/libutil/compression.hh @@ -8,17 +8,17 @@ namespace nix { -ref decompress(const std::string & method, const std::string & in); - -void decompress(const std::string & method, Source & source, Sink & sink); - -ref compress(const std::string & method, const std::string & in, const bool parallel = false); - struct CompressionSink : BufferedSink { virtual void finish() = 0; }; +ref decompress(const std::string & method, const std::string & in); + +ref makeDecompressionSink(const std::string & method, Sink & nextSink); + +ref compress(const std::string & method, const std::string & in, const bool parallel = false); + ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false); MakeError(UnknownCompressionMethod, Error); From 7de3e00ad905bba85abadd86b83973fdba8d0dfd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 6 Aug 2018 17:20:23 +0200 Subject: [PATCH 1143/2196] Fix setting Content-Encoding in S3 uploads Fixes https://github.com/NixOS/nix/issues/2333 and https://github.com/NixOS/nixpkgs/issues/44337. 
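The multipart-upload template's Content-Encoding is presumably only consulted when the
TransferManager actually performs a multipart upload, so small objects sent via a plain
PutObject never received the header. A rough sketch of the call shape after this change
(the trailing contentEncoding argument is assumed to exist only in the patched
aws-sdk-cpp pulled in via release-common.nix below, not in the stock SDK):

    std::shared_ptr<TransferHandle> transferHandle =
        transferManager->UploadFile(
            stream, bucketName, path, mimeType,
            Aws::Map<Aws::String, Aws::String>(), // per-object metadata (unused here)
            nullptr,                              // upload context
            contentEncoding);                     // forwarded to the Content-Encoding header
    transferHandle->WaitUntilFinished();

Passing the encoding per request should make it apply whether the transfer ends up as a
single PutObject or a multipart upload.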
--- release-common.nix | 9 +++++++-- src/libstore/s3-binary-cache-store.cc | 10 ++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/release-common.nix b/release-common.nix index f98e86a1b44..e23967bdbfd 100644 --- a/release-common.nix +++ b/release-common.nix @@ -60,10 +60,15 @@ rec { ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) - (aws-sdk-cpp.override { + ((aws-sdk-cpp.override { apis = ["s3" "transfer"]; customMemoryManagement = false; - }); + }).overrideDerivation (args: { + patches = args.patches or [] ++ [ (fetchpatch { + url = https://github.com/edolstra/aws-sdk-cpp/commit/3e07e1f1aae41b4c8b340735ff9e8c735f0c063f.patch; + sha256 = "1pij0v449p166f9l29x7ppzk8j7g9k9mp15ilh5qxp29c7fnvxy2"; + }) ]; + })); perlDeps = [ perl diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 660583d31fe..6d95c1fa8c6 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -291,10 +291,6 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore transferConfig.s3Client = s3Helper.client; transferConfig.bufferSize = bufferSize; - if (contentEncoding != "") - transferConfig.createMultipartUploadTemplate.SetContentEncoding( - contentEncoding); - transferConfig.uploadProgressCallback = [&](const TransferManager *transferManager, const std::shared_ptr @@ -336,8 +332,10 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore auto now1 = std::chrono::steady_clock::now(); std::shared_ptr transferHandle = - transferManager->UploadFile(stream, bucketName, path, mimeType, - Aws::Map()); + transferManager->UploadFile( + stream, bucketName, path, mimeType, + Aws::Map(), + nullptr, contentEncoding); transferHandle->WaitUntilFinished(); From ed6c646f44e5b9e0fcbc53058491e97875a263c2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 6 Aug 2018 17:27:08 +0200 Subject: [PATCH 1144/2196] Doh --- src/libstore/legacy-ssh-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index ddfd17d641b..7c214f09d6f 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -59,7 +59,7 @@ struct LegacySSHStore : public Store { auto conn = make_ref(); conn->sshConn = master.startCommand( - fmt("command time %s --serve --write", remoteProgram) + fmt("%s --serve --write", remoteProgram) + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); From ec49ea28dc5309c8905fc4385fb65ec7a88b33f6 Mon Sep 17 00:00:00 2001 From: Ivan Kozik Date: Tue, 7 Aug 2018 03:40:44 +0000 Subject: [PATCH 1145/2196] repl: don't add trailing spaces to history lines --- src/nix/repl.cc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 1eb71600637..b71e6f905f2 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -173,9 +173,14 @@ void NixRepl::mainLoop(const std::vector & files) printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } + if (input.size() > 0) { + // Remove trailing newline before adding to history + input.erase(input.size() - 1); + linenoiseHistoryAdd(input.c_str()); + } + // We handled the current input fully, so we should clear it // and read brand new input. 
- linenoiseHistoryAdd(input.c_str()); input.clear(); std::cout << std::endl; } From 58a85fa4621faaa89286065f5583328783085722 Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Wed, 8 Aug 2018 21:21:21 +0200 Subject: [PATCH 1146/2196] mention nix-store --query --roots when a path cannot be deleted --- src/libstore/gc.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index b415d542147..65220a9f6ae 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -791,7 +791,11 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) assertStorePath(i); tryToDelete(state, i); if (state.dead.find(i) == state.dead.end()) - throw Error(format("cannot delete path '%1%' since it is still alive") % i); + throw Error(format( + "cannot delete path '%1%' since it is still alive. " + "To find out why use: " + "nix-store --query --roots" + ) % i); } } else if (options.maxFreed > 0) { From a0b971dd9c19819d4f7a3a8ab102be9d7101e3e0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 8 Aug 2018 21:39:11 +0200 Subject: [PATCH 1147/2196] S3BinaryCacheStore: Don't use the transfer status callback This callback is executed on a different thread, so exceptions thrown from the callback are not caught: Aug 08 16:25:48 chef hydra-queue-runner[11967]: terminate called after throwing an instance of 'nix::Error' Aug 08 16:25:48 chef hydra-queue-runner[11967]: what(): AWS error: failed to upload 's3://nix-cache/19dbddlfb0vp68g68y19p9fswrgl0bg7.ls' Therefore, just check the transfer status after it completes. Also include the S3 error message in the exception. --- src/libstore/s3-binary-cache-store.cc | 37 ++++++++++----------------- 1 file changed, 13 insertions(+), 24 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 6d95c1fa8c6..ef41e413fcf 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -296,36 +296,13 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore const std::shared_ptr &transferHandle) { //FIXME: find a way to properly abort the multipart upload. 
- checkInterrupt(); + //checkInterrupt(); debug("upload progress ('%s'): '%d' of '%d' bytes", path, transferHandle->GetBytesTransferred(), transferHandle->GetBytesTotalSize()); }; - transferConfig.transferStatusUpdatedCallback = - [&](const TransferManager *, - const std::shared_ptr - &transferHandle) { - switch (transferHandle->GetStatus()) { - case TransferStatus::COMPLETED: - printTalkative("upload of '%s' completed", path); - stats.put++; - stats.putBytes += data.size(); - break; - case TransferStatus::IN_PROGRESS: - break; - case TransferStatus::FAILED: - throw Error("AWS error: failed to upload 's3://%s/%s'", - bucketName, path); - break; - default: - throw Error("AWS error: transfer status of 's3://%s/%s' " - "in unexpected state", - bucketName, path); - }; - }; - std::shared_ptr transferManager = TransferManager::Create(transferConfig); @@ -339,6 +316,16 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore transferHandle->WaitUntilFinished(); + if (transferHandle->GetStatus() == TransferStatus::FAILED) + throw Error("AWS error: failed to upload 's3://%s/%s': %s", + bucketName, path, transferHandle->GetLastError().GetMessage()); + + if (transferHandle->GetStatus() != TransferStatus::COMPLETED) + throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", + bucketName, path); + + printTalkative("upload of '%s' completed", path); + auto now2 = std::chrono::steady_clock::now(); auto duration = @@ -349,6 +336,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore bucketName % path % data.size() % duration); stats.putTimeMs += duration; + stats.putBytes += data.size(); + stats.put++; } void upsertFile(const std::string & path, const std::string & data, From c87f4b9324b87a059cf760a477177f322bb8dc26 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 9 Aug 2018 13:01:03 +0200 Subject: [PATCH 1148/2196] nix run: Respect propagated-user-env-packages Also, add $path/bin to $PATH even if it doesn't exist. This makes 'man' work properly (since it looks for ../share/man relative to $PATH entries). --- src/nix/run.cc | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/src/nix/run.cc b/src/nix/run.cc index d04e106e037..65ced34759b 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -12,6 +12,8 @@ #include #endif +#include + using namespace nix; std::string chrootHelperName = "__run_in_chroot"; @@ -121,10 +123,27 @@ struct CmdRun : InstallablesCommand unsetenv(var.c_str()); } + std::unordered_set done; + std::queue todo; + for (auto & path : outPaths) todo.push(path); + auto unixPath = tokenizeString(getEnv("PATH"), ":"); - for (auto & path : outPaths) - if (accessor->stat(path + "/bin").type != FSAccessor::tMissing) + + while (!todo.empty()) { + Path path = todo.front(); + todo.pop(); + if (!done.insert(path).second) continue; + + if (true) unixPath.push_front(path + "/bin"); + + auto propPath = path + "/nix-support/propagated-user-env-packages"; + if (accessor->stat(propPath).type == FSAccessor::tRegular) { + for (auto & p : tokenizeString(readFile(propPath))) + todo.push(p); + } + } + setenv("PATH", concatStringsSep(":", unixPath).c_str(), 1); std::string cmd = *command.begin(); From ada4e902672e4d82f47fa260adcc450d9a85d41f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 9 Aug 2018 20:44:18 +0200 Subject: [PATCH 1149/2196] S3BinaryCacheStore: Share TransferManager TransferManager allocates a lot of memory (50 MiB by default), and it might leak but I'm not sure about that. 
In any case it was causing OOMs in hydra-queue-runner. So allocate only one TransferManager per S3BinaryCacheStore. Hopefully fixes https://github.com/NixOS/hydra/issues/586. --- src/libstore/s3-binary-cache-store.cc | 42 +++++++++++++++------------ 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index ef41e413fcf..7711388f05a 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -275,6 +275,9 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore return true; } + std::shared_ptr transferManager; + std::once_flag transferManagerCreated; + void uploadFile(const std::string & path, const std::string & data, const std::string & mimeType, const std::string & contentEncoding) @@ -286,25 +289,28 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore static std::shared_ptr executor = std::make_shared(maxThreads); - TransferManagerConfiguration transferConfig(executor.get()); - - transferConfig.s3Client = s3Helper.client; - transferConfig.bufferSize = bufferSize; - - transferConfig.uploadProgressCallback = - [&](const TransferManager *transferManager, - const std::shared_ptr - &transferHandle) { - //FIXME: find a way to properly abort the multipart upload. - //checkInterrupt(); - debug("upload progress ('%s'): '%d' of '%d' bytes", - path, - transferHandle->GetBytesTransferred(), - transferHandle->GetBytesTotalSize()); - }; + std::call_once(transferManagerCreated, [&]() { + + TransferManagerConfiguration transferConfig(executor.get()); + + transferConfig.s3Client = s3Helper.client; + transferConfig.bufferSize = bufferSize; + + transferConfig.uploadProgressCallback = + [&](const TransferManager *transferManager, + const std::shared_ptr + &transferHandle) + { + //FIXME: find a way to properly abort the multipart upload. + //checkInterrupt(); + debug("upload progress ('%s'): '%d' of '%d' bytes", + path, + transferHandle->GetBytesTransferred(), + transferHandle->GetBytesTotalSize()); + }; - std::shared_ptr transferManager = - TransferManager::Create(transferConfig); + transferManager = TransferManager::Create(transferConfig); + }); auto now1 = std::chrono::steady_clock::now(); From f72c907ad833fa26800ad1694e63f3cec952b444 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 10 Aug 2018 11:34:41 +0200 Subject: [PATCH 1150/2196] Slightly questionable workaround for #2342 --- src/nix/installables.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/installables.cc b/src/nix/installables.cc index 0be992b03c5..0c1ad3ab3db 100644 --- a/src/nix/installables.cc +++ b/src/nix/installables.cc @@ -96,7 +96,7 @@ struct InstallableStorePath : Installable Buildables toBuildables() override { - return {{"", {{"out", storePath}}}}; + return {{isDerivation(storePath) ? 
storePath : "", {{"out", storePath}}}}; } }; From d7402c9cd5c17644d73b2a7a39e10be22fffeb00 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Mon, 13 Aug 2018 11:27:35 +0200 Subject: [PATCH 1151/2196] dirOf: allow use on non-absolute paths --- src/libexpr/primops.cc | 2 +- src/libutil/util.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 8ace6db4d11..32d6640fad2 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -866,7 +866,7 @@ static void prim_baseNameOf(EvalState & state, const Pos & pos, Value * * args, static void prim_dirOf(EvalState & state, const Pos & pos, Value * * args, Value & v) { PathSet context; - Path dir = dirOf(state.coerceToPath(pos, *args[0], context)); + Path dir = dirOf(state.coerceToString(pos, *args[0], context, false, false)); if (args[0]->type == tPath) mkPath(v, dir.c_str()); else mkString(v, dir, context); } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6bc64ae75a4..4cc7455be6c 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -167,7 +167,7 @@ Path dirOf(const Path & path) { Path::size_type pos = path.rfind('/'); if (pos == string::npos) - throw Error(format("invalid file name '%1%'") % path); + return "."; return pos == 0 ? "/" : Path(path, 0, pos); } From 5b19a6663b06a158cb77d170e6740dce7ced6185 Mon Sep 17 00:00:00 2001 From: Symphorien Gibol Date: Mon, 13 Aug 2018 21:11:36 +0200 Subject: [PATCH 1152/2196] ignore when listxattr fails with ENODATA This happens on CIFS and means the remote filesystem has no extended attributes. --- src/libstore/local-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 3b2ba65f3b4..c91dbf241bc 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -450,7 +450,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); if (eaSize < 0) { - if (errno != ENOTSUP) + if (errno != ENOTSUP && errno != ENODATA) throw SysError("querying extended attributes of '%s'", path); } else if (eaSize > 0) { std::vector eaBuf(eaSize); From bad27dc4755b91b421a0df96dd1a11bc043a5c25 Mon Sep 17 00:00:00 2001 From: volth Date: Mon, 13 Aug 2018 20:00:06 +0000 Subject: [PATCH 1153/2196] update config/config.{sub,guess} Just curl 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' > config/config.sub curl 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' > config/config.guess Those files are 5 years old and failed to guess new archs ("ppc64-linux") --- config/config.guess | 983 +++++++++++----------- config/config.sub | 1944 ++++++++++++++++++++++--------------------- 2 files changed, 1454 insertions(+), 1473 deletions(-) diff --git a/config/config.guess b/config/config.guess index 137bedf2e28..d4fb3213ec7 100755 --- a/config/config.guess +++ b/config/config.guess @@ -1,14 +1,12 @@ #! /bin/sh # Attempt to guess a canonical system name. -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012 Free Software Foundation, Inc. +# Copyright 1992-2018 Free Software Foundation, Inc. 
-timestamp='2012-08-14' +timestamp='2018-08-02' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or +# the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but @@ -17,24 +15,22 @@ timestamp='2012-08-14' # General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with this program; if not, see . +# along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - - -# Originally written by Per Bothner. Please send patches (context -# diff format) to and include a ChangeLog -# entry. +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). # -# This script attempts to guess a canonical system name similar to -# config.sub. If it succeeds, it prints the system name on stdout, and -# exits with 0. Otherwise, it exits with 1. +# Originally written by Per Bothner; maintained since 2000 by Ben Elliston. # # You can get the latest version of this script from: -# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD +# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess +# +# Please send patches to . + me=`echo "$0" | sed -e 's,.*/,,'` @@ -43,7 +39,7 @@ Usage: $0 [OPTION] Output the configuration name of the system \`$me' is run on. -Operation modes: +Options: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit @@ -54,9 +50,7 @@ version="\ GNU config.guess ($timestamp) Originally written by Per Bothner. -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, -2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 -Free Software Foundation, Inc. +Copyright 1992-2018 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." @@ -90,8 +84,6 @@ if test $# != 0; then exit 1 fi -trap 'exit 1' 1 2 15 - # CC_FOR_BUILD -- compiler used by this script. Note that the use of a # compiler to aid in system detection is discouraged as it requires # temporary files to be created and, as you can see below, it is a @@ -102,34 +94,39 @@ trap 'exit 1' 1 2 15 # Portable tmp directory creation inspired by the Autoconf team. 
-set_cc_for_build=' -trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; -trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; -: ${TMPDIR=/tmp} ; - { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || - { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || - { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || - { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; -dummy=$tmp/dummy ; -tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; -case $CC_FOR_BUILD,$HOST_CC,$CC in - ,,) echo "int x;" > $dummy.c ; - for c in cc gcc c89 c99 ; do - if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then - CC_FOR_BUILD="$c"; break ; - fi ; - done ; - if test x"$CC_FOR_BUILD" = x ; then - CC_FOR_BUILD=no_compiler_found ; - fi - ;; - ,,*) CC_FOR_BUILD=$CC ;; - ,*,*) CC_FOR_BUILD=$HOST_CC ;; -esac ; set_cc_for_build= ;' +tmp= +# shellcheck disable=SC2172 +trap 'test -z "$tmp" || rm -fr "$tmp"' 1 2 13 15 +trap 'exitcode=$?; test -z "$tmp" || rm -fr "$tmp"; exit $exitcode' 0 + +set_cc_for_build() { + : "${TMPDIR=/tmp}" + # shellcheck disable=SC2039 + { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || + { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir "$tmp" 2>/dev/null) ; } || + { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir "$tmp" 2>/dev/null) && echo "Warning: creating insecure temp directory" >&2 ; } || + { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } + dummy=$tmp/dummy + case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in + ,,) echo "int x;" > "$dummy.c" + for driver in cc gcc c89 c99 ; do + if ($driver -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then + CC_FOR_BUILD="$driver" + break + fi + done + if test x"$CC_FOR_BUILD" = x ; then + CC_FOR_BUILD=no_compiler_found + fi + ;; + ,,*) CC_FOR_BUILD=$CC ;; + ,*,*) CC_FOR_BUILD=$HOST_CC ;; + esac +} # This is needed to find uname on a Pyramid OSx when run in the BSD universe. # (ghazi@noc.rutgers.edu 1994-08-24) -if (test -f /.attbin/uname) >/dev/null 2>&1 ; then +if test -f /.attbin/uname ; then PATH=$PATH:/.attbin ; export PATH fi @@ -138,9 +135,37 @@ UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown +case "$UNAME_SYSTEM" in +Linux|GNU|GNU/*) + # If the system lacks a compiler, then just pick glibc. + # We could probably try harder. + LIBC=gnu + + set_cc_for_build + cat <<-EOF > "$dummy.c" + #include + #if defined(__UCLIBC__) + LIBC=uclibc + #elif defined(__dietlibc__) + LIBC=dietlibc + #else + LIBC=gnu + #endif + EOF + eval "`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g'`" + + # If ldd exists, use it to detect musl libc. + if command -v ldd >/dev/null && \ + ldd --version 2>&1 | grep -q ^musl + then + LIBC=musl + fi + ;; +esac + # Note: order is significant - the case branches are not exclusive. 
-case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in +case "$UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, @@ -153,21 +178,31 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in # Note: NetBSD doesn't particularly care about the vendor # portion of the name. We always set it to "unknown". sysctl="sysctl -n hw.machine_arch" - UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ - /usr/sbin/$sysctl 2>/dev/null || echo unknown)` - case "${UNAME_MACHINE_ARCH}" in + UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \ + "/sbin/$sysctl" 2>/dev/null || \ + "/usr/sbin/$sysctl" 2>/dev/null || \ + echo unknown)` + case "$UNAME_MACHINE_ARCH" in armeb) machine=armeb-unknown ;; arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; sh5el) machine=sh5le-unknown ;; - *) machine=${UNAME_MACHINE_ARCH}-unknown ;; + earmv*) + arch=`echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,'` + endian=`echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p'` + machine="${arch}${endian}"-unknown + ;; + *) machine="$UNAME_MACHINE_ARCH"-unknown ;; esac # The Operating System including object format, if it has switched - # to ELF recently, or will in the future. - case "${UNAME_MACHINE_ARCH}" in + # to ELF recently (or will in the future) and ABI. + case "$UNAME_MACHINE_ARCH" in + earm*) + os=netbsdelf + ;; arm*|i386|m68k|ns32k|sh3*|sparc|vax) - eval $set_cc_for_build + set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ELF__ then @@ -182,44 +217,67 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in os=netbsd ;; esac + # Determine ABI tags. + case "$UNAME_MACHINE_ARCH" in + earm*) + expr='s/^earmv[0-9]/-eabi/;s/eb$//' + abi=`echo "$UNAME_MACHINE_ARCH" | sed -e "$expr"` + ;; + esac # The OS release # Debian GNU/NetBSD machines have a different userland, and # thus, need a distinct triplet. However, they do not need # kernel version information, so it can be replaced with a # suitable tag, in the style of linux-gnu. - case "${UNAME_VERSION}" in + case "$UNAME_VERSION" in Debian*) release='-gnu' ;; *) - release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` + release=`echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2` ;; esac # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: # contains redundant information, the shorter form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
- echo "${machine}-${os}${release}" + echo "$machine-${os}${release}${abi-}" exit ;; *:Bitrig:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` - echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} + echo "$UNAME_MACHINE_ARCH"-unknown-bitrig"$UNAME_RELEASE" exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` - echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} + echo "$UNAME_MACHINE_ARCH"-unknown-openbsd"$UNAME_RELEASE" + exit ;; + *:LibertyBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'` + echo "$UNAME_MACHINE_ARCH"-unknown-libertybsd"$UNAME_RELEASE" + exit ;; + *:MidnightBSD:*:*) + echo "$UNAME_MACHINE"-unknown-midnightbsd"$UNAME_RELEASE" exit ;; *:ekkoBSD:*:*) - echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} + echo "$UNAME_MACHINE"-unknown-ekkobsd"$UNAME_RELEASE" exit ;; *:SolidBSD:*:*) - echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} + echo "$UNAME_MACHINE"-unknown-solidbsd"$UNAME_RELEASE" exit ;; macppc:MirBSD:*:*) - echo powerpc-unknown-mirbsd${UNAME_RELEASE} + echo powerpc-unknown-mirbsd"$UNAME_RELEASE" exit ;; *:MirBSD:*:*) - echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} + echo "$UNAME_MACHINE"-unknown-mirbsd"$UNAME_RELEASE" + exit ;; + *:Sortix:*:*) + echo "$UNAME_MACHINE"-unknown-sortix + exit ;; + *:Redox:*:*) + echo "$UNAME_MACHINE"-unknown-redox exit ;; + mips:OSF1:*.*) + echo mips-dec-osf1 + exit ;; alpha:OSF1:*:*) case $UNAME_RELEASE in *4.0) @@ -236,63 +294,54 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` case "$ALPHA_CPU_TYPE" in "EV4 (21064)") - UNAME_MACHINE="alpha" ;; + UNAME_MACHINE=alpha ;; "EV4.5 (21064)") - UNAME_MACHINE="alpha" ;; + UNAME_MACHINE=alpha ;; "LCA4 (21066/21068)") - UNAME_MACHINE="alpha" ;; + UNAME_MACHINE=alpha ;; "EV5 (21164)") - UNAME_MACHINE="alphaev5" ;; + UNAME_MACHINE=alphaev5 ;; "EV5.6 (21164A)") - UNAME_MACHINE="alphaev56" ;; + UNAME_MACHINE=alphaev56 ;; "EV5.6 (21164PC)") - UNAME_MACHINE="alphapca56" ;; + UNAME_MACHINE=alphapca56 ;; "EV5.7 (21164PC)") - UNAME_MACHINE="alphapca57" ;; + UNAME_MACHINE=alphapca57 ;; "EV6 (21264)") - UNAME_MACHINE="alphaev6" ;; + UNAME_MACHINE=alphaev6 ;; "EV6.7 (21264A)") - UNAME_MACHINE="alphaev67" ;; + UNAME_MACHINE=alphaev67 ;; "EV6.8CB (21264C)") - UNAME_MACHINE="alphaev68" ;; + UNAME_MACHINE=alphaev68 ;; "EV6.8AL (21264B)") - UNAME_MACHINE="alphaev68" ;; + UNAME_MACHINE=alphaev68 ;; "EV6.8CX (21264D)") - UNAME_MACHINE="alphaev68" ;; + UNAME_MACHINE=alphaev68 ;; "EV6.9A (21264/EV69A)") - UNAME_MACHINE="alphaev69" ;; + UNAME_MACHINE=alphaev69 ;; "EV7 (21364)") - UNAME_MACHINE="alphaev7" ;; + UNAME_MACHINE=alphaev7 ;; "EV7.9 (21364A)") - UNAME_MACHINE="alphaev79" ;; + UNAME_MACHINE=alphaev79 ;; esac # A Pn.n version is a patched version. # A Vn.n version is a released version. # A Tn.n version is a released field test version. # A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. - echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` + echo "$UNAME_MACHINE"-dec-osf"`echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`" # Reset EXIT trap before exiting to avoid spurious non-zero exit code. exitcode=$? trap '' 0 exit $exitcode ;; - Alpha\ *:Windows_NT*:*) - # How do we know it's Interix rather than the generic POSIX subsystem? 
- # Should we change UNAME_MACHINE based on the output of uname instead - # of the specific Alpha model? - echo alpha-pc-interix - exit ;; - 21064:Windows_NT:50:3) - echo alpha-dec-winnt3.5 - exit ;; Amiga*:UNIX_System_V:4.0:*) echo m68k-unknown-sysv4 exit ;; *:[Aa]miga[Oo][Ss]:*:*) - echo ${UNAME_MACHINE}-unknown-amigaos + echo "$UNAME_MACHINE"-unknown-amigaos exit ;; *:[Mm]orph[Oo][Ss]:*:*) - echo ${UNAME_MACHINE}-unknown-morphos + echo "$UNAME_MACHINE"-unknown-morphos exit ;; *:OS/390:*:*) echo i370-ibm-openedition @@ -304,9 +353,9 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in echo powerpc-ibm-os400 exit ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) - echo arm-acorn-riscix${UNAME_RELEASE} + echo arm-acorn-riscix"$UNAME_RELEASE" exit ;; - arm:riscos:*:*|arm:RISCOS:*:*) + arm*:riscos:*:*|arm*:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) @@ -331,38 +380,33 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in sparc) echo sparc-icl-nx7; exit ;; esac ;; s390x:SunOS:*:*) - echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + echo "$UNAME_MACHINE"-ibm-solaris2"`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`" exit ;; sun4H:SunOS:5.*:*) - echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + echo sparc-hal-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" exit ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) - echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + echo sparc-sun-solaris2"`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`" exit ;; i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) - echo i386-pc-auroraux${UNAME_RELEASE} + echo i386-pc-auroraux"$UNAME_RELEASE" exit ;; i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) - eval $set_cc_for_build - SUN_ARCH="i386" - # If there is a compiler, see if it is configured for 64-bit objects. - # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. - # This test works for both compilers. - if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then - if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - SUN_ARCH="x86_64" - fi - fi - echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + UNAME_REL="`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`" + case `isainfo -b` in + 32) + echo i386-pc-solaris2"$UNAME_REL" + ;; + 64) + echo x86_64-pc-solaris2"$UNAME_REL" + ;; + esac exit ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize # SunOS6. Hard to guess exactly what SunOS6 will be like, but # it's likely to be more like Solaris than SunOS4. - echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + echo sparc-sun-solaris3"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" exit ;; sun4*:SunOS:*:*) case "`/usr/bin/arch -k`" in @@ -371,25 +415,25 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in ;; esac # Japanese Language versions have a version number like `4.1.3-JL'. 
- echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` + echo sparc-sun-sunos"`echo "$UNAME_RELEASE"|sed -e 's/-/_/'`" exit ;; sun3*:SunOS:*:*) - echo m68k-sun-sunos${UNAME_RELEASE} + echo m68k-sun-sunos"$UNAME_RELEASE" exit ;; sun*:*:4.2BSD:*) UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` - test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 + test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3 case "`/bin/arch`" in sun3) - echo m68k-sun-sunos${UNAME_RELEASE} + echo m68k-sun-sunos"$UNAME_RELEASE" ;; sun4) - echo sparc-sun-sunos${UNAME_RELEASE} + echo sparc-sun-sunos"$UNAME_RELEASE" ;; esac exit ;; aushp:SunOS:*:*) - echo sparc-auspex-sunos${UNAME_RELEASE} + echo sparc-auspex-sunos"$UNAME_RELEASE" exit ;; # The situation for MiNT is a little confusing. The machine name # can be virtually everything (everything which is not @@ -400,44 +444,44 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} + echo m68k-atari-mint"$UNAME_RELEASE" exit ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} + echo m68k-atari-mint"$UNAME_RELEASE" exit ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} + echo m68k-atari-mint"$UNAME_RELEASE" exit ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) - echo m68k-milan-mint${UNAME_RELEASE} + echo m68k-milan-mint"$UNAME_RELEASE" exit ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) - echo m68k-hades-mint${UNAME_RELEASE} + echo m68k-hades-mint"$UNAME_RELEASE" exit ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) - echo m68k-unknown-mint${UNAME_RELEASE} + echo m68k-unknown-mint"$UNAME_RELEASE" exit ;; m68k:machten:*:*) - echo m68k-apple-machten${UNAME_RELEASE} + echo m68k-apple-machten"$UNAME_RELEASE" exit ;; powerpc:machten:*:*) - echo powerpc-apple-machten${UNAME_RELEASE} + echo powerpc-apple-machten"$UNAME_RELEASE" exit ;; RISC*:Mach:*:*) echo mips-dec-mach_bsd4.3 exit ;; RISC*:ULTRIX:*:*) - echo mips-dec-ultrix${UNAME_RELEASE} + echo mips-dec-ultrix"$UNAME_RELEASE" exit ;; VAX*:ULTRIX*:*:*) - echo vax-dec-ultrix${UNAME_RELEASE} + echo vax-dec-ultrix"$UNAME_RELEASE" exit ;; 2020:CLIX:*:* | 2430:CLIX:*:*) - echo clipper-intergraph-clix${UNAME_RELEASE} + echo clipper-intergraph-clix"$UNAME_RELEASE" exit ;; mips:*:*:UMIPS | mips:*:*:RISCos) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" #ifdef __cplusplus #include /* for printf() prototype */ int main (int argc, char *argv[]) { @@ -446,23 +490,23 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in #endif #if defined (host_mips) && defined (MIPSEB) #if defined (SYSTYPE_SYSV) - printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); + printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_SVR4) - printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); + printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) - printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); + printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0); #endif #endif exit (-1); } EOF - $CC_FOR_BUILD -o $dummy $dummy.c && - dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && - SYSTEM_NAME=`$dummy $dummyarg` && 
+ $CC_FOR_BUILD -o "$dummy" "$dummy.c" && + dummyarg=`echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p'` && + SYSTEM_NAME=`"$dummy" "$dummyarg"` && { echo "$SYSTEM_NAME"; exit; } - echo mips-mips-riscos${UNAME_RELEASE} + echo mips-mips-riscos"$UNAME_RELEASE" exit ;; Motorola:PowerMAX_OS:*:*) echo powerpc-motorola-powermax @@ -488,17 +532,17 @@ EOF AViiON:dgux:*:*) # DG/UX returns AViiON for all architectures UNAME_PROCESSOR=`/usr/bin/uname -p` - if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] + if [ "$UNAME_PROCESSOR" = mc88100 ] || [ "$UNAME_PROCESSOR" = mc88110 ] then - if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ - [ ${TARGET_BINARY_INTERFACE}x = x ] + if [ "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx ] || \ + [ "$TARGET_BINARY_INTERFACE"x = x ] then - echo m88k-dg-dgux${UNAME_RELEASE} + echo m88k-dg-dgux"$UNAME_RELEASE" else - echo m88k-dg-dguxbcs${UNAME_RELEASE} + echo m88k-dg-dguxbcs"$UNAME_RELEASE" fi else - echo i586-dg-dgux${UNAME_RELEASE} + echo i586-dg-dgux"$UNAME_RELEASE" fi exit ;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) @@ -515,7 +559,7 @@ EOF echo m68k-tektronix-bsd exit ;; *:IRIX*:*:*) - echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` + echo mips-sgi-irix"`echo "$UNAME_RELEASE"|sed -e 's/-/_/g'`" exit ;; ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id @@ -527,14 +571,14 @@ EOF if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else - IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" fi - echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} + echo "$UNAME_MACHINE"-ibm-aix"$IBM_REV" exit ;; *:AIX:2:3) if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" #include main() @@ -545,7 +589,7 @@ EOF exit(0); } EOF - if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` + if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` then echo "$SYSTEM_NAME" else @@ -559,26 +603,27 @@ EOF exit ;; *:AIX:*:[4567]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` - if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then + if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 else IBM_ARCH=powerpc fi - if [ -x /usr/bin/oslevel ] ; then - IBM_REV=`/usr/bin/oslevel` + if [ -x /usr/bin/lslpp ] ; then + IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | + awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` else - IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + IBM_REV="$UNAME_VERSION.$UNAME_RELEASE" fi - echo ${IBM_ARCH}-ibm-aix${IBM_REV} + echo "$IBM_ARCH"-ibm-aix"$IBM_REV" exit ;; *:AIX:*:*) echo rs6000-ibm-aix exit ;; - ibmrt:4.4BSD:*|romp-ibm:BSD:*) + ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*) echo romp-ibm-bsd4.4 exit ;; ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and - echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to + echo romp-ibm-bsd"$UNAME_RELEASE" # 4.3 with uname added to exit ;; # report: romp-ibm BSD 4.3 *:BOSX:*:*) echo rs6000-bull-bosx @@ -593,28 +638,28 @@ EOF echo m68k-hp-bsd4.4 exit ;; 9000/[34678]??:HP-UX:*:*) - HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` - case "${UNAME_MACHINE}" in - 9000/31? ) HP_ARCH=m68000 ;; - 9000/[34]?? ) HP_ARCH=m68k ;; + HPUX_REV=`echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//'` + case "$UNAME_MACHINE" in + 9000/31?) HP_ARCH=m68000 ;; + 9000/[34]??) 
HP_ARCH=m68k ;; 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` - case "${sc_cpu_version}" in - 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 - 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 + case "$sc_cpu_version" in + 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 + 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 532) # CPU_PA_RISC2_0 - case "${sc_kernel_bits}" in - 32) HP_ARCH="hppa2.0n" ;; - 64) HP_ARCH="hppa2.0w" ;; - '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 + case "$sc_kernel_bits" in + 32) HP_ARCH=hppa2.0n ;; + 64) HP_ARCH=hppa2.0w ;; + '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 esac ;; esac fi - if [ "${HP_ARCH}" = "" ]; then - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c + if [ "$HP_ARCH" = "" ]; then + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" #define _HPUX_SOURCE #include @@ -647,13 +692,13 @@ EOF exit (0); } EOF - (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` + (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=`"$dummy"` test -z "$HP_ARCH" && HP_ARCH=hppa fi ;; esac - if [ ${HP_ARCH} = "hppa2.0w" ] + if [ "$HP_ARCH" = hppa2.0w ] then - eval $set_cc_for_build + set_cc_for_build # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler @@ -664,23 +709,23 @@ EOF # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess # => hppa64-hp-hpux11.23 - if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | + if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | grep -q __LP64__ then - HP_ARCH="hppa2.0w" + HP_ARCH=hppa2.0w else - HP_ARCH="hppa64" + HP_ARCH=hppa64 fi fi - echo ${HP_ARCH}-hp-hpux${HPUX_REV} + echo "$HP_ARCH"-hp-hpux"$HPUX_REV" exit ;; ia64:HP-UX:*:*) - HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` - echo ia64-hp-hpux${HPUX_REV} + HPUX_REV=`echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//'` + echo ia64-hp-hpux"$HPUX_REV" exit ;; 3050*:HI-UX:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" #include int main () @@ -705,11 +750,11 @@ EOF exit (0); } EOF - $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && + $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` && { echo "$SYSTEM_NAME"; exit; } echo unknown-hitachi-hiuxwe2 exit ;; - 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) + 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*) echo hppa1.1-hp-bsd exit ;; 9000/8??:4.3bsd:*:*) @@ -718,7 +763,7 @@ EOF *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) echo hppa1.0-hp-mpeix exit ;; - hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) + hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*) echo hppa1.1-hp-osf exit ;; hp8??:OSF1:*:*) @@ -726,9 +771,9 @@ EOF exit ;; i*86:OSF1:*:*) if [ -x /usr/sbin/sysversion ] ; then - echo ${UNAME_MACHINE}-unknown-osf1mk + echo "$UNAME_MACHINE"-unknown-osf1mk else - echo ${UNAME_MACHINE}-unknown-osf1 + echo "$UNAME_MACHINE"-unknown-osf1 fi exit ;; parisc*:Lites*:*:*) @@ -753,127 +798,120 @@ EOF echo c4-convex-bsd exit ;; CRAY*Y-MP:*:*:*) - echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + echo ymp-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*[A-Z]90:*:*:*) - echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ + echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ -e 's/\.[^.]*$/.X/' exit ;; CRAY*TS:*:*:*) - echo 
t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + echo t90-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*T3E:*:*:*) - echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + echo alphaev5-cray-unicosmk"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*SV1:*:*:*) - echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + echo sv1-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' exit ;; *:UNICOS/mp:*:*) - echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + echo craynv-cray-unicosmp"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/' exit ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) - FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` - FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` + FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` + FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` + FUJITSU_REL=`echo "$UNAME_RELEASE" | sed -e 's/ /_/'` echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; 5000:UNIX_System_V:4.*:*) - FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` + FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` + FUJITSU_REL=`echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'` echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) - echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} + echo "$UNAME_MACHINE"-pc-bsdi"$UNAME_RELEASE" exit ;; sparc*:BSD/OS:*:*) - echo sparc-unknown-bsdi${UNAME_RELEASE} + echo sparc-unknown-bsdi"$UNAME_RELEASE" exit ;; *:BSD/OS:*:*) - echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} + echo "$UNAME_MACHINE"-unknown-bsdi"$UNAME_RELEASE" + exit ;; + arm*:FreeBSD:*:*) + UNAME_PROCESSOR=`uname -p` + set_cc_for_build + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo "${UNAME_PROCESSOR}"-unknown-freebsd"`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`"-gnueabi + else + echo "${UNAME_PROCESSOR}"-unknown-freebsd"`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`"-gnueabihf + fi exit ;; *:FreeBSD:*:*) UNAME_PROCESSOR=`/usr/bin/uname -p` - case ${UNAME_PROCESSOR} in + case "$UNAME_PROCESSOR" in amd64) - echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; - *) - echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + UNAME_PROCESSOR=x86_64 ;; + i386) + UNAME_PROCESSOR=i586 ;; esac + echo "$UNAME_PROCESSOR"-unknown-freebsd"`echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`" exit ;; i*:CYGWIN*:*) - echo ${UNAME_MACHINE}-pc-cygwin + echo "$UNAME_MACHINE"-pc-cygwin exit ;; *:MINGW64*:*) - echo ${UNAME_MACHINE}-pc-mingw64 + echo "$UNAME_MACHINE"-pc-mingw64 exit ;; *:MINGW*:*) - echo ${UNAME_MACHINE}-pc-mingw32 + echo "$UNAME_MACHINE"-pc-mingw32 exit ;; - i*:MSYS*:*) - echo ${UNAME_MACHINE}-pc-msys - exit ;; - i*:windows32*:*) - # uname -m includes "-pc" on this system. 
- echo ${UNAME_MACHINE}-mingw32 + *:MSYS*:*) + echo "$UNAME_MACHINE"-pc-msys exit ;; i*:PW*:*) - echo ${UNAME_MACHINE}-pc-pw32 + echo "$UNAME_MACHINE"-pc-pw32 exit ;; *:Interix*:*) - case ${UNAME_MACHINE} in + case "$UNAME_MACHINE" in x86) - echo i586-pc-interix${UNAME_RELEASE} + echo i586-pc-interix"$UNAME_RELEASE" exit ;; authenticamd | genuineintel | EM64T) - echo x86_64-unknown-interix${UNAME_RELEASE} + echo x86_64-unknown-interix"$UNAME_RELEASE" exit ;; IA64) - echo ia64-unknown-interix${UNAME_RELEASE} + echo ia64-unknown-interix"$UNAME_RELEASE" exit ;; esac ;; - [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) - echo i${UNAME_MACHINE}-pc-mks - exit ;; - 8664:Windows_NT:*) - echo x86_64-pc-mks - exit ;; - i*:Windows_NT*:* | Pentium*:Windows_NT*:*) - # How do we know it's Interix rather than the generic POSIX subsystem? - # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we - # UNAME_MACHINE based on the output of uname instead of i386? - echo i586-pc-interix - exit ;; i*:UWIN*:*) - echo ${UNAME_MACHINE}-pc-uwin + echo "$UNAME_MACHINE"-pc-uwin exit ;; amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) echo x86_64-unknown-cygwin exit ;; - p*:CYGWIN*:*) - echo powerpcle-unknown-cygwin - exit ;; prep*:SunOS:5.*:*) - echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + echo powerpcle-unknown-solaris2"`echo "$UNAME_RELEASE"|sed -e 's/[^.]*//'`" exit ;; *:GNU:*:*) # the GNU system - echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` + echo "`echo "$UNAME_MACHINE"|sed -e 's,[-/].*$,,'`-unknown-$LIBC`echo "$UNAME_RELEASE"|sed -e 's,/.*$,,'`" exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland - echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu + echo "$UNAME_MACHINE-unknown-`echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`-$LIBC" exit ;; - i*86:Minix:*:*) - echo ${UNAME_MACHINE}-pc-minix + *:Minix:*:*) + echo "$UNAME_MACHINE"-unknown-minix exit ;; aarch64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; aarch64_be:Linux:*:*) UNAME_MACHINE=aarch64_be - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in @@ -886,63 +924,64 @@ EOF EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep -q ld.so.1 - if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi - echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} + if test "$?" 
= 0 ; then LIBC=gnulibc1 ; fi + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + arc:Linux:*:* | arceb:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; arm*:Linux:*:*) - eval $set_cc_for_build + set_cc_for_build if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_EABI__ then - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" else if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_PCS_VFP then - echo ${UNAME_MACHINE}-unknown-linux-gnueabi + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabi else - echo ${UNAME_MACHINE}-unknown-linux-gnueabihf + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabihf fi fi exit ;; avr32*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; cris:Linux:*:*) - echo ${UNAME_MACHINE}-axis-linux-gnu + echo "$UNAME_MACHINE"-axis-linux-"$LIBC" exit ;; crisv32:Linux:*:*) - echo ${UNAME_MACHINE}-axis-linux-gnu + echo "$UNAME_MACHINE"-axis-linux-"$LIBC" + exit ;; + e2k:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; frv:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; hexagon:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; i*86:Linux:*:*) - LIBC=gnu - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #ifdef __dietlibc__ - LIBC=dietlibc - #endif -EOF - eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` - echo "${UNAME_MACHINE}-pc-linux-${LIBC}" + echo "$UNAME_MACHINE"-pc-linux-"$LIBC" exit ;; ia64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + k1om:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; m32r*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; m68*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; mips:Linux:*:* | mips64:Linux:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c + set_cc_for_build + sed 's/^ //' << EOF > "$dummy.c" #undef CPU #undef ${UNAME_MACHINE} #undef ${UNAME_MACHINE}el @@ -956,55 +995,70 @@ EOF #endif #endif EOF - eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` - test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } + eval "`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU'`" + test "x$CPU" != x && { echo "$CPU-unknown-linux-$LIBC"; exit; } ;; - or32:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + mips64el:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" + exit ;; + openrisc*:Linux:*:*) + echo or1k-unknown-linux-"$LIBC" + exit ;; + or32:Linux:*:* | or1k*:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; padre:Linux:*:*) - echo sparc-unknown-linux-gnu + echo sparc-unknown-linux-"$LIBC" exit ;; parisc64:Linux:*:* | hppa64:Linux:*:*) - echo hppa64-unknown-linux-gnu + echo hppa64-unknown-linux-"$LIBC" exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in - PA7*) echo hppa1.1-unknown-linux-gnu ;; - PA8*) echo hppa2.0-unknown-linux-gnu ;; - *) echo hppa-unknown-linux-gnu ;; + PA7*) echo hppa1.1-unknown-linux-"$LIBC" ;; + PA8*) echo hppa2.0-unknown-linux-"$LIBC" ;; + *) echo hppa-unknown-linux-"$LIBC" ;; esac exit ;; ppc64:Linux:*:*) - echo powerpc64-unknown-linux-gnu + echo powerpc64-unknown-linux-"$LIBC" exit ;; 
ppc:Linux:*:*) - echo powerpc-unknown-linux-gnu + echo powerpc-unknown-linux-"$LIBC" + exit ;; + ppc64le:Linux:*:*) + echo powerpc64le-unknown-linux-"$LIBC" + exit ;; + ppcle:Linux:*:*) + echo powerpcle-unknown-linux-"$LIBC" + exit ;; + riscv32:Linux:*:* | riscv64:Linux:*:*) + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; s390:Linux:*:* | s390x:Linux:*:*) - echo ${UNAME_MACHINE}-ibm-linux + echo "$UNAME_MACHINE"-ibm-linux-"$LIBC" exit ;; sh64*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; sh*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; tile*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; vax:Linux:*:*) - echo ${UNAME_MACHINE}-dec-linux-gnu + echo "$UNAME_MACHINE"-dec-linux-"$LIBC" exit ;; x86_64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-pc-linux-"$LIBC" exit ;; xtensa*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo "$UNAME_MACHINE"-unknown-linux-"$LIBC" exit ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. @@ -1018,34 +1072,34 @@ EOF # I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. # Use sysv4.2uw... so that sysv4* matches it. - echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} + echo "$UNAME_MACHINE"-pc-sysv4.2uw"$UNAME_VERSION" exit ;; i*86:OS/2:*:*) # If we were able to find `uname', then EMX Unix compatibility # is probably installed. - echo ${UNAME_MACHINE}-pc-os2-emx + echo "$UNAME_MACHINE"-pc-os2-emx exit ;; i*86:XTS-300:*:STOP) - echo ${UNAME_MACHINE}-unknown-stop + echo "$UNAME_MACHINE"-unknown-stop exit ;; i*86:atheos:*:*) - echo ${UNAME_MACHINE}-unknown-atheos + echo "$UNAME_MACHINE"-unknown-atheos exit ;; i*86:syllable:*:*) - echo ${UNAME_MACHINE}-pc-syllable + echo "$UNAME_MACHINE"-pc-syllable exit ;; i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) - echo i386-unknown-lynxos${UNAME_RELEASE} + echo i386-unknown-lynxos"$UNAME_RELEASE" exit ;; i*86:*DOS:*:*) - echo ${UNAME_MACHINE}-pc-msdosdjgpp + echo "$UNAME_MACHINE"-pc-msdosdjgpp exit ;; - i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) - UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` + i*86:*:4.*:*) + UNAME_REL=`echo "$UNAME_RELEASE" | sed 's/\/MP$//'` if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then - echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} + echo "$UNAME_MACHINE"-univel-sysv"$UNAME_REL" else - echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} + echo "$UNAME_MACHINE"-pc-sysv"$UNAME_REL" fi exit ;; i*86:*:5:[678]*) @@ -1055,12 +1109,12 @@ EOF *Pentium) UNAME_MACHINE=i586 ;; *Pent*|*Celeron) UNAME_MACHINE=i686 ;; esac - echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} + echo "$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}{$UNAME_VERSION}" exit ;; i*86:*:3.2:*) if test -f /usr/options/cb.name; then UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 @@ -1070,9 +1124,9 @@ EOF && UNAME_MACHINE=i686 (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ && UNAME_MACHINE=i686 - echo ${UNAME_MACHINE}-pc-sco$UNAME_REL + echo "$UNAME_MACHINE"-pc-sco"$UNAME_REL" else - echo 
${UNAME_MACHINE}-pc-sysv32 + echo "$UNAME_MACHINE"-pc-sysv32 fi exit ;; pc:*:*:*) @@ -1080,7 +1134,7 @@ EOF # uname -m prints for DJGPP always 'pc', but it prints nothing about # the processor, so we play safe by assuming i586. # Note: whatever this is, it MUST be the same as what config.sub - # prints for the "djgpp" host, or else GDB configury will decide that + # prints for the "djgpp" host, or else GDB configure will decide that # this is a cross-build. echo i586-pc-msdosdjgpp exit ;; @@ -1092,9 +1146,9 @@ EOF exit ;; i860:*:4.*:*) # i860-SVR4 if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then - echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 + echo i860-stardent-sysv"$UNAME_RELEASE" # Stardent Vistra i860-SVR4 else # Add other i860-SVR4 vendors below as they are discovered. - echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 + echo i860-unknown-sysv"$UNAME_RELEASE" # Unknown i860-SVR4 fi exit ;; mini*:CTIX:SYS*5:*) @@ -1114,9 +1168,9 @@ EOF test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + && { echo i486-ncr-sysv4.3"$OS_REL"; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4; exit; } ;; @@ -1125,28 +1179,28 @@ EOF test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + && { echo i486-ncr-sysv4.3"$OS_REL"; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } + && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) - echo m68k-unknown-lynxos${UNAME_RELEASE} + echo m68k-unknown-lynxos"$UNAME_RELEASE" exit ;; mc68030:UNIX_System_V:4.*:*) echo m68k-atari-sysv4 exit ;; TSUNAMI:LynxOS:2.*:*) - echo sparc-unknown-lynxos${UNAME_RELEASE} + echo sparc-unknown-lynxos"$UNAME_RELEASE" exit ;; rs6000:LynxOS:2.*:*) - echo rs6000-unknown-lynxos${UNAME_RELEASE} + echo rs6000-unknown-lynxos"$UNAME_RELEASE" exit ;; PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) - echo powerpc-unknown-lynxos${UNAME_RELEASE} + echo powerpc-unknown-lynxos"$UNAME_RELEASE" exit ;; SM[BE]S:UNIX_SV:*:*) - echo mips-dde-sysv${UNAME_RELEASE} + echo mips-dde-sysv"$UNAME_RELEASE" exit ;; RM*:ReliantUNIX-*:*:*) echo mips-sni-sysv4 @@ -1157,7 +1211,7 @@ EOF *:SINIX-*:*:*) if uname -p 2>/dev/null >/dev/null ; then UNAME_MACHINE=`(uname -p) 2>/dev/null` - echo ${UNAME_MACHINE}-sni-sysv4 + echo "$UNAME_MACHINE"-sni-sysv4 else echo ns32k-sni-sysv fi @@ -1177,23 +1231,23 @@ EOF exit ;; i*86:VOS:*:*) # From Paul.Green@stratus.com. - echo ${UNAME_MACHINE}-stratus-vos + echo "$UNAME_MACHINE"-stratus-vos exit ;; *:VOS:*:*) # From Paul.Green@stratus.com. 
echo hppa1.1-stratus-vos exit ;; mc68*:A/UX:*:*) - echo m68k-apple-aux${UNAME_RELEASE} + echo m68k-apple-aux"$UNAME_RELEASE" exit ;; news*:NEWS-OS:6*:*) echo mips-sony-newsos6 exit ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then - echo mips-nec-sysv${UNAME_RELEASE} + echo mips-nec-sysv"$UNAME_RELEASE" else - echo mips-unknown-sysv${UNAME_RELEASE} + echo mips-unknown-sysv"$UNAME_RELEASE" fi exit ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. @@ -1212,65 +1266,93 @@ EOF echo x86_64-unknown-haiku exit ;; SX-4:SUPER-UX:*:*) - echo sx4-nec-superux${UNAME_RELEASE} + echo sx4-nec-superux"$UNAME_RELEASE" exit ;; SX-5:SUPER-UX:*:*) - echo sx5-nec-superux${UNAME_RELEASE} + echo sx5-nec-superux"$UNAME_RELEASE" exit ;; SX-6:SUPER-UX:*:*) - echo sx6-nec-superux${UNAME_RELEASE} + echo sx6-nec-superux"$UNAME_RELEASE" exit ;; SX-7:SUPER-UX:*:*) - echo sx7-nec-superux${UNAME_RELEASE} + echo sx7-nec-superux"$UNAME_RELEASE" exit ;; SX-8:SUPER-UX:*:*) - echo sx8-nec-superux${UNAME_RELEASE} + echo sx8-nec-superux"$UNAME_RELEASE" exit ;; SX-8R:SUPER-UX:*:*) - echo sx8r-nec-superux${UNAME_RELEASE} + echo sx8r-nec-superux"$UNAME_RELEASE" + exit ;; + SX-ACE:SUPER-UX:*:*) + echo sxace-nec-superux"$UNAME_RELEASE" exit ;; Power*:Rhapsody:*:*) - echo powerpc-apple-rhapsody${UNAME_RELEASE} + echo powerpc-apple-rhapsody"$UNAME_RELEASE" exit ;; *:Rhapsody:*:*) - echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} + echo "$UNAME_MACHINE"-apple-rhapsody"$UNAME_RELEASE" exit ;; *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown - case $UNAME_PROCESSOR in - i386) - eval $set_cc_for_build - if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then - if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - UNAME_PROCESSOR="x86_64" - fi - fi ;; - unknown) UNAME_PROCESSOR=powerpc ;; - esac - echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} + set_cc_for_build + if test "$UNAME_PROCESSOR" = unknown ; then + UNAME_PROCESSOR=powerpc + fi + if test "`echo "$UNAME_RELEASE" | sed -e 's/\..*//'`" -le 10 ; then + if [ "$CC_FOR_BUILD" != no_compiler_found ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + case $UNAME_PROCESSOR in + i386) UNAME_PROCESSOR=x86_64 ;; + powerpc) UNAME_PROCESSOR=powerpc64 ;; + esac + fi + # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc + if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_PPC >/dev/null + then + UNAME_PROCESSOR=powerpc + fi + fi + elif test "$UNAME_PROCESSOR" = i386 ; then + # Avoid executing cc on OS X 10.9, as it ships with a stub + # that puts up a graphical alert prompting to install + # developer tools. Any system running Mac OS X 10.7 or + # later (Darwin 11 and later) is required to have a 64-bit + # processor. This is not true of the ARM version of Darwin + # that Apple uses in portable devices. 
+ UNAME_PROCESSOR=x86_64 + fi + echo "$UNAME_PROCESSOR"-apple-darwin"$UNAME_RELEASE" exit ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) UNAME_PROCESSOR=`uname -p` - if test "$UNAME_PROCESSOR" = "x86"; then + if test "$UNAME_PROCESSOR" = x86; then UNAME_PROCESSOR=i386 UNAME_MACHINE=pc fi - echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} + echo "$UNAME_PROCESSOR"-"$UNAME_MACHINE"-nto-qnx"$UNAME_RELEASE" exit ;; *:QNX:*:4*) echo i386-pc-qnx exit ;; - NEO-?:NONSTOP_KERNEL:*:*) - echo neo-tandem-nsk${UNAME_RELEASE} + NEO-*:NONSTOP_KERNEL:*:*) + echo neo-tandem-nsk"$UNAME_RELEASE" exit ;; NSE-*:NONSTOP_KERNEL:*:*) - echo nse-tandem-nsk${UNAME_RELEASE} + echo nse-tandem-nsk"$UNAME_RELEASE" + exit ;; + NSR-*:NONSTOP_KERNEL:*:*) + echo nsr-tandem-nsk"$UNAME_RELEASE" exit ;; - NSR-?:NONSTOP_KERNEL:*:*) - echo nsr-tandem-nsk${UNAME_RELEASE} + NSV-*:NONSTOP_KERNEL:*:*) + echo nsv-tandem-nsk"$UNAME_RELEASE" + exit ;; + NSX-*:NONSTOP_KERNEL:*:*) + echo nsx-tandem-nsk"$UNAME_RELEASE" exit ;; *:NonStop-UX:*:*) echo mips-compaq-nonstopux @@ -1279,18 +1361,19 @@ EOF echo bs2000-siemens-sysv exit ;; DS/*:UNIX_System_V:*:*) - echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} + echo "$UNAME_MACHINE"-"$UNAME_SYSTEM"-"$UNAME_RELEASE" exit ;; *:Plan9:*:*) # "uname -m" is not consistent, so use $cputype instead. 386 # is converted to i386 for consistency with other x86 # operating systems. - if test "$cputype" = "386"; then + # shellcheck disable=SC2154 + if test "$cputype" = 386; then UNAME_MACHINE=i386 else UNAME_MACHINE="$cputype" fi - echo ${UNAME_MACHINE}-unknown-plan9 + echo "$UNAME_MACHINE"-unknown-plan9 exit ;; *:TOPS-10:*:*) echo pdp10-unknown-tops10 @@ -1311,14 +1394,14 @@ EOF echo pdp10-unknown-its exit ;; SEI:*:*:SEIUX) - echo mips-sei-seiux${UNAME_RELEASE} + echo mips-sei-seiux"$UNAME_RELEASE" exit ;; *:DragonFly:*:*) - echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` + echo "$UNAME_MACHINE"-unknown-dragonfly"`echo "$UNAME_RELEASE"|sed -e 's/[-(].*//'`" exit ;; *:*VMS:*:*) UNAME_MACHINE=`(uname -p) 2>/dev/null` - case "${UNAME_MACHINE}" in + case "$UNAME_MACHINE" in A*) echo alpha-dec-vms ; exit ;; I*) echo ia64-dec-vms ; exit ;; V*) echo vax-dec-vms ; exit ;; @@ -1327,182 +1410,48 @@ EOF echo i386-pc-xenix exit ;; i*86:skyos:*:*) - echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' + echo "$UNAME_MACHINE"-pc-skyos"`echo "$UNAME_RELEASE" | sed -e 's/ .*$//'`" exit ;; i*86:rdos:*:*) - echo ${UNAME_MACHINE}-pc-rdos + echo "$UNAME_MACHINE"-pc-rdos exit ;; i*86:AROS:*:*) - echo ${UNAME_MACHINE}-pc-aros + echo "$UNAME_MACHINE"-pc-aros exit ;; x86_64:VMkernel:*:*) - echo ${UNAME_MACHINE}-unknown-esx + echo "$UNAME_MACHINE"-unknown-esx + exit ;; + amd64:Isilon\ OneFS:*:*) + echo x86_64-unknown-onefs exit ;; esac -eval $set_cc_for_build -cat >$dummy.c < -# include -#endif -main () -{ -#if defined (sony) -#if defined (MIPSEB) - /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, - I don't know.... 
*/ - printf ("mips-sony-bsd\n"); exit (0); -#else -#include - printf ("m68k-sony-newsos%s\n", -#ifdef NEWSOS4 - "4" -#else - "" -#endif - ); exit (0); -#endif -#endif - -#if defined (__arm) && defined (__acorn) && defined (__unix) - printf ("arm-acorn-riscix\n"); exit (0); -#endif +echo "$0: unable to guess system type" >&2 -#if defined (hp300) && !defined (hpux) - printf ("m68k-hp-bsd\n"); exit (0); -#endif +case "$UNAME_MACHINE:$UNAME_SYSTEM" in + mips:Linux | mips64:Linux) + # If we got here on MIPS GNU/Linux, output extra information. + cat >&2 </dev/null`; - if (version < 4) - printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); - else - printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); - exit (0); -#endif - -#if defined (MULTIMAX) || defined (n16) -#if defined (UMAXV) - printf ("ns32k-encore-sysv\n"); exit (0); -#else -#if defined (CMU) - printf ("ns32k-encore-mach\n"); exit (0); -#else - printf ("ns32k-encore-bsd\n"); exit (0); -#endif -#endif -#endif - -#if defined (__386BSD__) - printf ("i386-pc-bsd\n"); exit (0); -#endif - -#if defined (sequent) -#if defined (i386) - printf ("i386-sequent-dynix\n"); exit (0); -#endif -#if defined (ns32000) - printf ("ns32k-sequent-dynix\n"); exit (0); -#endif -#endif - -#if defined (_SEQUENT_) - struct utsname un; - - uname(&un); - - if (strncmp(un.version, "V2", 2) == 0) { - printf ("i386-sequent-ptx2\n"); exit (0); - } - if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ - printf ("i386-sequent-ptx1\n"); exit (0); - } - printf ("i386-sequent-ptx\n"); exit (0); - -#endif - -#if defined (vax) -# if !defined (ultrix) -# include -# if defined (BSD) -# if BSD == 43 - printf ("vax-dec-bsd4.3\n"); exit (0); -# else -# if BSD == 199006 - printf ("vax-dec-bsd4.3reno\n"); exit (0); -# else - printf ("vax-dec-bsd\n"); exit (0); -# endif -# endif -# else - printf ("vax-dec-bsd\n"); exit (0); -# endif -# else - printf ("vax-dec-ultrix\n"); exit (0); -# endif -#endif - -#if defined (alliant) && defined (i860) - printf ("i860-alliant-bsd\n"); exit (0); -#endif - - exit (1); -} +NOTE: MIPS GNU/Linux systems require a C compiler to fully recognize +the system type. Please install a C compiler and try again. EOF - -$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && - { echo "$SYSTEM_NAME"; exit; } - -# Apollos put the system type in the environment. - -test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } - -# Convex versions that predate uname can use getsysinfo(1) - -if [ -x /usr/convex/getsysinfo ] -then - case `getsysinfo -f cpu_type` in - c1*) - echo c1-convex-bsd - exit ;; - c2*) - if getsysinfo -f scalar_acc - then echo c32-convex-bsd - else echo c2-convex-bsd - fi - exit ;; - c34*) - echo c34-convex-bsd - exit ;; - c38*) - echo c38-convex-bsd - exit ;; - c4*) - echo c4-convex-bsd - exit ;; - esac -fi + ;; +esac cat >&2 < in order to provide the needed -information to handle your system. +If $0 has already been updated, send the following data and any +information you think might be pertinent to config-patches@gnu.org to +provide the necessary information to handle your system. 
config.guess timestamp = $timestamp @@ -1521,16 +1470,16 @@ hostinfo = `(hostinfo) 2>/dev/null` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` -UNAME_MACHINE = ${UNAME_MACHINE} -UNAME_RELEASE = ${UNAME_RELEASE} -UNAME_SYSTEM = ${UNAME_SYSTEM} -UNAME_VERSION = ${UNAME_VERSION} +UNAME_MACHINE = "$UNAME_MACHINE" +UNAME_RELEASE = "$UNAME_RELEASE" +UNAME_SYSTEM = "$UNAME_SYSTEM" +UNAME_VERSION = "$UNAME_VERSION" EOF exit 1 # Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) +# eval: (add-hook 'before-save-hook 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" diff --git a/config/config.sub b/config/config.sub index bdda9e4a32c..c19e671805a 100755 --- a/config/config.sub +++ b/config/config.sub @@ -1,36 +1,31 @@ #! /bin/sh # Configuration validation subroutine script. -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012 Free Software Foundation, Inc. +# Copyright 1992-2018 Free Software Foundation, Inc. -timestamp='2012-08-18' +timestamp='2018-08-13' -# This file is (in principle) common to ALL GNU software. -# The presence of a machine in this file suggests that SOME GNU software -# can handle that machine. It does not imply ALL GNU software can. -# -# This file is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with this program; if not, see . +# along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). -# Please send patches to . Submit a context -# diff and a properly formatted GNU ChangeLog entry. +# Please send patches to . # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. @@ -38,7 +33,7 @@ timestamp='2012-08-18' # Otherwise, we print the canonical config type on stdout and succeed. 
# You can get the latest version of this script from: -# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD +# https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases @@ -58,12 +53,11 @@ timestamp='2012-08-18' me=`echo "$0" | sed -e 's,.*/,,'` usage="\ -Usage: $0 [OPTION] CPU-MFR-OPSYS - $0 [OPTION] ALIAS +Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS Canonicalize a configuration name. -Operation modes: +Options: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit @@ -73,9 +67,7 @@ Report bugs and patches to ." version="\ GNU config.sub ($timestamp) -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, -2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 -Free Software Foundation, Inc. +Copyright 1992-2018 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." @@ -102,7 +94,7 @@ while test $# -gt 0 ; do *local*) # First pass through any local machine types. - echo $1 + echo "$1" exit ;; * ) @@ -118,162 +110,596 @@ case $# in exit 1;; esac -# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). -# Here we must recognize all the valid KERNEL-OS combinations. -maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` -case $maybe_os in - nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ - linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ - knetbsd*-gnu* | netbsd*-gnu* | \ - kopensolaris*-gnu* | \ - storm-chaos* | os2-emx* | rtmk-nova*) - os=-$maybe_os - basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` - ;; - android-linux) - os=-linux-android - basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown - ;; - *) - basic_machine=`echo $1 | sed 's/-[^-]*$//'` - if [ $basic_machine != $1 ] - then os=`echo $1 | sed 's/.*-/-/'` - else os=; fi - ;; -esac +# Split fields of configuration type +IFS="-" read -r field1 field2 field3 field4 <&2 + exit 1 ;; - -wrs) - os=-vxworks - basic_machine=$1 + *-*-*-*) + basic_machine=$field1-$field2 + os=$field3-$field4 ;; - -chorusos*) - os=-chorusos - basic_machine=$1 + *-*-*) + # Ambiguous whether COMPANY is present, or skipped and KERNEL-OS is two + # parts + maybe_os=$field2-$field3 + case $maybe_os in + nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc \ + | linux-newlib* | linux-musl* | linux-uclibc* | uclinux-uclibc* \ + | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \ + | netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \ + | storm-chaos* | os2-emx* | rtmk-nova*) + basic_machine=$field1 + os=$maybe_os + ;; + android-linux) + basic_machine=$field1-unknown + os=linux-android + ;; + *) + basic_machine=$field1-$field2 + os=$field3 + ;; + esac ;; - -chorusrdb) - os=-chorusrdb - basic_machine=$1 + *-*) + # Second component is usually, but not always the OS + case $field2 in + # Prevent following clause from handling this valid os + sun*os*) + basic_machine=$field1 + os=$field2 + ;; + # Manufacturers + dec* | mips* | sequent* | encore* | pc532* | sgi* | sony* \ + | att* | 7300* | 3300* | delta* | motorola* | sun[234]* \ + | unicom* | ibm* | next | hp | isi* | apollo | altos* \ + | convergent* | ncr* | news | 32* | 3600* 
| 3100* | hitachi* \ + | c[123]* | convex* | sun | crds | omron* | dg | ultra | tti* \ + | harris | dolphin | highlevel | gould | cbm | ns | masscomp \ + | apple | axis | knuth | cray | microblaze* \ + | sim | cisco | oki | wec | wrs | winbond) + basic_machine=$field1-$field2 + os= + ;; + *) + basic_machine=$field1 + os=$field2 + ;; + esac ;; - -hiux*) - os=-hiuxwe2 + *) + # Convert single-component short-hands not valid as part of + # multi-component configurations. + case $field1 in + 386bsd) + basic_machine=i386-pc + os=bsd + ;; + a29khif) + basic_machine=a29k-amd + os=udi + ;; + adobe68k) + basic_machine=m68010-adobe + os=scout + ;; + alliant) + basic_machine=fx80-alliant + os= + ;; + altos | altos3068) + basic_machine=m68k-altos + os= + ;; + am29k) + basic_machine=a29k-none + os=bsd + ;; + amdahl) + basic_machine=580-amdahl + os=sysv + ;; + amigaos | amigados) + basic_machine=m68k-unknown + os=amigaos + ;; + amigaunix | amix) + basic_machine=m68k-unknown + os=sysv4 + ;; + apollo68) + basic_machine=m68k-apollo + os=sysv + ;; + apollo68bsd) + basic_machine=m68k-apollo + os=bsd + ;; + aros) + basic_machine=i386-pc + os=aros + ;; + aux) + basic_machine=m68k-apple + os=aux + ;; + balance) + basic_machine=ns32k-sequent + os=dynix + ;; + blackfin) + basic_machine=bfin-unknown + os=linux + ;; + cegcc) + basic_machine=arm-unknown + os=cegcc + ;; + convex-c1) + basic_machine=c1-convex + os=bsd + ;; + convex-c2) + basic_machine=c2-convex + os=bsd + ;; + convex-c32) + basic_machine=c32-convex + os=bsd + ;; + convex-c34) + basic_machine=c34-convex + os=bsd + ;; + convex-c38) + basic_machine=c38-convex + os=bsd + ;; + cray) + basic_machine=j90-cray + os=unicos + ;; + crds | unos) + basic_machine=m68k-crds + os= + ;; + delta88) + basic_machine=m88k-motorola + os=sysv3 + ;; + dicos) + basic_machine=i686-pc + os=dicos + ;; + djgpp) + basic_machine=i586-pc + os=msdosdjgpp + ;; + ebmon29k) + basic_machine=a29k-amd + os=ebmon + ;; + es1800 | OSE68k | ose68k | ose | OSE) + basic_machine=m68k-ericsson + os=ose + ;; + gmicro) + basic_machine=tron-gmicro + os=sysv + ;; + go32) + basic_machine=i386-pc + os=go32 + ;; + h8300hms) + basic_machine=h8300-hitachi + os=hms + ;; + h8300xray) + basic_machine=h8300-hitachi + os=xray + ;; + h8500hms) + basic_machine=h8500-hitachi + os=hms + ;; + harris) + basic_machine=m88k-harris + os=sysv3 + ;; + hp300bsd) + basic_machine=m68k-hp + os=bsd + ;; + hp300hpux) + basic_machine=m68k-hp + os=hpux + ;; + hppaosf) + basic_machine=hppa1.1-hp + os=osf + ;; + hppro) + basic_machine=hppa1.1-hp + os=proelf + ;; + i386mach) + basic_machine=i386-mach + os=mach + ;; + vsta) + basic_machine=i386-pc + os=vsta + ;; + isi68 | isi) + basic_machine=m68k-isi + os=sysv + ;; + m68knommu) + basic_machine=m68k-unknown + os=linux + ;; + magnum | m3230) + basic_machine=mips-mips + os=sysv + ;; + merlin) + basic_machine=ns32k-utek + os=sysv + ;; + mingw64) + basic_machine=x86_64-pc + os=mingw64 + ;; + mingw32) + basic_machine=i686-pc + os=mingw32 + ;; + mingw32ce) + basic_machine=arm-unknown + os=mingw32ce + ;; + monitor) + basic_machine=m68k-rom68k + os=coff + ;; + morphos) + basic_machine=powerpc-unknown + os=morphos + ;; + moxiebox) + basic_machine=moxie-unknown + os=moxiebox + ;; + msdos) + basic_machine=i386-pc + os=msdos + ;; + msys) + basic_machine=i686-pc + os=msys + ;; + mvs) + basic_machine=i370-ibm + os=mvs + ;; + nacl) + basic_machine=le32-unknown + os=nacl + ;; + ncr3000) + basic_machine=i486-ncr + os=sysv4 + ;; + netbsd386) + basic_machine=i386-pc + os=netbsd + ;; + netwinder) + 
basic_machine=armv4l-rebel + os=linux + ;; + news | news700 | news800 | news900) + basic_machine=m68k-sony + os=newsos + ;; + news1000) + basic_machine=m68030-sony + os=newsos + ;; + necv70) + basic_machine=v70-nec + os=sysv + ;; + nh3000) + basic_machine=m68k-harris + os=cxux + ;; + nh[45]000) + basic_machine=m88k-harris + os=cxux + ;; + nindy960) + basic_machine=i960-intel + os=nindy + ;; + mon960) + basic_machine=i960-intel + os=mon960 + ;; + nonstopux) + basic_machine=mips-compaq + os=nonstopux + ;; + os400) + basic_machine=powerpc-ibm + os=os400 + ;; + OSE68000 | ose68000) + basic_machine=m68000-ericsson + os=ose + ;; + os68k) + basic_machine=m68k-none + os=os68k + ;; + paragon) + basic_machine=i860-intel + os=osf + ;; + parisc) + basic_machine=hppa-unknown + os=linux + ;; + pw32) + basic_machine=i586-unknown + os=pw32 + ;; + rdos | rdos64) + basic_machine=x86_64-pc + os=rdos + ;; + rdos32) + basic_machine=i386-pc + os=rdos + ;; + rom68k) + basic_machine=m68k-rom68k + os=coff + ;; + sa29200) + basic_machine=a29k-amd + os=udi + ;; + sei) + basic_machine=mips-sei + os=seiux + ;; + sps7) + basic_machine=m68k-bull + os=sysv2 + ;; + st2000) + basic_machine=m68k-tandem + os= + ;; + stratus) + basic_machine=i860-stratus + os=sysv4 + ;; + sun2) + basic_machine=m68000-sun + os= + ;; + sun2os3) + basic_machine=m68000-sun + os=sunos3 + ;; + sun2os4) + basic_machine=m68000-sun + os=sunos4 + ;; + sun3) + basic_machine=m68k-sun + os= + ;; + sun3os3) + basic_machine=m68k-sun + os=sunos3 + ;; + sun3os4) + basic_machine=m68k-sun + os=sunos4 + ;; + sun4) + basic_machine=sparc-sun + os= + ;; + sun4os3) + basic_machine=sparc-sun + os=sunos3 + ;; + sun4os4) + basic_machine=sparc-sun + os=sunos4 + ;; + sun4sol2) + basic_machine=sparc-sun + os=solaris2 + ;; + sun386 | sun386i | roadrunner) + basic_machine=i386-sun + os= + ;; + sv1) + basic_machine=sv1-cray + os=unicos + ;; + symmetry) + basic_machine=i386-sequent + os=dynix + ;; + t3e) + basic_machine=alphaev5-cray + os=unicos + ;; + t90) + basic_machine=t90-cray + os=unicos + ;; + toad1) + basic_machine=pdp10-xkl + os=tops20 + ;; + tpf) + basic_machine=s390x-ibm + os=tpf + ;; + udi29k) + basic_machine=a29k-amd + os=udi + ;; + ultra3) + basic_machine=a29k-nyu + os=sym1 + ;; + v810 | necv810) + basic_machine=v810-nec + os=none + ;; + vaxv) + basic_machine=vax-dec + os=sysv + ;; + vms) + basic_machine=vax-dec + os=vms + ;; + vxworks960) + basic_machine=i960-wrs + os=vxworks + ;; + vxworks68) + basic_machine=m68k-wrs + os=vxworks + ;; + vxworks29k) + basic_machine=a29k-wrs + os=vxworks + ;; + xbox) + basic_machine=i686-pc + os=mingw32 + ;; + ymp) + basic_machine=ymp-cray + os=unicos + ;; + *) + basic_machine=$1 + os= + ;; + esac ;; - -sco6) - os=-sco5v6 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` +esac + +# Decode aliases for certain CPU-COMPANY combinations. +case $basic_machine in + # Here we handle the default manufacturer of certain CPU types. It is in + # some cases the only manufacturer, in others, it is the most popular. + craynv) + basic_machine=craynv-cray + os=${os:-unicosmp} ;; - -sco5) - os=-sco3.2v5 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + fx80) + basic_machine=fx80-alliant ;; - -sco4) - os=-sco3.2v4 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + w89k) + basic_machine=hppa1.1-winbond ;; - -sco3.2.[4-9]*) - os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + op50n) + basic_machine=hppa1.1-oki ;; - -sco3.2v[4-9]*) - # Don't forget version if it is 3.2v4 or newer. 
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + op60c) + basic_machine=hppa1.1-oki ;; - -sco5v6*) - # Don't forget version if it is 3.2v4 or newer. - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + romp) + basic_machine=romp-ibm ;; - -sco*) - os=-sco3.2v2 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + mmix) + basic_machine=mmix-knuth ;; - -udk*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + rs6000) + basic_machine=rs6000-ibm ;; - -isc) - os=-isc2.2 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + vax) + basic_machine=vax-dec ;; - -clix*) - basic_machine=clipper-intergraph + pdp11) + basic_machine=pdp11-dec ;; - -isc*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + we32k) + basic_machine=we32k-att ;; - -lynx*178) - os=-lynxos178 + cydra) + basic_machine=cydra-cydrome ;; - -lynx*5) - os=-lynxos5 + i370-ibm* | ibm*) + basic_machine=i370-ibm ;; - -lynx*) - os=-lynxos + orion) + basic_machine=orion-highlevel ;; - -ptx*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` + orion105) + basic_machine=clipper-highlevel ;; - -windowsnt*) - os=`echo $os | sed -e 's/windowsnt/winnt/'` + mac | mpw | mac-mpw) + basic_machine=m68k-apple ;; - -psos*) - os=-psos + pmac | pmac-mpw) + basic_machine=powerpc-apple ;; - -mint | -mint[0-9]*) - basic_machine=m68k-atari - os=-mint + xps | xps100) + basic_machine=xps100-honeywell ;; -esac -# Decode aliases for certain CPU-COMPANY combinations. -case $basic_machine in # Recognize the basic CPU types without company name. # Some are omitted here because they have special meanings below. 1750a | 580 \ | a29k \ | aarch64 | aarch64_be \ + | abacus \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ - | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ - | be32 | be64 \ + | arc | arceb \ + | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv6m | armv[78][arm] \ + | avr | avr32 \ + | asmjs \ + | ba \ + | be32 | be64 \ | bfin \ - | c4x | clipper \ + | c4x | c8051 | clipper | csky \ | d10v | d30v | dlx | dsp16xx \ - | epiphany \ - | fido | fr30 | frv \ + | e2k | epiphany \ + | fido | fr30 | frv | ft32 \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ | hexagon \ - | i370 | i860 | i960 | ia64 \ + | i370 | i860 | i960 | ia16 | ia64 \ | ip2k | iq2000 \ + | k1om \ | le32 | le64 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ - | maxq | mb | microblaze | mcore | mep | metag \ + | m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip \ + | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ @@ -287,26 +713,31 @@ case $basic_machine in | mips64vr5900 | mips64vr5900el \ | mipsisa32 | mipsisa32el \ | mipsisa32r2 | mipsisa32r2el \ + | mipsisa32r6 | mipsisa32r6el \ | mipsisa64 | mipsisa64el \ | mipsisa64r2 | mipsisa64r2el \ + | mipsisa64r6 | mipsisa64r6el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ + | mipsr5900 | mipsr5900el \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | moxie \ | mt \ | msp430 \ | nds32 | nds32le | nds32be \ - | nios | nios2 \ + | nfp \ + | nios | nios2 | nios2eb | nios2el \ | ns16k | ns32k \ - | open8 \ - | or32 \ - | pdp10 | pdp11 | pj | pjl \ + | open8 | or1k | or1knd | or32 \ + | pdp10 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle \ + | pru \ | pyramid \ + | riscv | riscv32 | riscv64 \ | rl78 | rx 
\ | score \ - | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ + | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh[23]ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ @@ -314,8 +745,9 @@ case $basic_machine in | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ | ubicom32 \ | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ - | we32k \ - | x86 | xc16x | xstormy16 | xtensa \ + | visium \ + | wasm32 \ + | x86 | xc16x | xstormy16 | xgate | xtensa \ | z8k | z80) basic_machine=$basic_machine-unknown ;; @@ -328,23 +760,23 @@ case $basic_machine in c6x) basic_machine=tic6x-unknown ;; - m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) - basic_machine=$basic_machine-unknown - os=-none + leon|leon[3-9]) + basic_machine=sparc-$basic_machine + ;; + m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65) + ;; + m9s12z | m68hcs12z | hcs12z | s12z) + basic_machine=s12z-unknown ;; - m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) + m9s12z-* | m68hcs12z-* | hcs12z-* | s12z-*) + basic_machine=s12z-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; ms1) basic_machine=mt-unknown ;; - strongarm | thumb | xscale) basic_machine=arm-unknown ;; - xgate) - basic_machine=$basic_machine-unknown - os=-none - ;; xscaleeb) basic_machine=armeb-unknown ;; @@ -359,37 +791,40 @@ case $basic_machine in i*86 | x86_64) basic_machine=$basic_machine-pc ;; - # Object if more than one company name word. - *-*-*) - echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 - exit 1 - ;; # Recognize the basic CPU types with company name. 
- 580-* \ + 1750a-* | 580-* \ | a29k-* \ | aarch64-* | aarch64_be-* \ + | abacus-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ - | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ + | alphapca5[67]-* | alpha64pca5[67]-* \ + | am33_2.0-* \ + | arc-* | arceb-* \ + | arm-* | arm[lb]e-* | arme[lb]-* | armv*-* \ | avr-* | avr32-* \ + | asmjs-* \ + | ba-* \ | be32-* | be64-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* \ - | clipper-* | craynv-* | cydra-* \ - | d10v-* | d30v-* | dlx-* \ - | elxsi-* \ - | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ + | c8051-* | clipper-* | craynv-* | csky-* | cydra-* \ + | d10v-* | d30v-* | dlx-* | dsp16xx-* \ + | e2k-* | elxsi-* | epiphany-* \ + | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | ft32-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ | hexagon-* \ - | i*86-* | i860-* | i960-* | ia64-* \ + | i370-* | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ | ip2k-* | iq2000-* \ + | k1om-* \ | le32-* | le64-* \ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ - | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ - | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \ + | m5200-* | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* | v70-* | w65-* \ + | m6811-* | m68hc11-* | m6812-* | m68hc12-* | m68hcs12x-* | nvptx-* | picochip-* \ + | m88110-* | m88k-* | maxq-* | mb-* | mcore-* | mep-* | metag-* \ + | microblaze-* | microblazeel-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ @@ -403,37 +838,50 @@ case $basic_machine in | mips64vr5900-* | mips64vr5900el-* \ | mipsisa32-* | mipsisa32el-* \ | mipsisa32r2-* | mipsisa32r2el-* \ + | mipsisa32r6-* | mipsisa32r6el-* \ | mipsisa64-* | mipsisa64el-* \ | mipsisa64r2-* | mipsisa64r2el-* \ + | mipsisa64r6-* | mipsisa64r6el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ + | mipsr5900-* | mipsr5900el-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ + | mn10200-* | mn10300-* \ + | moxie-* \ | mt-* \ | msp430-* \ | nds32-* | nds32le-* | nds32be-* \ - | nios-* | nios2-* \ + | nfp-* \ + | nios-* | nios2-* | nios2eb-* | nios2el-* \ | none-* | np1-* | ns16k-* | ns32k-* \ | open8-* \ + | or1k*-* \ + | or32-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ + | pru-* \ | pyramid-* \ + | riscv-* | riscv32-* | riscv64-* \ | rl78-* | romp-* | rs6000-* | rx-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ - | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ + | score-* \ + | sh-* | sh[1234]-* | sh[24]a-* | sh[24]ae[lb]-* | sh[23]e-* | she[lb]-* | sh[lb]e-* \ + | sh[1234]e[lb]-* | sh[12345][lb]e-* | sh[23]ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ | sparclite-* \ - | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ + | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ + | spu-* \ | tahoe-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ - | tile*-* \ | tron-* \ | ubicom32-* \ | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ | vax-* \ + | visium-* \ + | wasm32-* \ | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* \ + | x86-* | x86_64-* | xc16x-* | xgate-* | 
xps100-* \ | xstormy16-* | xtensa*-* \ | ymp-* \ | z8k-* | z80-*) @@ -444,141 +892,45 @@ case $basic_machine in ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. - 386bsd) - basic_machine=i386-unknown - os=-bsd - ;; 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) basic_machine=m68000-att ;; 3b*) basic_machine=we32k-att ;; - a29khif) - basic_machine=a29k-amd - os=-udi - ;; - abacus) - basic_machine=abacus-unknown - ;; - adobe68k) - basic_machine=m68010-adobe - os=-scout - ;; - alliant | fx80) - basic_machine=fx80-alliant - ;; - altos | altos3068) - basic_machine=m68k-altos - ;; - am29k) - basic_machine=a29k-none - os=-bsd - ;; amd64) basic_machine=x86_64-pc ;; amd64-*) - basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - amdahl) - basic_machine=580-amdahl - os=-sysv + basic_machine=x86_64-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; amiga | amiga-*) basic_machine=m68k-unknown ;; - amigaos | amigados) - basic_machine=m68k-unknown - os=-amigaos - ;; - amigaunix | amix) - basic_machine=m68k-unknown - os=-sysv4 - ;; - apollo68) - basic_machine=m68k-apollo - os=-sysv - ;; - apollo68bsd) - basic_machine=m68k-apollo - os=-bsd - ;; - aros) - basic_machine=i386-pc - os=-aros - ;; - aux) - basic_machine=m68k-apple - os=-aux - ;; - balance) - basic_machine=ns32k-sequent - os=-dynix - ;; - blackfin) - basic_machine=bfin-unknown - os=-linux - ;; blackfin-*) - basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux + basic_machine=bfin-`echo "$basic_machine" | sed 's/^[^-]*-//'` + os=linux ;; bluegene*) basic_machine=powerpc-ibm - os=-cnk + os=cnk ;; c54x-*) - basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=tic54x-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; c55x-*) - basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=tic55x-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; c6x-*) - basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=tic6x-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; c90) basic_machine=c90-cray - os=-unicos - ;; - cegcc) - basic_machine=arm-unknown - os=-cegcc - ;; - convex-c1) - basic_machine=c1-convex - os=-bsd - ;; - convex-c2) - basic_machine=c2-convex - os=-bsd - ;; - convex-c32) - basic_machine=c32-convex - os=-bsd - ;; - convex-c34) - basic_machine=c34-convex - os=-bsd - ;; - convex-c38) - basic_machine=c38-convex - os=-bsd - ;; - cray | j90) - basic_machine=j90-cray - os=-unicos - ;; - craynv) - basic_machine=craynv-cray - os=-unicosmp + os=${os:-unicos} ;; cr16 | cr16-*) basic_machine=cr16-unknown - os=-elf - ;; - crds | unos) - basic_machine=m68k-crds + os=${os:-elf} ;; crisv32 | crisv32-* | etraxfs*) basic_machine=crisv32-axis @@ -588,7 +940,7 @@ case $basic_machine in ;; crx) basic_machine=crx-unknown - os=-elf + os=${os:-elf} ;; da30 | da30-*) basic_machine=m68k-da30 @@ -598,50 +950,38 @@ case $basic_machine in ;; decsystem10* | dec10*) basic_machine=pdp10-dec - os=-tops10 + os=tops10 ;; decsystem20* | dec20*) basic_machine=pdp10-dec - os=-tops20 + os=tops20 ;; delta | 3300 | motorola-3300 | motorola-delta \ | 3300-motorola | delta-motorola) basic_machine=m68k-motorola ;; - delta88) - basic_machine=m88k-motorola - os=-sysv3 - ;; - dicos) - basic_machine=i686-pc - os=-dicos - ;; - djgpp) - basic_machine=i586-pc - os=-msdosdjgpp - ;; dpx20 | dpx20-*) basic_machine=rs6000-bull - os=-bosx + os=${os:-bosx} ;; - dpx2* | dpx2*-bull) + dpx2*) basic_machine=m68k-bull - os=-sysv3 
+ os=sysv3 ;; - ebmon29k) - basic_machine=a29k-amd - os=-ebmon + e500v[12]) + basic_machine=powerpc-unknown + os=$os"spe" ;; - elxsi) - basic_machine=elxsi-elxsi - os=-bsd + e500v[12]-*) + basic_machine=powerpc-`echo "$basic_machine" | sed 's/^[^-]*-//'` + os=$os"spe" ;; encore | umax | mmax) basic_machine=ns32k-encore ;; - es1800 | OSE68k | ose68k | ose | OSE) - basic_machine=m68k-ericsson - os=-ose + elxsi) + basic_machine=elxsi-elxsi + os=${os:-bsd} ;; fx2800) basic_machine=i860-alliant @@ -649,45 +989,13 @@ case $basic_machine in genix) basic_machine=ns32k-ns ;; - gmicro) - basic_machine=tron-gmicro - os=-sysv - ;; - go32) - basic_machine=i386-pc - os=-go32 - ;; h3050r* | hiux*) basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - h8300hms) - basic_machine=h8300-hitachi - os=-hms - ;; - h8300xray) - basic_machine=h8300-hitachi - os=-xray - ;; - h8500hms) - basic_machine=h8500-hitachi - os=-hms - ;; - harris) - basic_machine=m88k-harris - os=-sysv3 + os=hiuxwe2 ;; hp300-*) basic_machine=m68k-hp ;; - hp300bsd) - basic_machine=m68k-hp - os=-bsd - ;; - hp300hpux) - basic_machine=m68k-hp - os=-hpux - ;; hp3k9[0-9][0-9] | hp9[0-9][0-9]) basic_machine=hppa1.0-hp ;; @@ -717,193 +1025,79 @@ case $basic_machine in hp9k8[0-9][0-9] | hp8[0-9][0-9]) basic_machine=hppa1.0-hp ;; - hppa-next) - os=-nextstep3 - ;; - hppaosf) - basic_machine=hppa1.1-hp - os=-osf - ;; - hppro) - basic_machine=hppa1.1-hp - os=-proelf - ;; - i370-ibm* | ibm*) - basic_machine=i370-ibm - ;; i*86v32) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv32 + basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` + os=sysv32 ;; i*86v4*) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv4 + basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` + os=sysv4 ;; i*86v) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv + basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` + os=sysv ;; i*86sol2) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-solaris2 - ;; - i386mach) - basic_machine=i386-mach - os=-mach + basic_machine=`echo "$1" | sed -e 's/86.*/86-pc/'` + os=solaris2 ;; - i386-vsta | vsta) - basic_machine=i386-unknown - os=-vsta + j90 | j90-cray) + basic_machine=j90-cray + os=${os:-unicos} ;; iris | iris4d) basic_machine=mips-sgi case $os in - -irix*) + irix*) ;; *) - os=-irix4 + os=irix4 ;; esac ;; - isi68 | isi) - basic_machine=m68k-isi - os=-sysv - ;; - m68knommu) - basic_machine=m68k-unknown - os=-linux + leon-*|leon[3-9]-*) + basic_machine=sparc-`echo "$basic_machine" | sed 's/-.*//'` ;; m68knommu-*) - basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - m88k-omron*) - basic_machine=m88k-omron - ;; - magnum | m3230) - basic_machine=mips-mips - os=-sysv + basic_machine=m68k-`echo "$basic_machine" | sed 's/^[^-]*-//'` + os=linux ;; - merlin) - basic_machine=ns32k-utek - os=-sysv - ;; - microblaze) + microblaze*) basic_machine=microblaze-xilinx ;; - mingw64) - basic_machine=x86_64-pc - os=-mingw64 - ;; - mingw32) - basic_machine=i386-pc - os=-mingw32 - ;; - mingw32ce) - basic_machine=arm-unknown - os=-mingw32ce - ;; miniframe) basic_machine=m68000-convergent ;; - *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) + *mint | mint[0-9]* | *MiNT | *MiNT[0-9]*) basic_machine=m68k-atari - os=-mint + os=mint ;; mips3*-*) - basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` + basic_machine=`echo "$basic_machine" | sed -e 's/mips3/mips64/'` ;; mips3*) - basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown - ;; - monitor) - basic_machine=m68k-rom68k - os=-coff - 
;; - morphos) - basic_machine=powerpc-unknown - os=-morphos - ;; - msdos) - basic_machine=i386-pc - os=-msdos + basic_machine=`echo "$basic_machine" | sed -e 's/mips3/mips64/'`-unknown ;; ms1-*) - basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` - ;; - msys) - basic_machine=i386-pc - os=-msys - ;; - mvs) - basic_machine=i370-ibm - os=-mvs - ;; - nacl) - basic_machine=le32-unknown - os=-nacl - ;; - ncr3000) - basic_machine=i486-ncr - os=-sysv4 - ;; - netbsd386) - basic_machine=i386-unknown - os=-netbsd - ;; - netwinder) - basic_machine=armv4l-rebel - os=-linux - ;; - news | news700 | news800 | news900) - basic_machine=m68k-sony - os=-newsos - ;; - news1000) - basic_machine=m68030-sony - os=-newsos + basic_machine=`echo "$basic_machine" | sed -e 's/ms1-/mt-/'` ;; news-3600 | risc-news) basic_machine=mips-sony - os=-newsos - ;; - necv70) - basic_machine=v70-nec - os=-sysv + os=newsos ;; - next | m*-next ) + next | m*-next) basic_machine=m68k-next case $os in - -nextstep* ) + nextstep* ) ;; - -ns2*) - os=-nextstep2 + ns2*) + os=nextstep2 ;; *) - os=-nextstep3 + os=nextstep3 ;; esac ;; - nh3000) - basic_machine=m68k-harris - os=-cxux - ;; - nh[45]000) - basic_machine=m88k-harris - os=-cxux - ;; - nindy960) - basic_machine=i960-intel - os=-nindy - ;; - mon960) - basic_machine=i960-intel - os=-mon960 - ;; - nonstopux) - basic_machine=mips-compaq - os=-nonstopux - ;; np1) basic_machine=np1-gould ;; @@ -916,40 +1110,26 @@ case $basic_machine in nsr-tandem) basic_machine=nsr-tandem ;; + nsv-tandem) + basic_machine=nsv-tandem + ;; + nsx-tandem) + basic_machine=nsx-tandem + ;; op50n-* | op60c-*) basic_machine=hppa1.1-oki - os=-proelf + os=proelf ;; openrisc | openrisc-*) basic_machine=or32-unknown ;; - os400) - basic_machine=powerpc-ibm - os=-os400 - ;; - OSE68000 | ose68000) - basic_machine=m68000-ericsson - os=-ose - ;; - os68k) - basic_machine=m68k-none - os=-os68k - ;; pa-hitachi) basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - paragon) - basic_machine=i860-intel - os=-osf - ;; - parisc) - basic_machine=hppa-unknown - os=-linux + os=hiuxwe2 ;; parisc-*) - basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux + basic_machine=hppa-`echo "$basic_machine" | sed 's/^[^-]*-//'` + os=linux ;; pbd) basic_machine=sparc-tti @@ -964,7 +1144,7 @@ case $basic_machine in basic_machine=i386-pc ;; pc98-*) - basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=i386-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; pentium | p5 | k5 | k6 | nexgen | viac3) basic_machine=i586-pc @@ -979,16 +1159,16 @@ case $basic_machine in basic_machine=i786-pc ;; pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) - basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=i586-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; pentiumpro-* | p6-* | 6x86-* | athlon-*) - basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=i686-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) - basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=i686-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; pentium4-*) - basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=i786-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; pn) basic_machine=pn-gould @@ -998,39 +1178,27 @@ case $basic_machine in ppc | ppcbe) basic_machine=powerpc-unknown ;; ppc-* | ppcbe-*) - basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=powerpc-`echo "$basic_machine" | 
sed 's/^[^-]*-//'` ;; - ppcle | powerpclittle | ppc-le | powerpc-little) + ppcle | powerpclittle) basic_machine=powerpcle-unknown ;; ppcle-* | powerpclittle-*) - basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=powerpcle-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; ppc64) basic_machine=powerpc64-unknown ;; - ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ppc64-*) basic_machine=powerpc64-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; - ppc64le | powerpc64little | ppc64-le | powerpc64-little) + ppc64le | powerpc64little) basic_machine=powerpc64le-unknown ;; ppc64le-* | powerpc64little-*) - basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` + basic_machine=powerpc64le-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; ps2) basic_machine=i386-ibm ;; - pw32) - basic_machine=i586-unknown - os=-pw32 - ;; - rdos) - basic_machine=i386-pc - os=-rdos - ;; - rom68k) - basic_machine=m68k-rom68k - os=-coff - ;; rm[46]00) basic_machine=mips-siemens ;; @@ -1043,10 +1211,6 @@ case $basic_machine in s390x | s390x-*) basic_machine=s390x-ibm ;; - sa29200) - basic_machine=a29k-amd - os=-udi - ;; sb1) basic_machine=mipsisa64sb1-unknown ;; @@ -1055,105 +1219,32 @@ case $basic_machine in ;; sde) basic_machine=mipsisa32-sde - os=-elf - ;; - sei) - basic_machine=mips-sei - os=-seiux + os=${os:-elf} ;; sequent) basic_machine=i386-sequent ;; - sh) - basic_machine=sh-hitachi - os=-hms - ;; sh5el) basic_machine=sh5le-unknown ;; - sh64) - basic_machine=sh64-unknown + sh5el-*) + basic_machine=sh5le-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; - sparclite-wrs | simso-wrs) + simso-wrs) basic_machine=sparclite-wrs - os=-vxworks - ;; - sps7) - basic_machine=m68k-bull - os=-sysv2 + os=vxworks ;; spur) basic_machine=spur-unknown ;; - st2000) - basic_machine=m68k-tandem - ;; - stratus) - basic_machine=i860-stratus - os=-sysv4 - ;; strongarm-* | thumb-*) - basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - sun2) - basic_machine=m68000-sun - ;; - sun2os3) - basic_machine=m68000-sun - os=-sunos3 - ;; - sun2os4) - basic_machine=m68000-sun - os=-sunos4 + basic_machine=arm-`echo "$basic_machine" | sed 's/^[^-]*-//'` ;; - sun3os3) - basic_machine=m68k-sun - os=-sunos3 - ;; - sun3os4) - basic_machine=m68k-sun - os=-sunos4 - ;; - sun4os3) - basic_machine=sparc-sun - os=-sunos3 - ;; - sun4os4) - basic_machine=sparc-sun - os=-sunos4 - ;; - sun4sol2) - basic_machine=sparc-sun - os=-solaris2 - ;; - sun3 | sun3-*) - basic_machine=m68k-sun - ;; - sun4) - basic_machine=sparc-sun - ;; - sun386 | sun386i | roadrunner) - basic_machine=i386-sun - ;; - sv1) - basic_machine=sv1-cray - os=-unicos - ;; - symmetry) - basic_machine=i386-sequent - os=-dynix - ;; - t3e) - basic_machine=alphaev5-cray - os=-unicos - ;; - t90) - basic_machine=t90-cray - os=-unicos + tile*-*) ;; tile*) basic_machine=$basic_machine-unknown - os=-linux-gnu + os=${os:-linux-gnu} ;; tx39) basic_machine=mipstx39-unknown @@ -1161,146 +1252,32 @@ case $basic_machine in tx39el) basic_machine=mipstx39el-unknown ;; - toad1) - basic_machine=pdp10-xkl - os=-tops20 - ;; tower | tower-32) basic_machine=m68k-ncr ;; - tpf) - basic_machine=s390x-ibm - os=-tpf - ;; - udi29k) - basic_machine=a29k-amd - os=-udi - ;; - ultra3) - basic_machine=a29k-nyu - os=-sym1 - ;; - v810 | necv810) - basic_machine=v810-nec - os=-none - ;; - vaxv) - basic_machine=vax-dec - os=-sysv - ;; - vms) - basic_machine=vax-dec - os=-vms - ;; vpp*|vx|vx-*) basic_machine=f301-fujitsu ;; - vxworks960) - basic_machine=i960-wrs - 
os=-vxworks - ;; - vxworks68) - basic_machine=m68k-wrs - os=-vxworks - ;; - vxworks29k) - basic_machine=a29k-wrs - os=-vxworks - ;; w65*) basic_machine=w65-wdc - os=-none + os=none ;; w89k-*) basic_machine=hppa1.1-winbond - os=-proelf - ;; - xbox) - basic_machine=i686-pc - os=-mingw32 + os=proelf ;; - xps | xps100) - basic_machine=xps100-honeywell + x64) + basic_machine=x86_64-pc ;; xscale-* | xscalee[bl]-*) - basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` - ;; - ymp) - basic_machine=ymp-cray - os=-unicos - ;; - z8k-*-coff) - basic_machine=z8k-unknown - os=-sim - ;; - z80-*-coff) - basic_machine=z80-unknown - os=-sim + basic_machine=`echo "$basic_machine" | sed 's/^xscale/arm/'` ;; none) basic_machine=none-none - os=-none ;; -# Here we handle the default manufacturer of certain CPU types. It is in -# some cases the only manufacturer, in others, it is the most popular. - w89k) - basic_machine=hppa1.1-winbond - ;; - op50n) - basic_machine=hppa1.1-oki - ;; - op60c) - basic_machine=hppa1.1-oki - ;; - romp) - basic_machine=romp-ibm - ;; - mmix) - basic_machine=mmix-knuth - ;; - rs6000) - basic_machine=rs6000-ibm - ;; - vax) - basic_machine=vax-dec - ;; - pdp10) - # there are many clones, so DEC is not a safe bet - basic_machine=pdp10-unknown - ;; - pdp11) - basic_machine=pdp11-dec - ;; - we32k) - basic_machine=we32k-att - ;; - sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) - basic_machine=sh-unknown - ;; - sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) - basic_machine=sparc-sun - ;; - cydra) - basic_machine=cydra-cydrome - ;; - orion) - basic_machine=orion-highlevel - ;; - orion105) - basic_machine=clipper-highlevel - ;; - mac | mpw | mac-mpw) - basic_machine=m68k-apple - ;; - pmac | pmac-mpw) - basic_machine=powerpc-apple - ;; - *-unknown) - # Make sure to match an already-canonicalized machine name. - ;; *) - echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + echo Invalid configuration \`"$1"\': machine \`"$basic_machine"\' not recognized 1>&2 exit 1 ;; esac @@ -1308,10 +1285,10 @@ esac # Here we canonicalize certain aliases for manufacturers. case $basic_machine in *-digital*) - basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` + basic_machine=`echo "$basic_machine" | sed 's/digital.*/dec/'` ;; *-commodore*) - basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` + basic_machine=`echo "$basic_machine" | sed 's/commodore.*/cbm/'` ;; *) ;; @@ -1319,200 +1296,246 @@ esac # Decode manufacturer-specific aliases for certain operating systems. -if [ x"$os" != x"" ] +if [ x$os != x ] then case $os in - # First match some system type aliases - # that might get confused with valid system types. - # -solaris* is a basic system type, with this one exception. - -auroraux) - os=-auroraux + # First match some system type aliases that might get confused + # with valid system types. + # solaris* is a basic system type, with this one exception. + auroraux) + os=auroraux ;; - -solaris1 | -solaris1.*) - os=`echo $os | sed -e 's|solaris1|sunos4|'` + bluegene*) + os=cnk ;; - -solaris) - os=-solaris2 + solaris1 | solaris1.*) + os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; - -svr4*) - os=-sysv4 + solaris) + os=solaris2 ;; - -unixware*) - os=-sysv4.2uw + unixware*) + os=sysv4.2uw ;; - -gnu/linux*) + gnu/linux*) os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ;; - # First accept the basic system types. 
+ # es1800 is here to avoid being matched by es* (a different OS) + es1800*) + os=ose + ;; + # Some version numbers need modification + chorusos*) + os=chorusos + ;; + isc) + os=isc2.2 + ;; + sco6) + os=sco5v6 + ;; + sco5) + os=sco3.2v5 + ;; + sco4) + os=sco3.2v4 + ;; + sco3.2.[4-9]*) + os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` + ;; + sco3.2v[4-9]* | sco5v6*) + # Don't forget version if it is 3.2v4 or newer. + ;; + scout) + # Don't match below + ;; + sco*) + os=sco3.2v2 + ;; + psos*) + os=psos + ;; + # Now accept the basic system types. # The portable systems comes first. - # Each alternative MUST END IN A *, to match a version number. - # -sysv* is not here because it comes later, after sysvr4. - -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ - | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ - | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ - | -sym* | -kopensolaris* \ - | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ - | -aos* | -aros* \ - | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ - | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ - | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ - | -bitrig* | -openbsd* | -solidbsd* \ - | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ - | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ - | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ - | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ - | -chorusos* | -chorusrdb* | -cegcc* \ - | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ - | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ - | -linux-newlib* | -linux-musl* | -linux-uclibc* \ - | -uxpv* | -beos* | -mpeix* | -udk* \ - | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ - | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ - | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ - | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ - | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ - | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ - | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*) + # Each alternative MUST end in a * to match a version number. + # sysv* is not here because it comes later, after sysvr4. 
+ gnu* | bsd* | mach* | minix* | genix* | ultrix* | irix* \ + | *vms* | esix* | aix* | cnk* | sunos | sunos[34]*\ + | hpux* | unos* | osf* | luna* | dgux* | auroraux* | solaris* \ + | sym* | kopensolaris* | plan9* \ + | amigaos* | amigados* | msdos* | newsos* | unicos* | aof* \ + | aos* | aros* | cloudabi* | sortix* \ + | nindy* | vxsim* | vxworks* | ebmon* | hms* | mvs* \ + | clix* | riscos* | uniplus* | iris* | isc* | rtu* | xenix* \ + | knetbsd* | mirbsd* | netbsd* \ + | bitrig* | openbsd* | solidbsd* | libertybsd* \ + | ekkobsd* | kfreebsd* | freebsd* | riscix* | lynxos* \ + | bosx* | nextstep* | cxux* | aout* | elf* | oabi* \ + | ptx* | coff* | ecoff* | winnt* | domain* | vsta* \ + | udi* | eabi* | lites* | ieee* | go32* | aux* | hcos* \ + | chorusrdb* | cegcc* | glidix* \ + | cygwin* | msys* | pe* | moss* | proelf* | rtems* \ + | midipix* | mingw32* | mingw64* | linux-gnu* | linux-android* \ + | linux-newlib* | linux-musl* | linux-uclibc* \ + | uxpv* | beos* | mpeix* | udk* | moxiebox* \ + | interix* | uwin* | mks* | rhapsody* | darwin* \ + | openstep* | oskit* | conix* | pw32* | nonstopux* \ + | storm-chaos* | tops10* | tenex* | tops20* | its* \ + | os2* | vos* | palmos* | uclinux* | nucleus* \ + | morphos* | superux* | rtmk* | windiss* \ + | powermax* | dnix* | nx6 | nx7 | sei* | dragonfly* \ + | skyos* | haiku* | rdos* | toppers* | drops* | es* \ + | onefs* | tirtos* | phoenix* | fuchsia* | redox* | bme* \ + | midnightbsd*) # Remember, each alternative MUST END IN *, to match a version number. ;; - -qnx*) + qnx*) case $basic_machine in x86-* | i*86-*) ;; *) - os=-nto$os + os=nto-$os ;; esac ;; - -nto-qnx*) + hiux*) + os=hiuxwe2 ;; - -nto*) - os=`echo $os | sed -e 's|nto|nto-qnx|'` + nto-qnx*) ;; - -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ - | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ - | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) + nto*) + os=`echo $os | sed -e 's|nto|nto-qnx|'` ;; - -mac*) - os=`echo $os | sed -e 's|mac|macos|'` + sim | xray | os68k* | v88r* \ + | windows* | osx | abug | netware* | os9* \ + | macos* | mpw* | magic* | mmixware* | mon960* | lnews*) ;; - -linux-dietlibc) - os=-linux-dietlibc + linux-dietlibc) + os=linux-dietlibc ;; - -linux*) + linux*) os=`echo $os | sed -e 's|linux|linux-gnu|'` ;; - -sunos5*) - os=`echo $os | sed -e 's|sunos5|solaris2|'` + lynx*178) + os=lynxos178 ;; - -sunos6*) - os=`echo $os | sed -e 's|sunos6|solaris3|'` + lynx*5) + os=lynxos5 ;; - -opened*) - os=-openedition + lynx*) + os=lynxos ;; - -os400*) - os=-os400 + mac*) + os=`echo "$os" | sed -e 's|mac|macos|'` ;; - -wince*) - os=-wince + opened*) + os=openedition ;; - -osfrose*) - os=-osfrose + os400*) + os=os400 ;; - -osf*) - os=-osf + sunos5*) + os=`echo "$os" | sed -e 's|sunos5|solaris2|'` ;; - -utek*) - os=-bsd + sunos6*) + os=`echo "$os" | sed -e 's|sunos6|solaris3|'` ;; - -dynix*) - os=-bsd + wince*) + os=wince ;; - -acis*) - os=-aos + utek*) + os=bsd ;; - -atheos*) - os=-atheos + dynix*) + os=bsd ;; - -syllable*) - os=-syllable + acis*) + os=aos ;; - -386bsd) - os=-bsd + atheos*) + os=atheos ;; - -ctix* | -uts*) - os=-sysv + syllable*) + os=syllable ;; - -nova*) - os=-rtmk-nova + 386bsd) + os=bsd + ;; + ctix* | uts*) + os=sysv + ;; + nova*) + os=rtmk-nova ;; - -ns2 ) - os=-nextstep2 + ns2) + os=nextstep2 ;; - -nsk*) - os=-nsk + nsk*) + os=nsk ;; # Preserve the version number of sinix5. 
- -sinix5.*) + sinix5.*) os=`echo $os | sed -e 's|sinix|sysv|'` ;; - -sinix*) - os=-sysv4 + sinix*) + os=sysv4 ;; - -tpf*) - os=-tpf + tpf*) + os=tpf ;; - -triton*) - os=-sysv3 + triton*) + os=sysv3 ;; - -oss*) - os=-sysv3 + oss*) + os=sysv3 ;; - -svr4) - os=-sysv4 + svr4*) + os=sysv4 ;; - -svr3) - os=-sysv3 + svr3) + os=sysv3 ;; - -sysvr4) - os=-sysv4 + sysvr4) + os=sysv4 ;; - # This must come after -sysvr4. - -sysv*) + # This must come after sysvr4. + sysv*) ;; - -ose*) - os=-ose + ose*) + os=ose ;; - -es1800*) - os=-ose + *mint | mint[0-9]* | *MiNT | MiNT[0-9]*) + os=mint ;; - -xenix) - os=-xenix + zvmoe) + os=zvmoe ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - os=-mint + dicos*) + os=dicos ;; - -aros*) - os=-aros - ;; - -kaos*) - os=-kaos + pikeos*) + # Until real need of OS specific support for + # particular features comes up, bare metal + # configurations are quite functional. + case $basic_machine in + arm*) + os=eabi + ;; + *) + os=elf + ;; + esac ;; - -zvmoe) - os=-zvmoe + nacl*) ;; - -dicos*) - os=-dicos + ios) ;; - -nacl*) + none) ;; - -none) + *-eabi) ;; *) - # Get rid of the `-' at the beginning of $os. - os=`echo $os | sed 's/[^-]*-//'` - echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 + echo Invalid configuration \`"$1"\': system \`"$os"\' not recognized 1>&2 exit 1 ;; esac @@ -1530,173 +1553,179 @@ else case $basic_machine in score-*) - os=-elf + os=elf ;; spu-*) - os=-elf + os=elf ;; *-acorn) - os=-riscix1.2 + os=riscix1.2 ;; arm*-rebel) - os=-linux + os=linux ;; arm*-semi) - os=-aout + os=aout ;; c4x-* | tic4x-*) - os=-coff + os=coff + ;; + c8051-*) + os=elf + ;; + clipper-intergraph) + os=clix ;; hexagon-*) - os=-elf + os=elf ;; tic54x-*) - os=-coff + os=coff ;; tic55x-*) - os=-coff + os=coff ;; tic6x-*) - os=-coff + os=coff ;; # This must come before the *-dec entry. pdp10-*) - os=-tops20 + os=tops20 ;; pdp11-*) - os=-none + os=none ;; *-dec | vax-*) - os=-ultrix4.2 + os=ultrix4.2 ;; m68*-apollo) - os=-domain + os=domain ;; i386-sun) - os=-sunos4.0.2 + os=sunos4.0.2 ;; m68000-sun) - os=-sunos3 + os=sunos3 ;; m68*-cisco) - os=-aout + os=aout ;; mep-*) - os=-elf + os=elf ;; mips*-cisco) - os=-elf + os=elf ;; mips*-*) - os=-elf + os=elf ;; or32-*) - os=-coff + os=coff ;; *-tti) # must be before sparc entry or we get the wrong os. 
- os=-sysv3 + os=sysv3 ;; sparc-* | *-sun) - os=-sunos4.1.1 + os=sunos4.1.1 ;; - *-be) - os=-beos + pru-*) + os=elf ;; - *-haiku) - os=-haiku + *-be) + os=beos ;; *-ibm) - os=-aix + os=aix ;; *-knuth) - os=-mmixware + os=mmixware ;; *-wec) - os=-proelf + os=proelf ;; *-winbond) - os=-proelf + os=proelf ;; *-oki) - os=-proelf + os=proelf ;; *-hp) - os=-hpux + os=hpux ;; *-hitachi) - os=-hiux + os=hiux ;; i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) - os=-sysv + os=sysv ;; *-cbm) - os=-amigaos + os=amigaos ;; *-dg) - os=-dgux + os=dgux ;; *-dolphin) - os=-sysv3 + os=sysv3 ;; m68k-ccur) - os=-rtu + os=rtu ;; m88k-omron*) - os=-luna + os=luna ;; - *-next ) - os=-nextstep + *-next) + os=nextstep ;; *-sequent) - os=-ptx + os=ptx ;; *-crds) - os=-unos + os=unos ;; *-ns) - os=-genix + os=genix ;; i370-*) - os=-mvs - ;; - *-next) - os=-nextstep3 + os=mvs ;; *-gould) - os=-sysv + os=sysv ;; *-highlevel) - os=-bsd + os=bsd ;; *-encore) - os=-bsd + os=bsd ;; *-sgi) - os=-irix + os=irix ;; *-siemens) - os=-sysv4 + os=sysv4 ;; *-masscomp) - os=-rtu + os=rtu ;; f30[01]-fujitsu | f700-fujitsu) - os=-uxpv + os=uxpv ;; *-rom68k) - os=-coff + os=coff ;; *-*bug) - os=-coff + os=coff ;; *-apple) - os=-macos + os=macos ;; *-atari*) - os=-mint + os=mint + ;; + *-wrs) + os=vxworks ;; *) - os=-none + os=none ;; esac fi @@ -1707,79 +1736,82 @@ vendor=unknown case $basic_machine in *-unknown) case $os in - -riscix*) + riscix*) vendor=acorn ;; - -sunos*) + sunos*) vendor=sun ;; - -cnk*|-aix*) + cnk*|-aix*) vendor=ibm ;; - -beos*) + beos*) vendor=be ;; - -hpux*) + hpux*) vendor=hp ;; - -mpeix*) + mpeix*) vendor=hp ;; - -hiux*) + hiux*) vendor=hitachi ;; - -unos*) + unos*) vendor=crds ;; - -dgux*) + dgux*) vendor=dg ;; - -luna*) + luna*) vendor=omron ;; - -genix*) + genix*) vendor=ns ;; - -mvs* | -opened*) + clix*) + vendor=intergraph + ;; + mvs* | opened*) vendor=ibm ;; - -os400*) + os400*) vendor=ibm ;; - -ptx*) + ptx*) vendor=sequent ;; - -tpf*) + tpf*) vendor=ibm ;; - -vxsim* | -vxworks* | -windiss*) + vxsim* | vxworks* | windiss*) vendor=wrs ;; - -aux*) + aux*) vendor=apple ;; - -hms*) + hms*) vendor=hitachi ;; - -mpw* | -macos*) + mpw* | macos*) vendor=apple ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + *mint | mint[0-9]* | *MiNT | MiNT[0-9]*) vendor=atari ;; - -vos*) + vos*) vendor=stratus ;; esac - basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` + basic_machine=`echo "$basic_machine" | sed "s/unknown/$vendor/"` ;; esac -echo $basic_machine$os +echo "$basic_machine-$os" exit # Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) +# eval: (add-hook 'before-save-hook 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" From d277442df53a01343ba7c1df0bbd2a294058dcba Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 17 Aug 2018 12:59:23 +0200 Subject: [PATCH 1154/2196] Fix parser/lexer generation with parallel make Fun fact: rules with multiple targets don't work properly with 'make -j'. For example, a rule like a b: c touch a b is equivalent to a: c touch a b b: c touch a b so with 'make -j', the 'touch' command will be run twice. See e.g. https://stackoverflow.com/questions/2973445/gnu-makefile-rule-generating-a-few-targets-from-a-single-source-file. 
--- src/libexpr/local.mk | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index daa3258f0d3..78c8b0deae6 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -20,10 +20,14 @@ libexpr_LDFLAGS_PROPAGATED = $(BDW_GC_LIBS) libexpr_ORDER_AFTER := $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh -$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y +$(d)/parser-tab.hh: $(d)/parser-tab.cc + +$(d)/parser-tab.cc: $(d)/parser.y $(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d -$(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l +$(d)/lexer-tab.hh: $(d)/lexer-tab.cc + +$(d)/lexer-tab.cc: $(d)/lexer.l $(trace-gen) flex --outfile $(libexpr_DIR)/lexer-tab.cc --header-file=$(libexpr_DIR)/lexer-tab.hh $< clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh From 02098d2073e2f7c06b6d05c6749ae2b76b7f57d5 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 17 Aug 2018 10:29:32 -0400 Subject: [PATCH 1155/2196] fetchGit: use a better caching scheme The current usage technically works by putting multiple different repos in to the same git directory. However, it is very slow as Git tries very hard to find common commits between the two repositories. If the two repositories are large (like Nixpkgs and another long-running project,) it is maddeningly slow. This change busts the cache for existing deployments, but users will be promptly repaid in per-repository performance. --- src/libexpr/primops/fetchGit.cc | 4 +++- tests/fetchGit.sh | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index 7aa98e0bfab..aeb2df5f8ae 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -3,6 +3,7 @@ #include "download.hh" #include "store-api.hh" #include "pathlocks.hh" +#include "hash.hh" #include @@ -84,9 +85,10 @@ GitInfo exportGit(ref store, const std::string & uri, if (rev != "" && !std::regex_match(rev, revRegex)) throw Error("invalid Git revision '%s'", rev); - Path cacheDir = getCacheDir() + "/nix/git"; + Path cacheDir = getCacheDir() + "/nix/gitv2/" + hashString(htSHA256, uri).to_string(Base32, false); if (!pathExists(cacheDir)) { + createDirs(dirOf(cacheDir)); runProgram("git", true, { "init", "--bare", cacheDir }); } diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index 530ac7bb813..4c46bdf0465 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -9,7 +9,7 @@ clearStore repo=$TEST_ROOT/git -rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/git +rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/gitv2 git init $repo git -C $repo config user.email "foobar@example.com" @@ -129,7 +129,7 @@ path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outP # Nuke the cache -rm -rf $TEST_HOME/.cache/nix/git +rm -rf $TEST_HOME/.cache/nix/gitv2 # Try again, but without 'git' on PATH NIX=$(command -v nix) From 9b1bdf2db8798c024e6fbe84d4144c04f61d80c7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sun, 19 Aug 2018 11:59:49 +0200 Subject: [PATCH 1156/2196] FIx floating point evaluation Fixes #2361. 
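For context, a sketch of the pattern this patch applies to the arithmetic primops (it mirrors the hunks below; the integer branch with mkInt/forceInt is assumed by analogy and is not shown in the hunk, and the snippet depends on libexpr's internal headers rather than being standalone): each operand is forced before its type is inspected, so a float hiding behind a not-yet-evaluated thunk is no longer dispatched to the integer branch.

    // Sketch, not the verbatim change: force both operands, then dispatch on type.
    static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value & v)
    {
        state.forceValue(*args[0], pos);   // without this, args[0] may still be an unevaluated thunk
        state.forceValue(*args[1], pos);
        if (args[0]->type == tFloat || args[1]->type == tFloat)
            mkFloat(v, state.forceFloat(*args[0], pos) + state.forceFloat(*args[1], pos));
        else
            mkInt(v, state.forceInt(*args[0], pos) + state.forceInt(*args[1], pos)); // assumed integer branch
    }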
--- src/libexpr/primops.cc | 9 +++++++++ tests/lang/eval-okay-float.exp | 1 + tests/lang/eval-okay-float.nix | 6 ++++++ 3 files changed, 16 insertions(+) create mode 100644 tests/lang/eval-okay-float.exp create mode 100644 tests/lang/eval-okay-float.nix diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 8ace6db4d11..6f82c6c404f 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1680,6 +1680,8 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value & v) { + state.forceValue(*args[0], pos); + state.forceValue(*args[1], pos); if (args[0]->type == tFloat || args[1]->type == tFloat) mkFloat(v, state.forceFloat(*args[0], pos) + state.forceFloat(*args[1], pos)); else @@ -1689,6 +1691,8 @@ static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value & static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value & v) { + state.forceValue(*args[0], pos); + state.forceValue(*args[1], pos); if (args[0]->type == tFloat || args[1]->type == tFloat) mkFloat(v, state.forceFloat(*args[0], pos) - state.forceFloat(*args[1], pos)); else @@ -1698,6 +1702,8 @@ static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value & static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value & v) { + state.forceValue(*args[0], pos); + state.forceValue(*args[1], pos); if (args[0]->type == tFloat || args[1]->type == tFloat) mkFloat(v, state.forceFloat(*args[0], pos) * state.forceFloat(*args[1], pos)); else @@ -1707,6 +1713,9 @@ static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value & static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value & v) { + state.forceValue(*args[0], pos); + state.forceValue(*args[1], pos); + NixFloat f2 = state.forceFloat(*args[1], pos); if (f2 == 0) throw EvalError(format("division by zero, at %1%") % pos); diff --git a/tests/lang/eval-okay-float.exp b/tests/lang/eval-okay-float.exp new file mode 100644 index 00000000000..3c50a8adce8 --- /dev/null +++ b/tests/lang/eval-okay-float.exp @@ -0,0 +1 @@ +[ 3.4 3.5 2.5 1.5 ] diff --git a/tests/lang/eval-okay-float.nix b/tests/lang/eval-okay-float.nix new file mode 100644 index 00000000000..b2702c7b166 --- /dev/null +++ b/tests/lang/eval-okay-float.nix @@ -0,0 +1,6 @@ +[ + (1.1 + 2.3) + (builtins.add (0.5 + 0.5) (2.0 + 0.5)) + ((0.5 + 0.5) * (2.0 + 0.5)) + ((1.5 + 1.5) / (0.5 * 4.0)) +] From cc7b4386b16885a22ccabb019381539fecb00230 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sun, 19 Aug 2018 12:05:08 +0200 Subject: [PATCH 1157/2196] nix run: Restore CPU affinity Fixes #2359. --- src/nix/run.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/nix/run.cc b/src/nix/run.cc index 65ced34759b..35b76334587 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -7,6 +7,7 @@ #include "finally.hh" #include "fs-accessor.hh" #include "progress-bar.hh" +#include "affinity.hh" #if __linux__ #include @@ -154,6 +155,8 @@ struct CmdRun : InstallablesCommand restoreSignals(); + restoreAffinity(); + /* If this is a diverted store (i.e. its "logical" location (typically /nix/store) differs from its "physical" location (e.g. 
/home/eelco/nix/store), then run the command in a From 458282be5946318086b74dfa5aedb91e6f0477aa Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 20 Aug 2018 01:51:23 +0300 Subject: [PATCH 1158/2196] Drop all references to --disable-init-state It's all dead code since 2014 (commit 0c6d62cf27b3b2). --- configure.ac | 6 ------ release-common.nix | 2 +- release.nix | 4 ---- 3 files changed, 1 insertion(+), 11 deletions(-) diff --git a/configure.ac b/configure.ac index 6aeeacdba13..cc354f6f302 100644 --- a/configure.ac +++ b/configure.ac @@ -229,12 +229,6 @@ if test "$gc" = yes; then fi -AC_ARG_ENABLE(init-state, AC_HELP_STRING([--disable-init-state], - [do not initialise DB etc. in `make install']), - init_state=$enableval, init_state=yes) -#AM_CONDITIONAL(INIT_STATE, test "$init_state" = "yes") - - # documentation generation switch AC_ARG_ENABLE(doc-gen, AC_HELP_STRING([--disable-doc-gen], [disable documentation generation]), diff --git a/release-common.nix b/release-common.nix index e23967bdbfd..ace2a4f9b91 100644 --- a/release-common.nix +++ b/release-common.nix @@ -30,7 +30,7 @@ rec { }); configureFlags = - [ "--disable-init-state" + [ "--enable-gc" ] ++ lib.optionals stdenv.isLinux [ "--with-sandbox-shell=${sh}/bin/busybox" diff --git a/release.nix b/release.nix index 076f1de8dea..5c3fdf55ca3 100644 --- a/release.nix +++ b/release.nix @@ -189,10 +189,6 @@ let buildInputs = buildDeps; - configureFlags = '' - --disable-init-state - ''; - dontInstall = false; doInstallCheck = true; From 2894197de768bf5433595886d0248e77ffa3c693 Mon Sep 17 00:00:00 2001 From: Tuomas Tynkkynen Date: Mon, 20 Aug 2018 02:01:59 +0300 Subject: [PATCH 1159/2196] Drop all references to NIX_INDENT_MAKE Dead code since https://github.com/NixOS/nixpkgs/commit/6669a3b47711dc967df0ea8ff93fa9857aad015d --- release.nix | 4 ---- src/nix-build/nix-build.cc | 1 - 2 files changed, 5 deletions(-) diff --git a/release.nix b/release.nix index 5c3fdf55ca3..58809ba8ab0 100644 --- a/release.nix +++ b/release.nix @@ -74,8 +74,6 @@ let makeFlags = "profiledir=$(out)/etc/profile.d"; - preBuild = "unset NIX_INDENT_MAKE"; - installFlags = "sysconfdir=$(out)/etc"; doInstallCheck = true; @@ -103,8 +101,6 @@ let enableParallelBuilding = true; postUnpack = "sourceRoot=$sourceRoot/perl"; - - preBuild = "unset NIX_INDENT_MAKE"; }); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 34f1cba9dda..21a0756a200 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -419,7 +419,6 @@ void mainWrapped(int argc, char * * argv) R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s" "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; " "unset NIX_ENFORCE_PURITY; " - "unset NIX_INDENT_MAKE; " "shopt -u nullglob; " "unset TZ; %4%" "%5%", From 17a92dfb7d87499d7aed5d3b0504d4c724db18a3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Aug 2018 15:19:20 +0200 Subject: [PATCH 1160/2196] Fix another 'coroutine has finished' during decompression https://hydra.nixos.org/build/79867739 --- src/libstore/binary-cache-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 9c75c85993f..4527ee6ba66 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -232,7 +232,7 @@ void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink) throw SubstituteGone(e.what()); } - decompressor->flush(); + decompressor->finish(); 
stats.narRead++; //stats.narReadCompressedBytes += nar->size(); // FIXME From 6317c65937ad7d9c5a954c4dfc48de259e55f4f3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Aug 2018 15:20:23 +0200 Subject: [PATCH 1161/2196] Fix warnings in compression.cc --- src/libutil/compression.cc | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 53b62f62a76..204c63cd26f 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -15,12 +15,10 @@ namespace nix { -static const size_t bufSize = 32 * 1024; - // Don't feed brotli too much at once. struct ChunkedCompressionSink : CompressionSink { - uint8_t outbuf[BUFSIZ]; + uint8_t outbuf[32 * 1024]; void write(const unsigned char * data, size_t len) override { @@ -124,7 +122,7 @@ struct BzipDecompressionSink : ChunkedCompressionSink write(nullptr, 0); } - void writeInternal(const unsigned char * data, size_t len) + void writeInternal(const unsigned char * data, size_t len) override { assert(len <= std::numeric_limits::max()); @@ -173,7 +171,7 @@ struct BrotliDecompressionSink : ChunkedCompressionSink writeInternal(nullptr, 0); } - void writeInternal(const unsigned char * data, size_t len) + void writeInternal(const unsigned char * data, size_t len) override { const uint8_t * next_in = data; size_t avail_in = len; @@ -330,7 +328,7 @@ struct BzipCompressionSink : ChunkedCompressionSink writeInternal(nullptr, 0); } - void writeInternal(const unsigned char * data, size_t len) + void writeInternal(const unsigned char * data, size_t len) override { assert(len <= std::numeric_limits::max()); @@ -380,7 +378,7 @@ struct BrotliCompressionSink : ChunkedCompressionSink writeInternal(nullptr, 0); } - void writeInternal(const unsigned char * data, size_t len) + void writeInternal(const unsigned char * data, size_t len) override { const uint8_t * next_in = data; size_t avail_in = len; From ebe3d2d3704aca001ff818295af014581dcf348f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Aug 2018 15:22:04 +0200 Subject: [PATCH 1162/2196] Improve 'coroutine has finished' error message --- src/libstore/store-api.cc | 2 ++ src/libutil/serialise.cc | 15 +++++++++------ src/libutil/serialise.hh | 6 +++++- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 7a4a5f5eb85..1f42097fccf 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -609,6 +609,8 @@ void copyStorePath(ref srcStore, ref dstStore, act.progress(total, info->narSize); }); srcStore->narFromPath({storePath}, wrapperSink); + }, [&]() { + throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri()); }); dstStore->addToStore(*info, *source, repair, checkSigs); diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index b2c49d911b3..17448f70efb 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -161,16 +161,20 @@ size_t StringSource::read(unsigned char * data, size_t len) #error Coroutines are broken in this version of Boost! 
#endif -std::unique_ptr sinkToSource(std::function fun) +std::unique_ptr sinkToSource( + std::function fun, + std::function eof) { struct SinkToSource : Source { typedef boost::coroutines2::coroutine coro_t; + std::function eof; coro_t::pull_type coro; - SinkToSource(std::function fun) - : coro([&](coro_t::push_type & yield) { + SinkToSource(std::function fun, std::function eof) + : eof(eof) + , coro([&](coro_t::push_type & yield) { LambdaSink sink([&](const unsigned char * data, size_t len) { if (len) yield(std::string((const char *) data, len)); }); @@ -184,8 +188,7 @@ std::unique_ptr sinkToSource(std::function fun) size_t read(unsigned char * data, size_t len) override { - if (!coro) - throw EndOfFile("coroutine has finished"); + if (!coro) { eof(); abort(); } if (pos == cur.size()) { if (!cur.empty()) coro(); @@ -201,7 +204,7 @@ std::unique_ptr sinkToSource(std::function fun) } }; - return std::make_unique(fun); + return std::make_unique(fun, eof); } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 14b62fdb677..4b6ad5da5b9 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -214,7 +214,11 @@ struct LambdaSource : Source /* Convert a function that feeds data into a Sink into a Source. The Source executes the function as a coroutine. */ -std::unique_ptr sinkToSource(std::function fun); +std::unique_ptr sinkToSource( + std::function fun, + std::function eof = []() { + throw EndOfFile("coroutine has finished"); + }); void writePadding(size_t len, Sink & sink); From 954d1f4d0a35063ff431b258beebadf753cb9efe Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Aug 2018 15:33:29 +0200 Subject: [PATCH 1163/2196] Disable the Ubuntu 17.10 build Ubuntu 17.10 doesn't have libbrotli. https://hydra.nixos.org/build/79867741 --- release.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/release.nix b/release.nix index 076f1de8dea..7ce84e445ab 100644 --- a/release.nix +++ b/release.nix @@ -212,8 +212,8 @@ let #deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" ]; #deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" ]; - deb_ubuntu1710i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1710i386) [ ] [ "libsodium18" ]; - deb_ubuntu1710x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1710x86_64) [ ] [ "libsodium18" "libboost-context1.62.0" ]; + #deb_ubuntu1710i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1710i386) [ ] [ "libsodium18" ]; + #deb_ubuntu1710x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1710x86_64) [ ] [ "libsodium18" "libboost-context1.62.0" ]; # System tests. 
From 8ad2defdf0b33c34e48773b501e35bfdbbb936f7 Mon Sep 17 00:00:00 2001 From: Erik Arvstedt Date: Tue, 21 Aug 2018 21:44:37 +0200 Subject: [PATCH 1164/2196] Docs: Fix install prefix shell.nix defines the install prefix as $(pwd)/inst --- doc/manual/hacking.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/hacking.xml b/doc/manual/hacking.xml index 183aed7adff..b671811d3a3 100644 --- a/doc/manual/hacking.xml +++ b/doc/manual/hacking.xml @@ -30,7 +30,7 @@ To build Nix itself in this shell: [nix-shell]$ configurePhase [nix-shell]$ make -To install it in $(pwd)/nix and test it: +To install it in $(pwd)/inst and test it: [nix-shell]$ make install [nix-shell]$ make installcheck From c651b7bdc996a18688c5e5dd5dd84eeeb8d3376d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 23 Aug 2018 00:23:35 +0200 Subject: [PATCH 1165/2196] Revert "Fix parser/lexer generation with parallel make" This reverts commit d277442df53a01343ba7c1df0bbd2a294058dcba. Make sucks. --- src/libexpr/local.mk | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index 78c8b0deae6..daa3258f0d3 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -20,14 +20,10 @@ libexpr_LDFLAGS_PROPAGATED = $(BDW_GC_LIBS) libexpr_ORDER_AFTER := $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh -$(d)/parser-tab.hh: $(d)/parser-tab.cc - -$(d)/parser-tab.cc: $(d)/parser.y +$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y $(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d -$(d)/lexer-tab.hh: $(d)/lexer-tab.cc - -$(d)/lexer-tab.cc: $(d)/lexer.l +$(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l $(trace-gen) flex --outfile $(libexpr_DIR)/lexer-tab.cc --header-file=$(libexpr_DIR)/lexer-tab.hh $< clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh From 414397759a5390b54cee12c6834e6106bea12f5f Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Sat, 25 Aug 2018 20:25:43 +0200 Subject: [PATCH 1166/2196] upgrade-nix: add --dry-run --- src/nix/upgrade-nix.cc | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index e23ae792369..3417adb62c9 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -1,4 +1,5 @@ #include "command.hh" +#include "common-args.hh" #include "store-api.hh" #include "download.hh" #include "eval.hh" @@ -6,7 +7,7 @@ using namespace nix; -struct CmdUpgradeNix : StoreCommand +struct CmdUpgradeNix : MixDryRun, StoreCommand { Path profileDir; @@ -61,21 +62,25 @@ struct CmdUpgradeNix : StoreCommand { Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", storePath)); - store->ensurePath(storePath); + if (!dryRun) + store->ensurePath(storePath); } { Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", storePath)); - auto program = storePath + "/bin/nix-env"; - auto s = runProgram(program, false, {"--version"}); - if (s.find("Nix") == std::string::npos) - throw Error("could not verify that '%s' works", program); + if (!dryRun) { + auto program = storePath + "/bin/nix-env"; + auto s = runProgram(program, false, {"--version"}); + if (s.find("Nix") == std::string::npos) + throw Error("could not verify that '%s' works", program); + } } { Activity act(*logger, lvlInfo, actUnknown, fmt("installing '%s' into profile '%s'...", storePath, profileDir)); - runProgram(settings.nixBinDir + "/nix-env", false, - {"--profile", profileDir, 
"-i", storePath, "--no-sandbox"}); + if (!dryRun) + runProgram(settings.nixBinDir + "/nix-env", false, + {"--profile", profileDir, "-i", storePath, "--no-sandbox"}); } } From d85bb4814f4fa2ed6046bba6e8c6394adf9e5666 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Sat, 25 Aug 2018 20:33:01 +0200 Subject: [PATCH 1167/2196] upgrade-nix: resolve profile symlinks The profile present in PATH is not necessarily the actual profile location. User profiles are generally added as $HOME/.nix-profile in which case the indirect profile link needs to be resolved first. /home/user/.nix-profile -> /nix/var/nix/profiles/per-user/user/profile /nix/var/nix/profiles/per-user/user/profile -> profile-15-link /nix/var/nix/profiles/per-user/user/profile-14-link -> /nix/store/hyi4kkjh3bwi2z3wfljrkfymz9904h62-user-environment /nix/var/nix/profiles/per-user/user/profile-15-link -> /nix/store/6njpl3qvihz46vj911pwx7hfcvwhifl9-user-environment To upgrade nix here we want /nix/var/nix/profiles/per-user/user/profile-16-link instead of /home/user/.nix-profile-1-link. The latter is not a gcroot and would be garbage collected, resulting in a broken profile. Fixes #2175 --- src/nix/upgrade-nix.cc | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 3417adb62c9..72b298283a6 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -103,11 +103,18 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand if (hasPrefix(where, "/run/current-system")) throw Error("Nix on NixOS must be upgraded via 'nixos-rebuild'"); - Path profileDir; - Path userEnv; + Path profileDir = dirOf(where); + + // Resolve profile to /nix/var/nix/profiles/ link. + while (baseNameOf(dirOf(canonPath(profileDir))) != "profiles") + profileDir = readLink(profileDir); + + printInfo("found profile '%s'", profileDir); + + Path userEnv = canonPath(profileDir, true); if (baseNameOf(where) != "bin" || - !hasSuffix(userEnv = canonPath(profileDir = dirOf(where), true), "user-environment")) + !hasSuffix(userEnv, "user-environment")) throw Error("directory '%s' does not appear to be part of a Nix profile", where); if (!store->isValidPath(userEnv)) From 0ad643ed5c9642738a8e8b37b7e4b1835f160bc8 Mon Sep 17 00:00:00 2001 From: aszlig Date: Wed, 29 Aug 2018 00:23:51 +0200 Subject: [PATCH 1168/2196] libexpr: Use int64_t for NixInt Using a 64bit integer on 32bit systems will come with a bit of a performance overhead, but given that Nix doesn't use a lot of integers compared to other types, I think the overhead is negligible also considering that 32bit systems are in decline. The biggest advantage however is that when we use a consistent integer size across all platforms it's less likely that we miss things that we break due to that. One example would be: https://github.com/NixOS/nixpkgs/pull/44233 On Hydra it will evaluate, because the evaluator runs on a 64bit machine, but when evaluating the same on a 32bit machine it will fail, so using 64bit integers should make that consistent. While the change of the type in value.hh is rather easy to do, we have a few more options available for doing the conversion in the lexer: * Via an #ifdef on the architecture and using strtol() or strtoll() accordingly depending on which architecture we are. For the #ifdef we would need another AX_COMPILE_CHECK_SIZEOF in configure.ac. * Using istringstream, which would involve copying the value. * As we're already using boost, lexical_cast might be a good idea. 
Spoiler: I went for the latter, first of all because lexical_cast does have an overload for const char* and second of all, because it doesn't involve copying around the input string. Also, because istringstream seems to come with a bigger overhead than boost::lexical_cast: https://www.boost.org/doc/libs/release/doc/html/boost_lexical_cast/performance.html The first method (still using strtol/strtoll) also wasn't something I pursued further, because it is also locale-aware which I doubt is what we want, given that the regex for int is [0-9]+. Signed-off-by: aszlig Fixes: #2339 --- src/libexpr/lexer.l | 8 ++++++-- src/libexpr/value.hh | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 29ca327c1e4..a052447d3dc 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -12,6 +12,8 @@ %{ +#include + #include "nixexpr.hh" #include "parser-tab.hh" @@ -124,9 +126,11 @@ or { return OR_KW; } {ID} { yylval->id = strdup(yytext); return ID; } {INT} { errno = 0; - yylval->n = strtol(yytext, 0, 10); - if (errno != 0) + try { + yylval->n = boost::lexical_cast(yytext); + } catch (const boost::bad_lexical_cast &) { throw ParseError(format("invalid integer '%1%'") % yytext); + } return INT; } {FLOAT} { errno = 0; diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 809772f7c08..e1ec87d3b84 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -43,7 +43,7 @@ class XMLWriter; class JSONPlaceholder; -typedef long NixInt; +typedef int64_t NixInt; typedef double NixFloat; /* External values must descend from ExternalValueBase, so that From 54df4bb0b5d45a17945d8c4b0e38cd4ae167a732 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Wed, 29 Aug 2018 11:51:51 +0100 Subject: [PATCH 1169/2196] nix.conf: mention -j0 is useful --- doc/manual/command-ref/conf-file.xml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 1865bb37c86..3851cf18d7f 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -437,7 +437,8 @@ builtins.fetchurl { This option defines the maximum number of jobs that Nix will try to build in parallel. The default is 1. The special value auto - causes Nix to use the number of CPUs in your system. It can be + causes Nix to use the number of CPUs in your system. 0 + is useful when using remote builders to prevent any local builds. It can be overridden using the () command line switch. From d16ff76c69318c4ef04b1dcc873a72910b8eca49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Wed, 29 Aug 2018 14:33:14 +0100 Subject: [PATCH 1170/2196] nix.conf: clarify -j0 doesn't affect preferLocalBuild --- doc/manual/command-ref/conf-file.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 3851cf18d7f..6a23b8f1fda 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -438,7 +438,9 @@ builtins.fetchurl { that Nix will try to build in parallel. The default is 1. The special value auto causes Nix to use the number of CPUs in your system. 0 - is useful when using remote builders to prevent any local builds. It can be + is useful when using remote builders to prevent any local builds (except for + preferLocalBuild derivation attribute which executes locally + regardless). 
It can be overridden using the () command line switch. From 245d01701d421d5c6aafd2b377a7b14c254f788c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 15:21:58 +0200 Subject: [PATCH 1171/2196] upload-release.pl: Handle prerelease versions --- maintainers/upload-release.pl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index 9b0a09e6c83..8432c95960c 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -91,6 +91,8 @@ sub downloadFile { downloadFile("binaryTarball.x86_64-darwin", "1"); downloadFile("installerScript", "1"); +exit if $version =~ /pre/; + # Update Nixpkgs in a very hacky way. system("cd $nixpkgsDir && git pull") == 0 or die; my $oldName = `nix-instantiate --eval $nixpkgsDir -A nix.name`; chomp $oldName; From 64d7d1a884838f6a15fd18d90f73a8061832af57 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 15:28:18 +0200 Subject: [PATCH 1172/2196] Update release notes --- doc/manual/release-notes/rl-2.1.xml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml index c628d04a707..09b4f92e199 100644 --- a/doc/manual/release-notes/rl-2.1.xml +++ b/doc/manual/release-notes/rl-2.1.xml @@ -4,7 +4,7 @@ version="5.0" xml:id="ssec-relnotes-2.1"> -Release 2.1 (2018-08-??) +Release 2.1 (2018-08-31) This is primarily a bug fix release. It also reduces memory consumption in certain situations. In addition, it has the following @@ -41,8 +41,12 @@ new features: nix-daemon now respects - , so it can be run as a non-root - user. + . + + + + nix run now respects + nix-support/propagated-user-env-packages. From 145db703e58fa8438413cb67a1361020850ce10e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 16:03:06 +0200 Subject: [PATCH 1173/2196] Remove boost from the closure This reduces the size of the closure by 45 MiB. --- release.nix | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/release.nix b/release.nix index beb8b5aa97c..f9ee07e3478 100644 --- a/release.nix +++ b/release.nix @@ -67,6 +67,14 @@ let buildInputs = buildDeps; + preConfigure = + # Copy libboost_context so we don't get all of Boost in our closure. + # https://github.com/NixOS/nixpkgs/issues/45462 + '' + mkdir -p $out/lib + cp ${boost}/lib/libboost_context* $out/lib + ''; + configureFlags = configureFlags ++ [ "--sysconfdir=/etc" ]; From 5e83b0227f024c63fd1adb7995d17a8a1604b10c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 16:14:19 +0200 Subject: [PATCH 1174/2196] nix: Remove the -h flag --- src/nix/main.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/nix/main.cc b/src/nix/main.cc index 9cd5d21c84b..69791e223c2 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -24,7 +24,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs { mkFlag() .longName("help") - .shortName('h') .description("show usage information") .handler([&]() { showHelpAndExit(); }); From 3407a5d9361611ea8b5da9e02a7a1e2f90c928cd Mon Sep 17 00:00:00 2001 From: Benjamin Hipple Date: Sat, 25 Aug 2018 18:35:15 -0400 Subject: [PATCH 1175/2196] Add human readable closure sizes to nix path-info Unfortunately, -h is already taken as a short option by --help, so we have to use a different letter or the capitalized version. 
Resolves #2363 --- src/nix/path-info.cc | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 47caa401d3c..ee48d483c7e 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -13,12 +13,14 @@ struct CmdPathInfo : StorePathsCommand, MixJSON { bool showSize = false; bool showClosureSize = false; + bool humanReadable = false; bool showSigs = false; CmdPathInfo() { mkFlag('s', "size", "print size of the NAR dump of each path", &showSize); mkFlag('S', "closure-size", "print sum size of the NAR dumps of the closure of each path", &showClosureSize); + mkFlag('H', "human-readable", "with -s and -S, print sizes like 1K 234M 5.67G etc.", &humanReadable); mkFlag(0, "sigs", "show signatures", &showSigs); } @@ -39,6 +41,10 @@ struct CmdPathInfo : StorePathsCommand, MixJSON "To show the closure sizes of every path in the current NixOS system closure, sorted by size:", "nix path-info -rS /run/current-system | sort -nk2" }, + Example{ + "To show a package's closure size and all its dependencies with human readable sizes:", + "nix path-info -rsSH nixpkgs.rust" + }, Example{ "To check the existence of a path in a binary cache:", "nix path-info -r /nix/store/7qvk5c91...-geeqie-1.1 --store https://cache.nixos.org/" @@ -58,6 +64,25 @@ struct CmdPathInfo : StorePathsCommand, MixJSON }; } + void printSize(int value) + { + if (!humanReadable) { + std::cout << '\t' << std::setw(11) << value; + return; + } + + static constexpr std::array idents = { + ' ', 'K', 'M', 'G', 'T', 'P' + }; + size_t power = 0; + double res = value; + while (res > 1024) { + ++power; + res /= 1024; + } + std::cout << '\t' << std::setw(11) << std::setprecision(3) << res << idents[power]; + } + void run(ref store, Paths storePaths) override { size_t pathLen = 0; @@ -81,10 +106,10 @@ struct CmdPathInfo : StorePathsCommand, MixJSON std::cout << storePath << std::string(std::max(0, (int) pathLen - (int) storePath.size()), ' '); if (showSize) - std::cout << '\t' << std::setw(11) << info->narSize; + printSize(info->narSize); if (showClosureSize) - std::cout << '\t' << std::setw(11) << store->getClosureSize(storePath).first; + printSize(store->getClosureSize(storePath).first); if (showSigs) { std::cout << '\t'; From c908df881f105d6a1004ae6a0e39da2d0149f3e9 Mon Sep 17 00:00:00 2001 From: Benjamin Hipple Date: Sun, 26 Aug 2018 18:12:06 -0400 Subject: [PATCH 1176/2196] Avoid overflow and use boost::format If the user has an object greater than 1024 yottabytes, it'll just display it as N yottabytes instead of overflowing. Swaps to use boost::format strings instead of std::setw and std::setprecision. 
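A standalone sketch of the human-readable formatting these path-info patches converge on (illustrative only: the byte counts are made up, and the real implementation is in the path-info.cc hunks in this and the following patch): divide by 1024 until the value fits, pick the matching unit suffix, and clamp at the last suffix instead of running off the end of the table.

    #include <array>
    #include <cstdint>
    #include <iostream>
    #include <boost/format.hpp>

    int main()
    {
        static const std::array<char, 9> units{{' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'}};
        for (uint64_t value : {(uint64_t) 834, (uint64_t) 123456789, (uint64_t) 5678901234567}) {
            double res = value;
            size_t power = 0;
            while (res > 1024 && power + 1 < units.size()) {   // clamp at 'Y'
                ++power;
                res /= 1024;
            }
            std::cout << boost::format("%6.1f%c") % res % units[power] << "\n";
        }
        return 0;
    }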
--- src/nix/path-info.cc | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index ee48d483c7e..fa4f45fb429 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -4,8 +4,9 @@ #include "json.hh" #include "common-args.hh" -#include #include +#include +#include using namespace nix; @@ -67,20 +68,20 @@ struct CmdPathInfo : StorePathsCommand, MixJSON void printSize(int value) { if (!humanReadable) { - std::cout << '\t' << std::setw(11) << value; + std::cout << '\t' << boost::format("%11d") % value; return; } - static constexpr std::array idents = { - ' ', 'K', 'M', 'G', 'T', 'P' + static constexpr std::array idents = { + ' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y' }; size_t power = 0; double res = value; - while (res > 1024) { + while (res > 1024 && power < idents.size()) { ++power; res /= 1024; } - std::cout << '\t' << std::setw(11) << std::setprecision(3) << res << idents[power]; + std::cout << '\t' << boost::format("%11.1f") % res << idents[power]; } void run(ref store, Paths storePaths) override From 99828245f8f03a8b3b54a04f67e6df31235838f4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 16:18:53 +0200 Subject: [PATCH 1177/2196] printSize() fixes Fix a 32-bit overflow that resulted in negative numbers being printed; use fmt() instead of boost::format(); change -H to -h for consistency with 'ls' and 'du'; make the columns narrower (since they can't be bigger than 1024.0). --- src/nix/path-info.cc | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index fa4f45fb429..916ed360e62 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -5,8 +5,6 @@ #include "common-args.hh" #include -#include -#include using namespace nix; @@ -21,7 +19,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON { mkFlag('s', "size", "print size of the NAR dump of each path", &showSize); mkFlag('S', "closure-size", "print sum size of the NAR dumps of the closure of each path", &showClosureSize); - mkFlag('H', "human-readable", "with -s and -S, print sizes like 1K 234M 5.67G etc.", &humanReadable); + mkFlag('h', "human-readable", "with -s and -S, print sizes like 1K 234M 5.67G etc.", &humanReadable); mkFlag(0, "sigs", "show signatures", &showSigs); } @@ -44,7 +42,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON }, Example{ "To show a package's closure size and all its dependencies with human readable sizes:", - "nix path-info -rsSH nixpkgs.rust" + "nix path-info -rsSh nixpkgs.rust" }, Example{ "To check the existence of a path in a binary cache:", @@ -65,10 +63,10 @@ struct CmdPathInfo : StorePathsCommand, MixJSON }; } - void printSize(int value) + void printSize(unsigned long long value) { if (!humanReadable) { - std::cout << '\t' << boost::format("%11d") % value; + std::cout << fmt("\t%11d", value); return; } @@ -81,7 +79,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON ++power; res /= 1024; } - std::cout << '\t' << boost::format("%11.1f") % res << idents[power]; + std::cout << fmt("\t%6.1f%c", res, idents.at(power)); } void run(ref store, Paths storePaths) override From 264e66f69699919f1040be2986b91fd6b188e9af Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 17:10:28 +0200 Subject: [PATCH 1178/2196] Add simple test for nix upgrade-nix --- release.nix | 12 ++++++++++++ src/nix/upgrade-nix.cc | 9 ++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 
f9ee07e3478..fdf3fa946aa 100644 --- a/release.nix +++ b/release.nix @@ -249,6 +249,18 @@ let su - alice -c '_NIX_INSTALLER_TEST=1 ./nix-*/install' su - alice -c 'nix-store --verify' su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}' + + # Check whether 'nix upgrade-nix' works. + (! [ -L /nix/var/nix/profiles/per-user/alice/profile-2-link ]) + cat > /tmp/paths.nix < store) { // FIXME: use nixos.org? - auto req = DownloadRequest("https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix"); + auto req = DownloadRequest(storePathsUrl); auto res = getDownloader()->download(req); auto state = std::make_unique(Strings(), store); From f08b14c9d0e0a1d24b0a5d231ba7d448ef000106 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 17:14:17 +0200 Subject: [PATCH 1179/2196] nix upgrade-nix: Improve error message if 'nix' is not in a profile E.g. $ nix upgrade-nix error: directory '/home/eelco/Dev/nix/inst/bin' does not appear to be part of a Nix profile instead of $ nix upgrade-nix error: '/home/eelco/Dev/nix/inst' is not a symlink --- src/nix/upgrade-nix.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 666ac68b0a7..2db60922a9c 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -113,7 +113,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Path profileDir = dirOf(where); // Resolve profile to /nix/var/nix/profiles/ link. - while (baseNameOf(dirOf(canonPath(profileDir))) != "profiles") + while (baseNameOf(dirOf(canonPath(profileDir))) != "profiles" && isLink(profileDir)) profileDir = readLink(profileDir); printInfo("found profile '%s'", profileDir); From 39f1722f364d7ce95717161cc283e96250c14643 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Aug 2018 21:18:56 +0200 Subject: [PATCH 1180/2196] nix upgrade-nix: Handle .nix-profile being a link to .../profiles/per-user/... Also some cosmetic improvements. --- release.nix | 4 ++-- src/nix/upgrade-nix.cc | 34 ++++++++++++++++++++++------------ 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/release.nix b/release.nix index fdf3fa946aa..e359ebcb2ce 100644 --- a/release.nix +++ b/release.nix @@ -241,6 +241,7 @@ let { diskImage = vmTools.diskImages.ubuntu1204x86_64; } '' + set -x useradd -m alice su - alice -c 'tar xf ${binaryTarball.x86_64-linux}/*.tar.*' mkdir /dest-nix @@ -251,14 +252,13 @@ let su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}' # Check whether 'nix upgrade-nix' works. - (! 
[ -L /nix/var/nix/profiles/per-user/alice/profile-2-link ]) cat > /tmp/paths.nix <ensurePath(storePath); + store->ensurePath(storePath); } { Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", storePath)); - if (!dryRun) { - auto program = storePath + "/bin/nix-env"; - auto s = runProgram(program, false, {"--version"}); - if (s.find("Nix") == std::string::npos) - throw Error("could not verify that '%s' works", program); - } + auto program = storePath + "/bin/nix-env"; + auto s = runProgram(program, false, {"--version"}); + if (s.find("Nix") == std::string::npos) + throw Error("could not verify that '%s' works", program); } + stopProgressBar(); + { Activity act(*logger, lvlInfo, actUnknown, fmt("installing '%s' into profile '%s'...", storePath, profileDir)); - if (!dryRun) - runProgram(settings.nixBinDir + "/nix-env", false, - {"--profile", profileDir, "-i", storePath, "--no-sandbox"}); + runProgram(settings.nixBinDir + "/nix-env", false, + {"--profile", profileDir, "-i", storePath, "--no-sandbox"}); } + + printError(ANSI_GREEN "upgrade to version %s done" ANSI_NORMAL, version); } /* Return the profile in which Nix is installed. */ @@ -113,7 +123,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand Path profileDir = dirOf(where); // Resolve profile to /nix/var/nix/profiles/ link. - while (baseNameOf(dirOf(canonPath(profileDir))) != "profiles" && isLink(profileDir)) + while (canonPath(profileDir).find("/profiles/") == std::string::npos && isLink(profileDir)) profileDir = readLink(profileDir); printInfo("found profile '%s'", profileDir); From 0767e402f1d166bdc25755422091bbd67eda52fe Mon Sep 17 00:00:00 2001 From: Michael Bishop Date: Thu, 30 Aug 2018 20:00:01 -0300 Subject: [PATCH 1181/2196] fix `error: unknown serve command 9` --- src/libstore/legacy-ssh-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 7c214f09d6f..88d2574e86e 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -131,7 +131,7 @@ struct LegacySSHStore : public Store auto conn(connections->get()); - if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) { + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { conn->to << cmdAddToStoreNar From c0c31b58a43dc46c5f2a4f7f1880387db449711b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Aug 2018 11:12:51 +0200 Subject: [PATCH 1182/2196] Add localhost alias for ::1 to the sandbox --- src/libstore/build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index d75ca0be86e..cd37f7a3fc0 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2007,7 +2007,7 @@ void DerivationGoal::startBuilder() /* Create /etc/hosts with localhost entry. */ if (!fixedOutput) - writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n"); + writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n"); /* Make the closure of the inputs available in the chroot, rather than the whole Nix store. 
This prevents any access From 2df21b78b98dcfcb1fd36e15b76e78cc2d8587a0 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 09:32:59 -0400 Subject: [PATCH 1183/2196] docs: Add some examples to fetchGit --- doc/manual/expressions/builtins.xml | 89 ++++++++++++++++++++++++++++- 1 file changed, 87 insertions(+), 2 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 07d8357b40b..6f3c4e3d0b6 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -398,6 +398,91 @@ stdenv.mkDerivation { … } + + + Fetching a private repository over SSH + builtins.fetchGit { + url = "ssh://git@github.com/my-secret/repository.git"; + ref = "master"; + rev = "adab8b916a45068c044658c4158d81878f9ed1c3"; +} + + + Note the URL format is not the same as git + clone. builtins.fetchGit uses + a / instead of a : + between github.com and + my-secret. + + + + Fetching a repository's specific commit on an arbitrary branch + + If the revision you're looking for is in the default branch + of the gift repository you don't strictly need to specify + the branch name in the ref attribute. + + + However, if the revision you're looking for is in a future + branch for the non-default branch you will need to specify + the the ref attribute as well. + + builtins.fetchGit { + url = "https://github.com/nixos/nix.git"; + rev = "841fcbd04755c7a2865c51c1e2d3b045976b7452"; + ref = "1.11-maintenance"; +} + + + It is nice to always specify the branch which a revision + belongs to. Without the branch being specified, the + fetcher might fail if the default branch changes. + Additionally, it can be confusing to try a commit from a + non-default branch and see the fetch fail. If the branch + is specified the fault is much more obvious. + + + + + + Fetching a repository's specific commit on the default branch + + If the revision you're looking for is in the default branch + of the gift repository you may omit the + ref attribute. + + builtins.fetchGit { + url = "https://github.com/nixos/nix.git"; + rev = "841fcbd04755c7a2865c51c1e2d3b045976b7452"; +} + + + + Fetching a tag + builtins.fetchGit { + url = "https://github.com/nixos/nix.git"; + ref = "tags/1.9"; +} + Due to a bug (#2385), + only non-annotated tags can be fetched. + + + + Fetching the latest version of a remote branch + + builtins.fetchGit can behave impurely + fetch the latest version of a remote branch. + + Nix will refetch the branch in accordance to + . + This behavior is disabled in + Pure evaluation mode. + builtins.fetchGit { + url = "ssh://git@github.com/nixos/nix.git"; + ref = "master"; +} + @@ -1244,8 +1329,8 @@ in foo This is not allowed because it would cause a cyclic dependency in the computation of the cryptographic hashes for foo and bar. - It is also not possible to reference the result of a derivation. - If you are using Nixpkgs, the writeTextFile function is able to + It is also not possible to reference the result of a derivation. + If you are using Nixpkgs, the writeTextFile function is able to do that. 
From 149d10c308090dafa852542419010b0caf4412a5 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 10:06:33 -0400 Subject: [PATCH 1184/2196] docs: Add IDs to important sections --- doc/manual/advanced-topics/advanced-topics.xml | 1 + doc/manual/expressions/language-constructs.xml | 2 +- doc/manual/glossary/glossary.xml | 3 ++- doc/manual/installation/env-variables.xml | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/manual/advanced-topics/advanced-topics.xml b/doc/manual/advanced-topics/advanced-topics.xml index 338aa6f3a23..b710f9f2b51 100644 --- a/doc/manual/advanced-topics/advanced-topics.xml +++ b/doc/manual/advanced-topics/advanced-topics.xml @@ -1,6 +1,7 @@ Advanced Topics diff --git a/doc/manual/expressions/language-constructs.xml b/doc/manual/expressions/language-constructs.xml index 47d95f8a13e..f961ed921bc 100644 --- a/doc/manual/expressions/language-constructs.xml +++ b/doc/manual/expressions/language-constructs.xml @@ -41,7 +41,7 @@ encountered).. -Let-expressions +Let-expressions A let-expression allows you define local variables for an expression. For instance, diff --git a/doc/manual/glossary/glossary.xml b/doc/manual/glossary/glossary.xml index 4977825578f..e3162ed8d46 100644 --- a/doc/manual/glossary/glossary.xml +++ b/doc/manual/glossary/glossary.xml @@ -1,5 +1,6 @@ + xmlns:xlink="http://www.w3.org/1999/xlink" + xml:id="part-glossary"> Glossary diff --git a/doc/manual/installation/env-variables.xml b/doc/manual/installation/env-variables.xml index 1fd6bafee7e..91ecd114f6d 100644 --- a/doc/manual/installation/env-variables.xml +++ b/doc/manual/installation/env-variables.xml @@ -55,7 +55,7 @@ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt the Nix installer will detect the presense of Nix configuration, and abort. -
+
<envar>NIX_SSL_CERT_FILE</envar> with macOS and the Nix daemon On macOS you must specify the environment variable for the Nix From ef09da58f2cb592b10c0c3bffff01cf89a5fdb7d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Aug 2018 17:30:45 +0200 Subject: [PATCH 1185/2196] nix path-info: Hopefully fix macOS build failure https://hydra.nixos.org/build/80480356 --- src/nix/path-info.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 916ed360e62..fbe5f91698f 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -70,9 +70,9 @@ struct CmdPathInfo : StorePathsCommand, MixJSON return; } - static constexpr std::array idents = { + static const std::array idents{{ ' ', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y' - }; + }}; size_t power = 0; double res = value; while (res > 1024 && power < idents.size()) { From d1f36e8787d2aa2071ece097273975656f788f2a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 31 Aug 2018 19:20:08 +0200 Subject: [PATCH 1186/2196] Really fix the build --- src/nix/path-info.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index fbe5f91698f..11c47bc4b49 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -5,6 +5,7 @@ #include "common-args.hh" #include +#include using namespace nix; From 4095cd6438812a3f7871796604f1ae548b592aeb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 1 Sep 2018 00:01:05 +0200 Subject: [PATCH 1187/2196] Add contributors --- doc/manual/release-notes/rl-2.1.xml | 55 ++++++++++++++++++++++++++++- 1 file changed, 54 insertions(+), 1 deletion(-) diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml index 09b4f92e199..9a5f37f6625 100644 --- a/doc/manual/release-notes/rl-2.1.xml +++ b/doc/manual/release-notes/rl-2.1.xml @@ -51,7 +51,60 @@ new features: -This release has contributions from TODO. +This release has contributions from + +Adrien Devresse, +Aleksandr Pashkov, +Alexandre Esteves, +Amine Chikhaoui, +Andrew Dunham, +Asad Saeeduddin, +aszlig, +Ben Challenor, +Ben Gamari, +Benjamin Hipple, +Bogdan Seniuc, +Corey O'Connor, +Daiderd Jordan, +Daniel Peebles, +Daniel Poelzleithner, +Danylo Hlynskyi, +Dmitry Kalinkin, +Domen Kožar, +Doug Beardsley, +Eelco Dolstra, +Erik Arvstedt, +Félix Baylac-Jacqué, +Gleb Peregud, +Graham Christensen, +Guillaume Maudoux, +Ivan Kozik, +John Arnold, +Justin Humm, +Linus Heckemann, +Lorenzo Manacorda, +Matthew Justin Bauer, +Matthew O'Gorman, +Maximilian Bosch, +Michael Bishop, +Michael Fiano, +Michael Mercier, +Michael Raskin, +Michael Weiss, +Nicolas Dudebout, +Peter Simons, +Ryan Trinkle, +Samuel Dionne-Riel, +Sean Seefried, +Shea Levy, +Symphorien Gibol, +Tim Engler, +Tim Sears, +Tuomas Tynkkynen, +volth, +Will Dietz, +Yorick van Pelt and +zimbatm.
From b7409c57544997e6c31a05b5ca024e752cb1a35d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 1 Sep 2018 00:04:57 +0200 Subject: [PATCH 1188/2196] nix path-info: Remove trailing spaces Fixes #2390. --- src/nix/path-info.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 11c47bc4b49..dea5f0557b8 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -103,7 +103,10 @@ struct CmdPathInfo : StorePathsCommand, MixJSON auto info = store->queryPathInfo(storePath); storePath = info->path; // FIXME: screws up padding - std::cout << storePath << std::string(std::max(0, (int) pathLen - (int) storePath.size()), ' '); + std::cout << storePath; + + if (showSize || showClosureSize || showSigs) + std::cout << std::string(std::max(0, (int) pathLen - (int) storePath.size()), ' '); if (showSize) printSize(info->narSize); From 475a0a54a9c28c311f0c05d129bb165e7c215efb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 1 Sep 2018 00:19:49 +0200 Subject: [PATCH 1189/2196] fetchGit/fetchMercurial: Don't absolutize paths This is already done by coerceToString(), provided that the argument is a path (e.g. 'fetchGit ./bla'). It fixes the handling of URLs like git@github.com:owner/repo.git. It breaks 'fetchGit "./bla"', but that was never intended to work anyway and is inconsistent with other builtin functions (e.g. 'readFile "./bla"' fails). --- src/libexpr/primops/fetchGit.cc | 2 -- src/libexpr/primops/fetchMercurial.cc | 2 -- 2 files changed, 4 deletions(-) diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index 7aa98e0bfab..0c6539959bf 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -219,8 +219,6 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va } else url = state.coerceToString(pos, *args[0], context, false, false); - if (!isUri(url)) url = absPath(url); - // FIXME: git externals probably can be used to bypass the URI // whitelist. Ah well. state.checkURI(url); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 9d35f6d0d6d..97cda2458c9 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -184,8 +184,6 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar } else url = state.coerceToString(pos, *args[0], context, false, false); - if (!isUri(url)) url = absPath(url); - // FIXME: git externals probably can be used to bypass the URI // whitelist. Ah well. state.checkURI(url); From 0b7568fb730439a2fbbf3c4b3b222319faa7e66a Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 19:49:56 -0400 Subject: [PATCH 1190/2196] Drop ssh://... as a required formatting for builtins.fetchGit --- doc/manual/expressions/builtins.xml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 6f3c4e3d0b6..873f30b062e 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -402,17 +402,10 @@ stdenv.mkDerivation { … } Fetching a private repository over SSH builtins.fetchGit { - url = "ssh://git@github.com/my-secret/repository.git"; + url = "git@github.com:my-secret/repository.git"; ref = "master"; rev = "adab8b916a45068c044658c4158d81878f9ed1c3"; } - - - Note the URL format is not the same as git - clone. 
builtins.fetchGit uses - a / instead of a : - between github.com and - my-secret. From b7409c57544997e6c31a05b5ca024e752cb1a35d Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 20:57:07 -0400 Subject: [PATCH 1191/2196] nix-build: print stats on successful builds --- src/nix-build/nix-build.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 21a0756a200..54d6b1db736 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -495,6 +495,7 @@ void mainWrapped(int argc, char * * argv) for (auto & path : outPaths) std::cout << path << '\n'; + state->printStats(); } } From 87702532d2aa24394493ccd12e77811cc2d0893c Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 21:03:32 -0400 Subject: [PATCH 1192/2196] nix-build: Print stats even in failing builds --- src/nix-build/nix-build.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 54d6b1db736..94d3a27560f 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -305,6 +305,8 @@ void mainWrapped(int argc, char * * argv) } } + state->printStats(); + auto buildPaths = [&](const PathSet & paths) { /* Note: we do this even when !printMissing to efficiently fetch binary cache data. */ @@ -495,7 +497,6 @@ void mainWrapped(int argc, char * * argv) for (auto & path : outPaths) std::cout << path << '\n'; - state->printStats(); } } From f66fa7cd20731ab08be72e016cc5b912940cd1db Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Sat, 1 Sep 2018 08:56:53 -0400 Subject: [PATCH 1193/2196] We support aarch64 now --- doc/manual/installation/supported-platforms.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/installation/supported-platforms.xml b/doc/manual/installation/supported-platforms.xml index 6858573ff40..3e74be49d1f 100644 --- a/doc/manual/installation/supported-platforms.xml +++ b/doc/manual/installation/supported-platforms.xml @@ -10,7 +10,7 @@ - Linux (i686, x86_64). + Linux (i686, x86_64, aarch64). macOS (x86_64). From c3e508d9245f9439343d88d827c2d7c369ebfc88 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 11:47:00 -0400 Subject: [PATCH 1194/2196] Document the multi-user installer some Use sh <(...) syntax for installation to preserve stdin and prompting; also update installation docs to account for changes in multi-user selection --- doc/manual/installation/env-variables.xml | 2 +- doc/manual/installation/installing-binary.xml | 178 ++++++++++++++---- 2 files changed, 140 insertions(+), 40 deletions(-) diff --git a/doc/manual/installation/env-variables.xml b/doc/manual/installation/env-variables.xml index 91ecd114f6d..d1ee0bb2e09 100644 --- a/doc/manual/installation/env-variables.xml +++ b/doc/manual/installation/env-variables.xml @@ -39,7 +39,7 @@ bundle.
Set the environment variable and install Nix $ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt -$ curl https://nixos.org/nix/install | sh +$ sh <(curl https://nixos.org/nix/install) In the shell profile and rc files (for example, diff --git a/doc/manual/installation/installing-binary.xml b/doc/manual/installation/installing-binary.xml index 7e8dfb0db3d..394d8053b94 100644 --- a/doc/manual/installation/installing-binary.xml +++ b/doc/manual/installation/installing-binary.xml @@ -6,13 +6,30 @@ Installing a Binary Distribution -If you are using Linux or macOS, the easiest way to install -Nix is to run the following command: +If you are using Linux or macOS, the easiest way to install Nix +is to run the following command: -$ bash <(curl https://nixos.org/nix/install) + $ sh <(curl https://nixos.org/nix/install) +As of Nix 2.1.0, the Nix installer will always default to creating a +single-user installation, however opting in to the multi-user +installation is highly recommended. + + +
+ Single User Installation + + + To explicitly select a single-user installation on your system: + + + sh <(curl https://nixos.org/nix/install) --no-daemon + + + + This will perform a single-user installation of Nix, meaning that /nix is owned by the invoking user. You should run this under your usual user account, not as @@ -33,58 +50,141 @@ and .profile to source the NIX_INSTALLER_NO_MODIFY_PROFILE environment variable before executing the install script to disable this behaviour. - - + There may also be references to Nix in + /etc/profile, + /etc/bashrc, and + /etc/zshrc which you may remove. + -You can also download a binary tarball that contains Nix and all -its dependencies. (This is what the install script at -https://nixos.org/nix/install does automatically.) You -should unpack it somewhere (e.g. in /tmp), and -then run the script named install inside the binary -tarball: +
- -alice$ cd /tmp -alice$ tar xfj nix-1.8-x86_64-darwin.tar.bz2 -alice$ cd nix-1.8-x86_64-darwin -alice$ ./install - +
+ Installing a pinned Nix version from a URL - + + NixOS.org hosts version-specific installation URLs for all Nix + versions since 1.11.16, at + https://nixos.org/releases/nix/nix-VERSION/install. + -You can uninstall Nix simply by running: + + These install scripts can be used the same as the main + NixOS.org installation script: - -$ rm -rf /nix + + sh <(curl https://nixos.org/nix/install) + - + + In the same directory of the install script are sha256 sums, and + gpg signature files. + +
+ +
+ Installing from a binary tarball + + You can also download a binary tarball that contains Nix and all + its dependencies. (This is what the install script at + https://nixos.org/nix/install does automatically.) You + should unpack it somewhere (e.g. in /tmp), + and then run the script named install inside + the binary tarball: + + + +alice$ cd /tmp +alice$ tar xfj nix-1.8-x86_64-darwin.tar.bz2 +alice$ cd nix-1.8-x86_64-darwin +alice$ ./install + + + + + If you need to edit the multi-user installation script to use + different group ID or a different user ID range, modify the + variables set in the file named + install-multi-user. + +
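Taken together with the installer changes in the next patch, the installation methods documented above come down to a single choice on the command line. The following is only a sketch (prompts and output omitted; the --daemon form applies only where the multi-user installer is supported, i.e. Linux with systemd or macOS):

# Single-user installation (the default; /nix ends up owned by the invoking user):
$ sh <(curl https://nixos.org/nix/install) --no-daemon

# Multi-user installation (sets up the nix-daemon):
$ sh <(curl https://nixos.org/nix/install) --daemon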
From 51f9682a8b16c4f97856510c1cc99576326c9192 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Sat, 1 Sep 2018 08:51:32 -0400 Subject: [PATCH 1195/2196] Default to single-user install --- scripts/install-nix-from-closure.sh | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh index cd71d7947d7..ab20774bbf0 100644 --- a/scripts/install-nix-from-closure.sh +++ b/scripts/install-nix-from-closure.sh @@ -30,15 +30,14 @@ if [ "$(uname -s)" = "Darwin" ]; then fi fi -# Determine if we should punt to the single-user installer or not +# Determine if we could use the multi-user installer or not if [ "$(uname -s)" = "Darwin" ]; then - INSTALL_MODE=daemon + echo "Note: a multi-user installation is possible. See https://nixos.org/nix/manual/#sect-multi-user-installation" >&2 elif [ "$(uname -s)" = "Linux" ] && [ -e /run/systemd/system ]; then - INSTALL_MODE=daemon -else - INSTALL_MODE=no-daemon + echo "Note: a multi-user installation is possible. See https://nixos.org/nix/manual/#sect-multi-user-installation" >&2 fi +INSTALL_MODE=no-daemon # Trivially handle the --daemon / --no-daemon options if [ "x${1:-}" = "x--no-daemon" ]; then INSTALL_MODE=no-daemon @@ -47,14 +46,18 @@ elif [ "x${1:-}" = "x--daemon" ]; then elif [ "x${1:-}" != "x" ]; then ( echo "Nix Installer [--daemon|--no-daemon]" + + echo "Choose installation method." echo "" - echo " --daemon: Force the installer to use the Daemon" - echo " based installer, even though it may not" - echo " work." + echo " --daemon: Installs and configures a background daemon that manages the store," + echo " providing multi-user support and better isolation for local builds." + echo " Both for security and reproducibility, this method is recommended if" + echo " supported on your platform." + echo " See https://nixos.org/nix/manual/#sect-multi-user-installation" echo "" - echo " --no-daemon: Force a no-daemon, single-user" - echo " installation even when the preferred" - echo " method is with the daemon." + echo " --no-daemon: Simple, single-user installation that does not require root and is" + echo " trivial to uninstall." + echo " (default)" echo "" ) >&2 exit From 4be7652dd3c577ce0890ee849c041e4cb59a3db7 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 31 Aug 2018 11:47:42 -0400 Subject: [PATCH 1196/2196] release notes: note that the multi-user installer is available but not selected for Linux with systemd, and the bug about selinux --- doc/manual/release-notes/rl-2.1.xml | 30 +++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml index 9a5f37f6625..20a8e4ea6ca 100644 --- a/doc/manual/release-notes/rl-2.1.xml +++ b/doc/manual/release-notes/rl-2.1.xml @@ -49,8 +49,38 @@ new features:
nix-support/propagated-user-env-packages.
+ + The Nix installer will no longer default to the Multi-User + installation for macOS. You can still instruct the installer to + run in multi-user mode. + + + + + The Nix installer now supports performing a Multi-User + installation for Linux computers which are running systemd. You + can select a Multi-User installation by passing the + flag to the installer: sh <(curl + https://nixos.org/nix/install) --daemon. + + + The multi-user installer cannot handle systems with SELinux. + If your system has SELinux enabled, you can force the installer to run + in single-user mode. + +
+ + Multi-user Nix users on macOS can upgrade Nix by running + sudo -i sh -c 'nix-channel --update && nix-env -iA + nixpkgs.nix'; sudo launchctl stop org.nixos.nix-daemon; sudo + launchctl start org.nixos.nix-daemon. + + This release has contributions from Adrien Devresse, From c42eaaf684a6b1f7358d33cfaea1614885467d72 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Sat, 1 Sep 2018 15:35:46 -0400 Subject: [PATCH 1197/2196] Create upgrade notes --- doc/manual/installation/upgrading.xml | 21 +++++++++++++++++++++ doc/manual/manual.xml | 1 + doc/manual/release-notes/rl-2.1.xml | 7 ------- 3 files changed, 22 insertions(+), 7 deletions(-) create mode 100644 doc/manual/installation/upgrading.xml diff --git a/doc/manual/installation/upgrading.xml b/doc/manual/installation/upgrading.xml new file mode 100644 index 00000000000..a3f86ade95c --- /dev/null +++ b/doc/manual/installation/upgrading.xml @@ -0,0 +1,21 @@ + + + Upgrading Nix + + + Multi-user Nix users on macOS can upgrade Nix by running + sudo -i sh -c 'nix-channel --update && nix-env + -iA nixpkgs.nix'; sudo launchctl stop org.nixos.nix-daemon; sudo + launchctl start org.nixos.nix-daemon. + + + + Single-user installations of Nix should run nix-channel + --update; nix-env -iA nixpkgs.nix. + + + diff --git a/doc/manual/manual.xml b/doc/manual/manual.xml index b408b681772..87d9de28ab1 100644 --- a/doc/manual/manual.xml +++ b/doc/manual/manual.xml @@ -32,6 +32,7 @@ + diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml index 20a8e4ea6ca..3cace13f10f 100644 --- a/doc/manual/release-notes/rl-2.1.xml +++ b/doc/manual/release-notes/rl-2.1.xml @@ -74,13 +74,6 @@ new features: - - Multi-user Nix users on macOS can upgrade Nix by running - sudo -i sh -c 'nix-channel --update && nix-env -iA - nixpkgs.nix'; sudo launchctl stop org.nixos.nix-daemon; sudo - launchctl start org.nixos.nix-daemon. 
- - This release has contributions from Adrien Devresse, From c29e5fbb1306a2da869260153cd38cf25986d662 Mon Sep 17 00:00:00 2001 From: Michael Bishop Date: Sat, 1 Sep 2018 17:11:56 -0300 Subject: [PATCH 1198/2196] improve the stats when profiling --- src/libexpr/eval.cc | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index f41905787f9..77d1faced72 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1739,17 +1739,16 @@ void EvalState::printStats() uint64_t bValues = nrValues * sizeof(Value); uint64_t bAttrsets = nrAttrsets * sizeof(Bindings) + nrAttrsInAttrsets * sizeof(Attr); + auto sum = bEnvs + bLists + bValues + bAttrsets; + printMsg(v, format(" time elapsed: %1%") % cpuTime); printMsg(v, format(" size of a value: %1%") % sizeof(Value)); printMsg(v, format(" size of an attr: %1%") % sizeof(Attr)); - printMsg(v, format(" environments allocated count: %1%") % nrEnvs); - printMsg(v, format(" environments allocated bytes: %1%") % bEnvs); - printMsg(v, format(" list elements count: %1%") % nrListElems); - printMsg(v, format(" list elements bytes: %1%") % bLists); + printMsg(v, format(" environments allocated: %1% (%2% bytes, %3%%%)") % nrEnvs % bEnvs % ((bEnvs*100) / sum)); + printMsg(v, format(" list elements: %1% (%2% bytes %3%%%)") % nrListElems % bLists % ((bLists*100)/sum)); printMsg(v, format(" list concatenations: %1%") % nrListConcats); - printMsg(v, format(" values allocated count: %1%") % nrValues); - printMsg(v, format(" values allocated bytes: %1%") % bValues); - printMsg(v, format(" sets allocated: %1% (%2% bytes)") % nrAttrsets % bAttrsets); + printMsg(v, format(" values allocated: %1% (%2% bytes %3%%%)") % nrValues % bValues % ((bValues*100)/sum)); + printMsg(v, format(" sets allocated: %1% (%2% bytes %3%%%)") % nrAttrsets % bAttrsets % ((bAttrsets*100)/sum)); printMsg(v, format(" right-biased unions: %1%") % nrOpUpdates); printMsg(v, format(" values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied); printMsg(v, format(" symbols in symbol table: %1%") % symbols.size()); @@ -1761,6 +1760,8 @@ void EvalState::printStats() printMsg(v, format(" number of function calls: %1%") % nrFunctionCalls); printMsg(v, format(" total allocations: %1% bytes") % (bEnvs + bLists + bValues + bAttrsets)); + printMsg(v, format(" sets: %1% (%2% each, %3% mb total), attrs-in-sets: %4% (%5% each, %6% mb total)") % nrAttrsets % sizeof(Bindings) % ((nrAttrsets * sizeof(Bindings)) / 1024 / 1024) % nrAttrsInAttrsets % sizeof(Attr) % ((nrAttrsInAttrsets * sizeof(Attr)) / 1024 / 1024)); + #if HAVE_BOEHMGC GC_word heapSize, totalBytes; GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes); From 2fd1008c708be68b56d39be2e1230f29b2ac5ab1 Mon Sep 17 00:00:00 2001 From: Michael Bishop Date: Sat, 1 Sep 2018 19:50:22 -0300 Subject: [PATCH 1199/2196] add JSON to NIX_SHOW_STATS --- src/libexpr/eval.cc | 114 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 90 insertions(+), 24 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 77d1faced72..9ad0b1a2043 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -6,12 +6,15 @@ #include "globals.hh" #include "eval-inline.hh" #include "download.hh" +#include "json.hh" #include #include #include #include #include +#include +#include #include #include @@ -23,6 +26,7 @@ #endif +using std::cout; namespace nix { @@ -1723,10 +1727,10 @@ bool EvalState::eqValues(Value & v1, Value & v2) } } - void EvalState::printStats() { bool showStats = 
getEnv("NIX_SHOW_STATS", "0") != "0"; + bool showJsonStats = getEnv("NIX_SHOW_STATS", "0") == "json"; Verbosity v = showStats ? lvlInfo : lvlDebug; printMsg(v, "evaluation statistics:"); @@ -1741,33 +1745,95 @@ void EvalState::printStats() auto sum = bEnvs + bLists + bValues + bAttrsets; - printMsg(v, format(" time elapsed: %1%") % cpuTime); - printMsg(v, format(" size of a value: %1%") % sizeof(Value)); - printMsg(v, format(" size of an attr: %1%") % sizeof(Attr)); - printMsg(v, format(" environments allocated: %1% (%2% bytes, %3%%%)") % nrEnvs % bEnvs % ((bEnvs*100) / sum)); - printMsg(v, format(" list elements: %1% (%2% bytes %3%%%)") % nrListElems % bLists % ((bLists*100)/sum)); - printMsg(v, format(" list concatenations: %1%") % nrListConcats); - printMsg(v, format(" values allocated: %1% (%2% bytes %3%%%)") % nrValues % bValues % ((bValues*100)/sum)); - printMsg(v, format(" sets allocated: %1% (%2% bytes %3%%%)") % nrAttrsets % bAttrsets % ((bAttrsets*100)/sum)); - printMsg(v, format(" right-biased unions: %1%") % nrOpUpdates); - printMsg(v, format(" values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied); - printMsg(v, format(" symbols in symbol table: %1%") % symbols.size()); - printMsg(v, format(" size of symbol table: %1%") % symbols.totalSize()); - printMsg(v, format(" number of thunks: %1%") % nrThunks); - printMsg(v, format(" number of thunks avoided: %1%") % nrAvoided); - printMsg(v, format(" number of attr lookups: %1%") % nrLookups); - printMsg(v, format(" number of primop calls: %1%") % nrPrimOpCalls); - printMsg(v, format(" number of function calls: %1%") % nrFunctionCalls); - printMsg(v, format(" total allocations: %1% bytes") % (bEnvs + bLists + bValues + bAttrsets)); - - printMsg(v, format(" sets: %1% (%2% each, %3% mb total), attrs-in-sets: %4% (%5% each, %6% mb total)") % nrAttrsets % sizeof(Bindings) % ((nrAttrsets * sizeof(Bindings)) / 1024 / 1024) % nrAttrsInAttrsets % sizeof(Attr) % ((nrAttrsInAttrsets * sizeof(Attr)) / 1024 / 1024)); - #if HAVE_BOEHMGC GC_word heapSize, totalBytes; GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes); - printMsg(v, format(" current Boehm heap size: %1% bytes") % heapSize); - printMsg(v, format(" total Boehm heap allocations: %1% bytes") % totalBytes); #endif + if (showJsonStats) { + auto outPath = getEnv("NIX_SHOW_STATS_PATH","-"); + std::fstream fs; + if (outPath != "-") { + fs.open(outPath, std::fstream::out); + printMsg(v, format(" written to: %1%") % outPath); + } + JSONObject topObj(outPath == "-" ? 
cout : fs, true); + topObj.attr("cpuTime",cpuTime); + { + auto envs = topObj.object("envs"); + envs.attr("number", nrEnvs); + envs.attr("bytes", bEnvs); + } + { + auto lists = topObj.object("list"); + lists.attr("elements", nrListElems); + lists.attr("bytes", bLists); + lists.attr("concats", nrListConcats); + } + { + auto values = topObj.object("values"); + values.attr("number", nrValues); + values.attr("bytes", bValues); + } + { + auto syms = topObj.object("symbols"); + syms.attr("number", symbols.size()); + syms.attr("bytes", symbols.totalSize()); + } + { + auto sets = topObj.object("sets"); + sets.attr("number", nrAttrsets); + sets.attr("bytes", bAttrsets); + } + { + JSONObject sizes = topObj.object("sizes"); + sizes.attr("Env", sizeof(Env)); + sizes.attr("Value", sizeof(Value)); + sizes.attr("Bindings", sizeof(Bindings)); + sizes.attr("Attr", sizeof(Attr)); + } + topObj.attr("nrOpUpdates", nrOpUpdates); + topObj.attr("nrOpUpdateValuesCopied", nrOpUpdateValuesCopied); + topObj.attr("nrThunks", nrThunks); + topObj.attr("nrAvoided", nrAvoided); + topObj.attr("nrLookups", nrLookups); + topObj.attr("nrPrimOpCalls", nrPrimOpCalls); + topObj.attr("nrFunctionCalls", nrFunctionCalls); +#if HAVE_BOEHMGC + JSONObject gc = topObj.object("gc"); + gc.attr("heapSize", heapSize); + gc.attr("totalBytes", totalBytes); +#endif + } else { + if (getEnv("NIX_SHOW_STATS_PATH","-") != "-") { + printError("warning: $NIX_SHOW_STATS_PATH only works in combination with NIX_SHOW_STATS=json"); + } + printMsg(v, format(" time elapsed: %1%") % cpuTime); + printMsg(v, format(" size of a value: %1%") % sizeof(Value)); + printMsg(v, format(" size of an attr: %1%") % sizeof(Attr)); + printMsg(v, format(" environments allocated: %1% (%2% bytes, %3%%%)") % nrEnvs % bEnvs % ((bEnvs*100) / sum)); + printMsg(v, format(" list elements: %1% (%2% bytes %3%%%)") % nrListElems % bLists % ((bLists*100)/sum)); + printMsg(v, format(" list concatenations: %1%") % nrListConcats); + printMsg(v, format(" values allocated: %1% (%2% bytes %3%%%)") % nrValues % bValues % ((bValues*100)/sum)); + printMsg(v, format(" sets allocated: %1% (%2% bytes %3%%%)") % nrAttrsets % bAttrsets % ((bAttrsets*100)/sum)); + printMsg(v, format(" right-biased unions: %1%") % nrOpUpdates); + printMsg(v, format(" values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied); + printMsg(v, format(" symbols in symbol table: %1%") % symbols.size()); + printMsg(v, format(" size of symbol table: %1%") % symbols.totalSize()); + printMsg(v, format(" number of thunks: %1%") % nrThunks); + printMsg(v, format(" number of thunks avoided: %1%") % nrAvoided); + printMsg(v, format(" number of attr lookups: %1%") % nrLookups); + printMsg(v, format(" number of primop calls: %1%") % nrPrimOpCalls); + printMsg(v, format(" number of function calls: %1%") % nrFunctionCalls); + printMsg(v, format(" total allocations: %1% bytes") % (bEnvs + bLists + bValues + bAttrsets)); + + printMsg(v, format(" sets: %1% (%2% each, %3% mb total), attrs-in-sets: %4% (%5% each, %6% mb total)") % nrAttrsets % sizeof(Bindings) % ((nrAttrsets * sizeof(Bindings)) / 1024 / 1024) % nrAttrsInAttrsets % sizeof(Attr) % ((nrAttrsInAttrsets * sizeof(Attr)) / 1024 / 1024)); + +#if HAVE_BOEHMGC + printMsg(v, format(" current Boehm heap size: %1% bytes") % heapSize); + printMsg(v, format(" total Boehm heap allocations: %1% bytes") % totalBytes); +#endif + } + if (countCalls) { v = lvlInfo; From c9a08540c3d64d1285928d1ce3d3d416a2547dd9 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Thu, 30 Aug 2018 
00:59:29 +0200 Subject: [PATCH 1200/2196] nix doctor: add command Inspired by the homebrew command, shows a combination of debugging information and warnings with potential issues with a nix installation. --- src/nix/doctor.cc | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 src/nix/doctor.cc diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc new file mode 100644 index 00000000000..fb4fc2a6e7e --- /dev/null +++ b/src/nix/doctor.cc @@ -0,0 +1,26 @@ +#include "command.hh" +#include "shared.hh" +#include "store-api.hh" + +using namespace nix; + +struct CmdDoctor : StoreCommand +{ + std::string name() override + { + return "doctor"; + } + + std::string description() override + { + return "check your system for potential problems"; + } + + void run(ref store) override + { + std::cout << "Store uri: " << store->getUri() << std::endl; + } +}; + +static RegisterCommand r1(make_ref()); + From 070823baa4c3c397c8a5eb0378944187e7f4903c Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Thu, 30 Aug 2018 23:28:47 +0200 Subject: [PATCH 1201/2196] Store: expose the protocol version used by a store --- src/libstore/legacy-ssh-store.cc | 6 ++++++ src/libstore/local-store.cc | 6 ++++++ src/libstore/local-store.hh | 2 ++ src/libstore/remote-store.cc | 7 +++++++ src/libstore/remote-store.hh | 2 ++ src/libstore/store-api.hh | 6 ++++++ 6 files changed, 29 insertions(+) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 88d2574e86e..26e1851981d 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -303,6 +303,12 @@ struct LegacySSHStore : public Store { auto conn(connections->get()); } + + unsigned int getProtocol() override + { + auto conn(connections->get()); + return conn->remoteVersion; + } }; static RegisterStoreImplementation regStore([]( diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c91dbf241bc..c8117c0c650 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1332,6 +1332,12 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store, } +unsigned int LocalStore::getProtocol() +{ + return PROTOCOL_VERSION; +} + + #if defined(FS_IOC_SETFLAGS) && defined(FS_IOC_GETFLAGS) && defined(FS_IMMUTABLE_FL) static void makeMutable(const Path & path) diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 746bdbeed79..fce963433a5 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -209,6 +209,8 @@ public: void registerValidPaths(const ValidPathInfos & infos); + unsigned int getProtocol() override; + void vacuumDB(); /* Repair the contents of the given path by redownloading it using diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index ea86ef052f5..eff5d252419 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -646,6 +646,13 @@ void RemoteStore::connect() } +unsigned int RemoteStore::getProtocol() +{ + auto conn(connections->get()); + return conn->daemonVersion; +} + + void RemoteStore::flushBadConnections() { connections->flushBad(); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index b488e34ce26..16daee8b673 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -97,6 +97,8 @@ public: void connect() override; + unsigned int getProtocol() override; + void flushBadConnections(); protected: diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 7c5b495a448..c2f964e11f7 100644 --- 
a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -598,6 +598,12 @@ public: a notion of connection. Otherwise this is a no-op. */ virtual void connect() { }; + /* Get the protocol version of this store or its connection. */ + virtual unsigned int getProtocol() + { + return 0; + }; + /* Get the priority of the store, used to order substituters. In particular, binary caches can specify a priority field in their "nix-cache-info" file. Lower value means higher priority. */ From 7314dc7f07a90782dea5cc9d298c7f7148e3b7c3 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Thu, 30 Aug 2018 23:42:28 +0200 Subject: [PATCH 1202/2196] nix doctor: add warning if client/daemon protocol mismatches A protocol mismatch can sometimes cause problems when using specific features with an older daemon. For example: Nix 2.0 changed the way files are copied to the store. The daemon is backwards compatible and can still handle older clients, however a 1.11 nix-daemon isn't forwards compatible. --- src/nix/doctor.cc | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index fb4fc2a6e7e..6ef5eb9d30e 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -1,9 +1,20 @@ #include "command.hh" #include "shared.hh" #include "store-api.hh" +#include "worker-protocol.hh" using namespace nix; +std::string formatProtocol(unsigned int proto) +{ + if (proto) { + auto major = GET_PROTOCOL_MAJOR(proto) >> 8; + auto minor = GET_PROTOCOL_MINOR(proto); + return (format("%1%.%2%") % major % minor).str(); + } + return "unknown"; +} + struct CmdDoctor : StoreCommand { std::string name() override @@ -19,8 +30,22 @@ struct CmdDoctor : StoreCommand void run(ref store) override { std::cout << "Store uri: " << store->getUri() << std::endl; + std::cout << std::endl; + + checkStoreProtocol(store->getProtocol()); + } + + void checkStoreProtocol(unsigned int proto) { + if (PROTOCOL_VERSION != proto) { + std::cout << "Warning: protocol version of this client does not match the store." << std::endl; + std::cout << "While this is not necessarily a problem it's recommended to keep the client in" << std::endl; + std::cout << "sync with the daemon." << std::endl; + std::cout << std::endl; + std::cout << "Client protocol: " << formatProtocol(PROTOCOL_VERSION) << std::endl; + std::cout << "Store protocol: " << formatProtocol(proto) << std::endl; + std::cout << std::endl; + } } }; static RegisterCommand r1(make_ref()); - From 246acf93f2b61b2915e2140d761b19c2e836a96e Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Fri, 31 Aug 2018 01:01:59 +0200 Subject: [PATCH 1203/2196] nix doctor: handle serve protocol The serve protocol used by LegacySSHStore has a different major and shouldn't be compared to PROTOCOL_VERSION. --- src/nix/doctor.cc | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 6ef5eb9d30e..a31df595da8 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -1,4 +1,5 @@ #include "command.hh" +#include "serve-protocol.hh" #include "shared.hh" #include "store-api.hh" #include "worker-protocol.hh" @@ -35,14 +36,18 @@ struct CmdDoctor : StoreCommand checkStoreProtocol(store->getProtocol()); } - void checkStoreProtocol(unsigned int proto) { - if (PROTOCOL_VERSION != proto) { + void checkStoreProtocol(unsigned int storeProto) { + auto clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) + ?
SERVE_PROTOCOL_VERSION : PROTOCOL_VERSION; + + if (clientProto != storeProto) { std::cout << "Warning: protocol version of this client does not match the store." << std::endl; std::cout << "While this is not necessarily a problem it's recommended to keep the client in" << std::endl; std::cout << "sync with the daemon." << std::endl; std::cout << std::endl; - std::cout << "Client protocol: " << formatProtocol(PROTOCOL_VERSION) << std::endl; - std::cout << "Store protocol: " << formatProtocol(proto) << std::endl; + std::cout << "Client protocol: " << formatProtocol(clientProto) << std::endl; + std::cout << "Store protocol: " << formatProtocol(storeProto) << std::endl; std::cout << std::endl; } } From 0f18dc54797a1850bc1b91673790ad73e8f4b82f Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Sun, 2 Sep 2018 01:01:23 +0200 Subject: [PATCH 1204/2196] nix doctor: add warning for multiple versions It's pretty easy to unintentionally install a second version of nix into the user profile when using a daemon install. In this case it looks like nix was upgraded while the nix-daemon is probably still running an older version. --- src/nix/doctor.cc | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index a31df595da8..6265e9cfef2 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -33,9 +33,26 @@ struct CmdDoctor : StoreCommand std::cout << "Store uri: " << store->getUri() << std::endl; std::cout << std::endl; + checkNixInPath(); checkStoreProtocol(store->getProtocol()); } + void checkNixInPath() { + PathSet dirs; + + for (auto & dir : tokenizeString(getEnv("PATH"), ":")) + if (pathExists(dir + "/nix-env")) + dirs.insert(dirOf(canonPath(dir + "/nix-env", true))); + + if (dirs.size() != 1) { + std::cout << "Warning: multiple versions of nix found in PATH." << std::endl; + std::cout << std::endl; + for (auto & dir : dirs) + std::cout << " " << dir << std::endl; + std::cout << std::endl; + } + } + void checkStoreProtocol(unsigned int storeProto) { auto clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) ? SERVE_PROTOCOL_VERSION From bfdca55868ac1cd336c5d73ff944098ce82d023d Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Sun, 2 Sep 2018 12:52:04 +0200 Subject: [PATCH 1205/2196] nix doctor: add check for profile roots In most cases profiles that are in PATH should have a gcroot. --- src/nix/doctor.cc | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 6265e9cfef2..2a8af7780b9 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -34,6 +34,7 @@ struct CmdDoctor : StoreCommand std::cout << std::endl; checkNixInPath(); + checkProfileRoots(store); checkStoreProtocol(store->getProtocol()); } @@ -53,6 +54,40 @@ struct CmdDoctor : StoreCommand } } + void checkProfileRoots(ref store) { + PathSet dirs; + Roots roots = store->findRoots(); + + for (auto & dir : tokenizeString(getEnv("PATH"), ":")) + try { + auto profileDir = canonPath(dirOf(dir), true); + if (hasSuffix(profileDir, "user-environment") && + store->isValidPath(profileDir)) { + PathSet referrers; + store->computeFSClosure({profileDir}, referrers, true, + settings.gcKeepOutputs, settings.gcKeepDerivations); + bool found = false; + for (auto & i : roots) + if (referrers.find(i.second) != referrers.end()) + found = true; + if (!found) + dirs.insert(dir); + + } + } catch (SysError &) {} + + if (!dirs.empty()) { + std::cout << "Warning: found profiles without a gcroot."
<< std::endl; + std::cout << "The generation this profile points to will be deleted with the next gc, resulting" << std::endl; + std::cout << "in broken symlinks. Make sure your profiles are in " << settings.nixStateDir << "/profiles." << std::endl; + std::cout << std::endl; + for (auto & dir : dirs) + std::cout << " " << dir << std::endl; + std::cout << std::endl; + } + } + void checkStoreProtocol(unsigned int storeProto) { auto clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) ? SERVE_PROTOCOL_VERSION From 80a4b44d3d1bd35ab13d6b534f1015d15fdb08fd Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Sun, 2 Sep 2018 13:14:39 +0200 Subject: [PATCH 1206/2196] nix doctor: only perform path/profile checks with a daemon/local store Not all store types LegacySSHStore support these operations and it doesn't really make sense to check those. --- src/nix/doctor.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 2a8af7780b9..6112b1f650a 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -33,8 +33,12 @@ struct CmdDoctor : StoreCommand std::cout << "Store uri: " << store->getUri() << std::endl; std::cout << std::endl; - checkNixInPath(); - checkProfileRoots(store); + auto type = getStoreType(); + + if (type < tOther) { + checkNixInPath(); + checkProfileRoots(store); + } checkStoreProtocol(store->getProtocol()); } @@ -56,7 +60,6 @@ struct CmdDoctor : StoreCommand void checkProfileRoots(ref store) { PathSet dirs; - Roots roots = store->findRoots(); for (auto & dir : tokenizeString(getEnv("PATH"), ":")) From 4dd09210d7d32980ffbfc392dd7f8a42185dd9a3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sun, 2 Sep 2018 21:47:10 +0200 Subject: [PATCH 1207/2196] Release notes tweaks --- doc/manual/release-notes/rl-2.1.xml | 48 ++++++++++++++--------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/doc/manual/release-notes/rl-2.1.xml b/doc/manual/release-notes/rl-2.1.xml index 3cace13f10f..16c243fc191 100644 --- a/doc/manual/release-notes/rl-2.1.xml +++ b/doc/manual/release-notes/rl-2.1.xml @@ -4,7 +4,7 @@ version="5.0" xml:id="ssec-relnotes-2.1"> -Release 2.1 (2018-08-31) +Release 2.1 (2018-09-02) This is primarily a bug fix release. It also reduces memory consumption in certain situations. In addition, it has the following @@ -12,6 +12,29 @@ new features: + + The Nix installer will no longer default to the Multi-User + installation for macOS. You can still instruct the installer to + run in multi-user mode. + + + + + The Nix installer now supports performing a Multi-User + installation for Linux computers which are running systemd. You + can select a Multi-User installation by passing the + flag to the installer: sh <(curl + https://nixos.org/nix/install) --daemon. + + + The multi-user installer cannot handle systems with SELinux. + If your system has SELinux enabled, you can force the installer to run + in single-user mode. + + New builtin functions: builtins.bitAnd, @@ -49,29 +72,6 @@ new features: nix-support/propagated-user-env-packages. - - The Nix installer will no longer default to the Multi-User - installation for macOS. You can still instruct the installer to - run in multi-user mode. - - - - - The Nix installer now supports performing a Multi-User - installation for Linux computers which are running systemd. You - can select a Multi-User installation by passing the - flag to the installer: sh <(curl - https://nixos.org/nix/install) --daemon. 
- - - The multi-user installer cannot handle systems with SELinux. - If your system has SELinux enabled, you can force the installer to run - in single-user mode. - - This release has contributions from From 54996b51fba46e8f287b1ac6a58854e62b476985 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sun, 2 Sep 2018 22:12:00 +0200 Subject: [PATCH 1208/2196] Bump version --- version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version b/version index 42f7d2336ea..616187889b6 100644 --- a/version +++ b/version @@ -1 +1 @@ -2.1 \ No newline at end of file +2.2 \ No newline at end of file From 4b034f390c207d997465112a814adb60797e1d4d Mon Sep 17 00:00:00 2001 From: Michael Bishop Date: Sun, 2 Sep 2018 18:20:18 -0300 Subject: [PATCH 1209/2196] remove the old text format output --- src/libexpr/eval.cc | 38 ++++---------------------------------- 1 file changed, 4 insertions(+), 34 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 9ad0b1a2043..a67b7ae0dd6 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1730,9 +1730,7 @@ bool EvalState::eqValues(Value & v1, Value & v2) void EvalState::printStats() { bool showStats = getEnv("NIX_SHOW_STATS", "0") != "0"; - bool showJsonStats = getEnv("NIX_SHOW_STATS", "0") == "json"; Verbosity v = showStats ? lvlInfo : lvlDebug; - printMsg(v, "evaluation statistics:"); struct rusage buf; getrusage(RUSAGE_SELF, &buf); @@ -1743,13 +1741,12 @@ void EvalState::printStats() uint64_t bValues = nrValues * sizeof(Value); uint64_t bAttrsets = nrAttrsets * sizeof(Bindings) + nrAttrsInAttrsets * sizeof(Attr); - auto sum = bEnvs + bLists + bValues + bAttrsets; - #if HAVE_BOEHMGC GC_word heapSize, totalBytes; GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes); #endif - if (showJsonStats) { + if (showStats) { + printMsg(v, "evaluation statistics:"); auto outPath = getEnv("NIX_SHOW_STATS_PATH","-"); std::fstream fs; if (outPath != "-") { @@ -1761,6 +1758,7 @@ void EvalState::printStats() { auto envs = topObj.object("envs"); envs.attr("number", nrEnvs); + envs.attr("elements", nrValuesInEnvs); envs.attr("bytes", bEnvs); } { @@ -1783,6 +1781,7 @@ void EvalState::printStats() auto sets = topObj.object("sets"); sets.attr("number", nrAttrsets); sets.attr("bytes", bAttrsets); + sets.attr("elements", nrAttrsInAttrsets); } { JSONObject sizes = topObj.object("sizes"); @@ -1802,35 +1801,6 @@ void EvalState::printStats() JSONObject gc = topObj.object("gc"); gc.attr("heapSize", heapSize); gc.attr("totalBytes", totalBytes); -#endif - } else { - if (getEnv("NIX_SHOW_STATS_PATH","-") != "-") { - printError("warning: $NIX_SHOW_STATS_PATH only works in combination with NIX_SHOW_STATS=json"); - } - printMsg(v, format(" time elapsed: %1%") % cpuTime); - printMsg(v, format(" size of a value: %1%") % sizeof(Value)); - printMsg(v, format(" size of an attr: %1%") % sizeof(Attr)); - printMsg(v, format(" environments allocated: %1% (%2% bytes, %3%%%)") % nrEnvs % bEnvs % ((bEnvs*100) / sum)); - printMsg(v, format(" list elements: %1% (%2% bytes %3%%%)") % nrListElems % bLists % ((bLists*100)/sum)); - printMsg(v, format(" list concatenations: %1%") % nrListConcats); - printMsg(v, format(" values allocated: %1% (%2% bytes %3%%%)") % nrValues % bValues % ((bValues*100)/sum)); - printMsg(v, format(" sets allocated: %1% (%2% bytes %3%%%)") % nrAttrsets % bAttrsets % ((bAttrsets*100)/sum)); - printMsg(v, format(" right-biased unions: %1%") % nrOpUpdates); - printMsg(v, format(" values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied); - 
printMsg(v, format(" symbols in symbol table: %1%") % symbols.size()); - printMsg(v, format(" size of symbol table: %1%") % symbols.totalSize()); - printMsg(v, format(" number of thunks: %1%") % nrThunks); - printMsg(v, format(" number of thunks avoided: %1%") % nrAvoided); - printMsg(v, format(" number of attr lookups: %1%") % nrLookups); - printMsg(v, format(" number of primop calls: %1%") % nrPrimOpCalls); - printMsg(v, format(" number of function calls: %1%") % nrFunctionCalls); - printMsg(v, format(" total allocations: %1% bytes") % (bEnvs + bLists + bValues + bAttrsets)); - - printMsg(v, format(" sets: %1% (%2% each, %3% mb total), attrs-in-sets: %4% (%5% each, %6% mb total)") % nrAttrsets % sizeof(Bindings) % ((nrAttrsets * sizeof(Bindings)) / 1024 / 1024) % nrAttrsInAttrsets % sizeof(Attr) % ((nrAttrsInAttrsets * sizeof(Attr)) / 1024 / 1024)); - -#if HAVE_BOEHMGC - printMsg(v, format(" current Boehm heap size: %1% bytes") % heapSize); - printMsg(v, format(" total Boehm heap allocations: %1% bytes") % totalBytes); #endif } From 5f3b72cfc2f729f6bd5cf1aea9720678b76b2bf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladim=C3=ADr=20=C4=8Cun=C3=A1t?= Date: Mon, 3 Sep 2018 17:57:13 +0200 Subject: [PATCH 1210/2196] docs: change expired bzip2.org to archive.org Fixes #2396. --- configure.ac | 4 ++-- doc/manual/installation/prerequisites-source.xml | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/configure.ac b/configure.ac index cc354f6f302..9c85182efbf 100644 --- a/configure.ac +++ b/configure.ac @@ -152,9 +152,9 @@ PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"] # Look for libbz2, a required dependency. AC_CHECK_LIB([bz2], [BZ2_bzWriteOpen], [true], - [AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See http://www.bzip.org/.])]) + [AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://web.archive.org/web/20180624184756/http://www.bzip.org/.])]) AC_CHECK_HEADERS([bzlib.h], [true], - [AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See http://www.bzip.org/.])]) + [AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://web.archive.org/web/20180624184756/http://www.bzip.org/.])]) # Look for SQLite, a required dependency. diff --git a/doc/manual/installation/prerequisites-source.xml b/doc/manual/installation/prerequisites-source.xml index 01e9688d635..ef14a1d753d 100644 --- a/doc/manual/installation/prerequisites-source.xml +++ b/doc/manual/installation/prerequisites-source.xml @@ -29,7 +29,8 @@ libbz2 library. Thus you must have bzip2 installed, including development headers and libraries. If your distribution does not provide these, you can obtain bzip2 from . + xlink:href="https://web.archive.org/web/20180624184756/http://www.bzip.org/" + />. liblzma, which is provided by XZ Utils. 
If your distribution does not provide this, you can From 28418af920284e0ea830e2f0339efadb0660efa1 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 27 Aug 2018 08:41:17 -0500 Subject: [PATCH 1211/2196] download: fix size reported to progress bar --- src/libstore/download.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 973fca0b130..13913d031da 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -345,7 +345,7 @@ struct CurlDownloader : public Downloader done = true; try { - act.progress(result.data->size(), result.data->size()); + act.progress(result.bodySize, result.bodySize); callback(std::move(result)); } catch (...) { done = true; From 74f6d8767de13980b0e83a249633ee4a7e6fc547 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Tue, 4 Sep 2018 19:32:39 -0500 Subject: [PATCH 1212/2196] Get effective user in Nix commands MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ‘geteuid’ gives us the user that the command is being run as, including in setuid modes. By using geteuid to determine the uid, we can avoid the ‘sudo -i’ hack when upgrading Nix. So now, upgrading Nix on macOS is as simple as: $ sudo nix-channel --update $ sudo nix-env -u $ sudo launchctl stop org.nixos.nix-daemon $ sudo launchctl start org.nixos.nix-daemon or $ sudo systemctl restart nix-daemon --- src/libutil/util.cc | 2 +- src/nix-channel/nix-channel.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6bc64ae75a4..04c98dc80bf 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -468,7 +468,7 @@ static Lazy getHome2([]() { std::vector buf(16384); struct passwd pwbuf; struct passwd * pw; - if (getpwuid_r(getuid(), &pwbuf, buf.data(), buf.size(), &pw) != 0 + if (getpwuid_r(geteuid(), &pwbuf, buf.data(), buf.size(), &pw) != 0 || !pw || !pw->pw_dir || !pw->pw_dir[0]) throw Error("cannot determine user's home directory"); homeDir = pw->pw_dir; diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 55ebda43896..2083d3df5ca 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -169,7 +169,7 @@ int main(int argc, char ** argv) // Figure out the name of the channels profile. ; - auto pw = getpwuid(getuid()); + auto pw = getpwuid(geteuid()); std::string name = pw ? pw->pw_name : getEnv("USER", ""); if (name.empty()) throw Error("cannot figure out user name"); From 2d91012754dbe707fe60a0017496f78076d6694a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 5 Sep 2018 21:22:37 +0200 Subject: [PATCH 1213/2196] fetchurl: Respect unpack Fixes #2393. --- src/libstore/builtins/fetchurl.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index b4dcb35f951..92aec63a037 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -24,6 +24,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) Path storePath = getAttr("out"); auto mainUrl = getAttr("url"); + bool unpack = get(drv.env, "unpack", "") == "1"; /* Note: have to use a fresh downloader here because we're in a forked process. */ @@ -40,12 +41,12 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) request.decompress = false; auto decompressor = makeDecompressionSink( - hasSuffix(mainUrl, ".xz") ?
"xz" : "none", sink); + unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink); downloader->download(std::move(request), *decompressor); decompressor->finish(); }); - if (get(drv.env, "unpack", "") == "1") + if (unpack) restorePath(storePath, *source); else writeFile(storePath, *source); From 0a2545f95c155588a82f0e112aa1e8853e58477d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 5 Sep 2018 21:35:58 +0200 Subject: [PATCH 1214/2196] Log stats to stderr We shouldn't pollute stdout. --- src/libexpr/eval.cc | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index a67b7ae0dd6..08a2c2bfc6b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -26,8 +26,6 @@ #endif -using std::cout; - namespace nix { @@ -1746,14 +1744,11 @@ void EvalState::printStats() GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes); #endif if (showStats) { - printMsg(v, "evaluation statistics:"); auto outPath = getEnv("NIX_SHOW_STATS_PATH","-"); std::fstream fs; - if (outPath != "-") { + if (outPath != "-") fs.open(outPath, std::fstream::out); - printMsg(v, format(" written to: %1%") % outPath); - } - JSONObject topObj(outPath == "-" ? cout : fs, true); + JSONObject topObj(outPath == "-" ? std::cerr : fs, true); topObj.attr("cpuTime",cpuTime); { auto envs = topObj.object("envs"); From 91405986f4d076d08051ebe980af6ac6ba124323 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 5 Sep 2018 21:57:54 +0200 Subject: [PATCH 1215/2196] Convert NIX_COUNT_CALLS to JSON too --- src/libexpr/eval.cc | 76 ++++++++++++++++++++++++--------------------- 1 file changed, 41 insertions(+), 35 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 08a2c2bfc6b..ab407e56907 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1728,7 +1728,6 @@ bool EvalState::eqValues(Value & v1, Value & v2) void EvalState::printStats() { bool showStats = getEnv("NIX_SHOW_STATS", "0") != "0"; - Verbosity v = showStats ? 
lvlInfo : lvlDebug; struct rusage buf; getrusage(RUSAGE_SELF, &buf); @@ -1779,7 +1778,7 @@ void EvalState::printStats() sets.attr("elements", nrAttrsInAttrsets); } { - JSONObject sizes = topObj.object("sizes"); + auto sizes = topObj.object("sizes"); sizes.attr("Env", sizeof(Env)); sizes.attr("Value", sizeof(Value)); sizes.attr("Bindings", sizeof(Bindings)); @@ -1793,40 +1792,47 @@ void EvalState::printStats() topObj.attr("nrPrimOpCalls", nrPrimOpCalls); topObj.attr("nrFunctionCalls", nrFunctionCalls); #if HAVE_BOEHMGC - JSONObject gc = topObj.object("gc"); - gc.attr("heapSize", heapSize); - gc.attr("totalBytes", totalBytes); + { + auto gc = topObj.object("gc"); + gc.attr("heapSize", heapSize); + gc.attr("totalBytes", totalBytes); + } #endif - } - - - if (countCalls) { - v = lvlInfo; - - printMsg(v, format("calls to %1% primops:") % primOpCalls.size()); - typedef std::multimap PrimOpCalls_; - PrimOpCalls_ primOpCalls_; - for (auto & i : primOpCalls) - primOpCalls_.insert(std::pair(i.second, i.first)); - for (auto i = primOpCalls_.rbegin(); i != primOpCalls_.rend(); ++i) - printMsg(v, format("%1$10d %2%") % i->first % i->second); - - printMsg(v, format("calls to %1% functions:") % functionCalls.size()); - typedef std::multimap FunctionCalls_; - FunctionCalls_ functionCalls_; - for (auto & i : functionCalls) - functionCalls_.insert(std::pair(i.second, i.first)); - for (auto i = functionCalls_.rbegin(); i != functionCalls_.rend(); ++i) - printMsg(v, format("%1$10d %2%") % i->first % i->second->showNamePos()); - - printMsg(v, format("evaluations of %1% attributes:") % attrSelects.size()); - typedef std::multimap AttrSelects_; - AttrSelects_ attrSelects_; - for (auto & i : attrSelects) - attrSelects_.insert(std::pair(i.second, i.first)); - for (auto i = attrSelects_.rbegin(); i != attrSelects_.rend(); ++i) - printMsg(v, format("%1$10d %2%") % i->first % i->second); - + if (countCalls) { + { + auto obj = topObj.object("primops"); + for (auto & i : primOpCalls) + obj.attr(i.first, i.second); + } + { + auto list = topObj.list("functions"); + for (auto & i : functionCalls) { + auto obj = list.object(); + if (i.first->name.set()) + obj.attr("name", (const string &) i.first->name); + else + obj.attr("name", nullptr); + if (i.first->pos) { + obj.attr("file", (const string &) i.first->pos.file); + obj.attr("line", i.first->pos.line); + obj.attr("column", i.first->pos.column); + } + obj.attr("count", i.second); + } + } + { + auto list = topObj.list("attributes"); + for (auto & i : attrSelects) { + auto obj = list.object(); + if (i.first) { + obj.attr("file", (const string &) i.first.file); + obj.attr("line", i.first.line); + obj.attr("column", i.first.column); + } + obj.attr("count", i.second); + } + } + } } } From 33c3f91885429b60cab551e761f297a640aff7d5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 7 Sep 2018 16:35:48 +0200 Subject: [PATCH 1216/2196] Handle queryPathInfo() failure from substituters when fallback is enabled Fixes #1990. --- src/libstore/build.cc | 7 +++++++ src/libstore/local-store.cc | 5 +++++ 2 files changed, 12 insertions(+) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index cd37f7a3fc0..96ca2874257 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3682,6 +3682,13 @@ void SubstitutionGoal::tryNext() } catch (InvalidPath &) { tryNext(); return; + } catch (Error & e) { + if (settings.tryFallback) { + printError(e.what()); + tryNext(); + return; + } + throw; } /* Update the total expected download size. 
*/ diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c91dbf241bc..5e392c237e8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -880,6 +880,11 @@ void LocalStore::querySubstitutablePathInfos(const PathSet & paths, narInfo ? narInfo->fileSize : 0, info->narSize}; } catch (InvalidPath) { + } catch (Error & e) { + if (settings.tryFallback) + printError(e.what()); + else + throw; } } } From bba3f0a308cceb56bad4aa1efe13927360ae463f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 7 Sep 2018 17:08:43 +0200 Subject: [PATCH 1217/2196] If fallback is enabled, temporarily disable substituters after a failure Otherwise, we just keep asking the substituter for other .narinfo files, which can take a very long time due to retries/timeouts. --- src/libstore/build.cc | 6 ++++ src/libstore/http-binary-cache-store.cc | 41 ++++++++++++++++++++++++- src/libstore/local-store.cc | 1 + src/libstore/store-api.hh | 1 + 4 files changed, 48 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 96ca2874257..1402bd097c3 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3682,6 +3682,12 @@ void SubstitutionGoal::tryNext() } catch (InvalidPath &) { tryNext(); return; + } catch (SubstituterDisabled &) { + if (settings.tryFallback) { + tryNext(); + return; + } + throw; } catch (Error & e) { if (settings.tryFallback) { printError(e.what()); diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index ab524d523cf..8da0e2f9d82 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -13,6 +13,14 @@ class HttpBinaryCacheStore : public BinaryCacheStore Path cacheUri; + struct State + { + bool enabled = true; + std::chrono::steady_clock::time_point disabledUntil; + }; + + Sync _state; + public: HttpBinaryCacheStore( @@ -46,8 +54,33 @@ class HttpBinaryCacheStore : public BinaryCacheStore protected: + void maybeDisable() + { + auto state(_state.lock()); + if (state->enabled && settings.tryFallback) { + int t = 60; + printError("disabling binary cache '%s' for %s seconds", getUri(), t); + state->enabled = false; + state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); + } + } + + void checkEnabled() + { + auto state(_state.lock()); + if (state->enabled) return; + if (std::chrono::steady_clock::now() > state->disabledUntil) { + state->enabled = true; + debug("re-enabling binary cache '%s'", getUri()); + return; + } + throw SubstituterDisabled("substituter '%s' is disabled", getUri()); + } + bool fileExists(const std::string & path) override { + checkEnabled(); + try { DownloadRequest request(cacheUri + "/" + path); request.head = true; @@ -59,6 +92,7 @@ class HttpBinaryCacheStore : public BinaryCacheStore bucket is unlistable, so treat 403 as 404. 
*/ if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden) return false; + maybeDisable(); throw; } } @@ -86,12 +120,14 @@ class HttpBinaryCacheStore : public BinaryCacheStore void getFile(const std::string & path, Sink & sink) override { + checkEnabled(); auto request(makeRequest(path)); try { getDownloader()->download(std::move(request), sink); } catch (DownloadError & e) { if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden) throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri()); + maybeDisable(); throw; } } @@ -99,15 +135,18 @@ class HttpBinaryCacheStore : public BinaryCacheStore void getFile(const std::string & path, Callback> callback) override { + checkEnabled(); + auto request(makeRequest(path)); getDownloader()->enqueueDownload(request, - {[callback](std::future result) { + {[callback, this](std::future result) { try { callback(result.get().data); } catch (DownloadError & e) { if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden) return callback(std::shared_ptr()); + maybeDisable(); callback.rethrow(); } catch (...) { callback.rethrow(); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 5e392c237e8..197b9d78995 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -880,6 +880,7 @@ void LocalStore::querySubstitutablePathInfos(const PathSet & paths, narInfo ? narInfo->fileSize : 0, info->narSize}; } catch (InvalidPath) { + } catch (SubstituterDisabled) { } catch (Error & e) { if (settings.tryFallback) printError(e.what()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 7c5b495a448..099818ed6f6 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -23,6 +23,7 @@ MakeError(BuildError, Error) /* denotes a permanent build failure */ MakeError(InvalidPath, Error) MakeError(Unsupported, Error) MakeError(SubstituteGone, Error) +MakeError(SubstituterDisabled, Error) struct BasicDerivation; From d6ac762bf7dcc844db28dcc2f2864d8cebbb5cf6 Mon Sep 17 00:00:00 2001 From: Ding Xiang Fei Date: Mon, 10 Sep 2018 15:07:50 +0800 Subject: [PATCH 1218/2196] auto args on repl --- src/nix/repl.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index b71e6f905f2..1bbe256b2d8 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -31,6 +31,7 @@ struct NixRepl { string curDir; EvalState state; + Bindings * autoArgs; Strings loadedFiles; @@ -446,8 +447,7 @@ void NixRepl::loadFile(const Path & path) loadedFiles.push_back(path); Value v, v2; state.evalFile(lookupFileArg(state, path), v); - Bindings & bindings(*state.allocBindings(0)); - state.autoCallFunction(bindings, v, v2); + state.autoCallFunction(*autoArgs, v, v2); addAttrsToScope(v2); } @@ -699,6 +699,7 @@ struct CmdRepl : StoreCommand, MixEvalArgs void run(ref store) override { auto repl = std::make_unique(searchPath, openStore()); + repl->autoArgs = getAutoArgs(repl->state); repl->mainLoop(files); } }; From 0aca1ffb6ef655a5147e27e4307074f0b303000f Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Fri, 7 Sep 2018 22:10:56 -0400 Subject: [PATCH 1219/2196] Prefer 'remote builder' over 'slave' --- doc/manual/command-ref/nix-store.xml | 2 +- tests/remote-builds.nix | 36 ++++++++++++++-------------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml index 5fff64a18f9..c827d85b381 100644 --- 
a/doc/manual/command-ref/nix-store.xml +++ b/doc/manual/command-ref/nix-store.xml @@ -275,7 +275,7 @@ as a means of providing Nix store access to a restricted ssh user. Allow the connected client to request the realization of derivations. In effect, this can be used to make the host act - as a build slave. + as a remote builder. diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index d7a4b21989e..b867f13b499 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -8,8 +8,8 @@ makeTest ( let - # The configuration of the build slaves. - slave = + # The configuration of the remote builders. + builder = { config, pkgs, ... }: { services.openssh.enable = true; virtualisation.writableStore = true; @@ -36,21 +36,21 @@ in { nodes = - { slave1 = slave; - slave2 = slave; + { builder1 = builder; + builder2 = builder; client = { config, pkgs, ... }: { nix.maxJobs = 0; # force remote building nix.distributedBuilds = true; nix.buildMachines = - [ { hostName = "slave1"; + [ { hostName = "builder1"; sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; maxJobs = 1; } - { hostName = "slave2"; + { hostName = "builder2"; sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; @@ -75,33 +75,33 @@ in $client->copyFileFromHost("key", "/root/.ssh/id_ed25519"); $client->succeed("chmod 600 /root/.ssh/id_ed25519"); - # Install the SSH key on the slaves. + # Install the SSH key on the builders. $client->waitForUnit("network.target"); - foreach my $slave ($slave1, $slave2) { - $slave->succeed("mkdir -p -m 700 /root/.ssh"); - $slave->copyFileFromHost("key.pub", "/root/.ssh/authorized_keys"); - $slave->waitForUnit("sshd"); - $client->succeed("ssh -o StrictHostKeyChecking=no " . $slave->name() . " 'echo hello world'"); + foreach my $builder ($builder1, $builder2) { + $builder->succeed("mkdir -p -m 700 /root/.ssh"); + $builder->copyFileFromHost("key.pub", "/root/.ssh/authorized_keys"); + $builder->waitForUnit("sshd"); + $client->succeed("ssh -o StrictHostKeyChecking=no " . $builder->name() . " 'echo hello world'"); } - # Perform a build and check that it was performed on the slave. + # Perform a build and check that it was performed on the builder. my $out = $client->succeed( "nix-build ${expr nodes.client.config 1} 2> build-output", "grep -q Hello build-output" ); - $slave1->succeed("test -e $out"); + $builder1->succeed("test -e $out"); # And a parallel build. my ($out1, $out2) = split /\s/, $client->succeed('nix-store -r $(nix-instantiate ${expr nodes.client.config 2})\!out $(nix-instantiate ${expr nodes.client.config 3})\!out'); - $slave1->succeed("test -e $out1 -o -e $out2"); - $slave2->succeed("test -e $out1 -o -e $out2"); + $builder1->succeed("test -e $out1 -o -e $out2"); + $builder2->succeed("test -e $out1 -o -e $out2"); # And a failing build. $client->fail("nix-build ${expr nodes.client.config 5}"); - # Test whether the build hook automatically skips unavailable slaves. - $slave1->block; + # Test whether the build hook automatically skips unavailable builders. 
+ $builder1->block; $client->succeed("nix-build ${expr nodes.client.config 4}"); ''; From 901dfc79788595f6382c8d8d9f584ec38a4cc010 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Sep 2018 16:36:11 +0200 Subject: [PATCH 1220/2196] Fix build failure if parallel xz is not available --- src/libutil/compression.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 204c63cd26f..0dd84e32034 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -250,7 +250,7 @@ struct XzCompressionSink : CompressionSink ret = lzma_stream_encoder_mt(&strm, &mt_options); done = true; #else - printMsg(lvlError, "warning: parallel compression requested but not supported for metho d '%1%', falling back to single-threaded compression", method); + printMsg(lvlError, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression"); #endif } From 52f6d541b9f872568f0043a20accc847ce6d5412 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 17 Sep 2018 16:36:30 +0200 Subject: [PATCH 1221/2196] nix-env: Fix segfault if -f argument is not a directory or a Nix expression Fixes #2425. --- src/nix-env/nix-env.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index a43b103f6ec..f9c8a8d313e 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -150,10 +150,8 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v) if (stat(path.c_str(), &st) == -1) throw SysError(format("getting information about '%1%'") % path); - if (isNixExpr(path, st)) { + if (isNixExpr(path, st)) state.evalFile(path, v); - return; - } /* The path is a directory. Put the Nix expressions in the directory in a set, with the file name of each expression as @@ -161,13 +159,15 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v) set flat, not nested, to make it easier for a user to have a ~/.nix-defexpr directory that includes some system-wide directory). */ - if (S_ISDIR(st.st_mode)) { + else if (S_ISDIR(st.st_mode)) { state.mkAttrs(v, 1024); state.mkList(*state.allocAttr(v, state.symbols.create("_combineChannels")), 0); StringSet attrs; getAllExprs(state, path, attrs, v); v.attrs->sort(); } + + else throw Error("path '%s' is not a directory or a Nix expression", path); } From 0cd863197bd1f2ad5b5891e95bf75f7040d22351 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Mon, 17 Sep 2018 08:32:35 -0400 Subject: [PATCH 1222/2196] docs: document deprecated aliases --- doc/manual/command-ref/conf-file.xml | 191 ++++++++++++++++++++++++--- 1 file changed, 169 insertions(+), 22 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index 26fe58d043b..fd09883be40 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -135,7 +135,6 @@ false. - builders @@ -159,7 +158,6 @@ false. - build-users-group This options specifies the Unix group containing @@ -210,7 +208,6 @@ false. - connect-timeout @@ -243,7 +240,6 @@ false. - extra-sandbox-paths @@ -283,7 +279,6 @@ false. - fallback If set to true, Nix will fall @@ -293,7 +288,6 @@ false. - fsync-metadata If set to true, changes to the @@ -304,7 +298,6 @@ false. - hashed-mirrors A list of web servers used by @@ -367,10 +360,8 @@ builtins.fetchurl { options a store path was built), so by default this option is on. 
Turn it off to save a bit of disk space (or a lot if keep-outputs is also turned on). - - keep-env-derivations If false (default), derivations @@ -394,7 +385,6 @@ builtins.fetchurl { - keep-outputs If true, the garbage collector @@ -408,10 +398,8 @@ builtins.fetchurl { only at build time (e.g., the C compiler, or source tarballs downloaded from the network). To prevent it from doing so, set this option to true. - - max-build-log-size @@ -444,10 +432,8 @@ builtins.fetchurl { overridden using the () command line switch. - - max-silent-time @@ -603,7 +589,6 @@ password my-password - repeat How many times to repeat builds to check whether @@ -615,7 +600,6 @@ password my-password - require-sigs If set to true (the default), @@ -679,7 +663,6 @@ password my-password - sandbox-dev-shm-size This option determines the maximum size of the @@ -745,7 +728,6 @@ password my-password - substituters A list of URLs of substituters, separated by @@ -754,7 +736,6 @@ password my-password - system This option specifies the canonical Nix system @@ -795,7 +776,6 @@ password my-password - trusted-public-keys A whitespace-separated list of public keys. When @@ -806,7 +786,6 @@ password my-password - trusted-substituters A list of URLs of substituters, separated by @@ -819,7 +798,6 @@ password my-password - trusted-users @@ -845,8 +823,177 @@ password my-password + + + + Deprecated Settings + + + + + + + binary-caches + + Deprecated: + binary-caches is now an alias to + . + + + + binary-cache-public-keys + + Deprecated: + binary-cache-public-keys is now an alias to + . + + + + build-compress-log + + Deprecated: + build-compress-log is now an alias to + . + + + + build-cores + + Deprecated: + build-cores is now an alias to + . + + + + build-extra-chroot-dirs + + Deprecated: + build-extra-chroot-dirs is now an alias to + . + + + + build-extra-sandbox-paths + + Deprecated: + build-extra-sandbox-paths is now an alias to + . + + + + build-fallback + + Deprecated: + build-fallback is now an alias to + . + + + + build-max-jobs + + Deprecated: + build-max-jobs is now an alias to + . + + + + build-max-log-size + + Deprecated: + build-max-log-size is now an alias to + . + + + + build-max-silent-time + + Deprecated: + build-max-silent-time is now an alias to + . + + + + build-repeat + + Deprecated: + build-repeat is now an alias to + . + + + + build-timeout + + Deprecated: + build-timeout is now an alias to + . + + + + build-use-chroot + + Deprecated: + build-use-chroot is now an alias to + . + + + build-use-sandbox + + Deprecated: + build-use-sandbox is now an alias to + . + + + + build-use-substitutes + + Deprecated: + build-use-substitutes is now an alias to + . + + + + gc-keep-derivations + + Deprecated: + gc-keep-derivations is now an alias to + . + + + + gc-keep-outputs + + Deprecated: + gc-keep-outputs is now an alias to + . + + + + env-keep-derivations + + Deprecated: + env-keep-derivations is now an alias to + . + + + + extra-binary-caches + + Deprecated: + extra-binary-caches is now an alias to + . + + + + trusted-binary-caches + + Deprecated: + trusted-binary-caches is now an alias to + . 
+ + + From 1241a589756414222bbac731686dc3e2cc2538b3 Mon Sep 17 00:00:00 2001 From: Luke Clifton Date: Wed, 19 Sep 2018 15:10:06 +0800 Subject: [PATCH 1223/2196] Look inside the user profile --- scripts/nix-profile-daemon.sh.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in index 1be9a0755d8..432100d1622 100644 --- a/scripts/nix-profile-daemon.sh.in +++ b/scripts/nix-profile-daemon.sh.in @@ -61,8 +61,8 @@ elif [ -e /etc/ssl/certs/ca-bundle.crt ]; then # Old NixOS export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS export NIX_SSL_CERT_FILE=/etc/pki/tls/certs/ca-bundle.crt -elif [ -e "$NIX_USER_PROFILE_DIR/etc/ssl/certs/ca-bundle.crt" ]; then # fall back to cacert in the user's Nix profile - export NIX_SSL_CERT_FILE=$NIX_USER_PROFILE_DIR/etc/ssl/certs/ca-bundle.crt +elif [ -e "$NIX_USER_PROFILE_DIR/profile/etc/ssl/certs/ca-bundle.crt" ]; then # fall back to cacert in the user's Nix profile + export NIX_SSL_CERT_FILE=$NIX_USER_PROFILE_DIR/profile/etc/ssl/certs/ca-bundle.crt elif [ -e "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" ]; then # fall back to cacert in the default Nix profile export NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt fi From 10a7f199372c4bca5d81030e8a39ca7537ea47ca Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Wed, 19 Sep 2018 12:48:08 -0400 Subject: [PATCH 1224/2196] nix-shell: explain the merging property close to the example --- doc/manual/command-ref/nix-shell.xml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/manual/command-ref/nix-shell.xml b/doc/manual/command-ref/nix-shell.xml index 5c44c4a8f44..3f12dda040a 100644 --- a/doc/manual/command-ref/nix-shell.xml +++ b/doc/manual/command-ref/nix-shell.xml @@ -317,13 +317,14 @@ while (my $token = $p->get_tag("a")) { -Finally, the following Haskell script uses a specific branch of -Nixpkgs/NixOS (the 14.12 stable branch): +Finally, using the merging of multiple nix-shell shebangs the +following Haskell script uses a specific branch of Nixpkgs/NixOS (the +18.03 stable branch): Date: Wed, 19 Sep 2018 13:01:27 -0400 Subject: [PATCH 1225/2196] nix-shell: document double quotes being necessary for a simple Nix expression in the shebang --- doc/manual/command-ref/nix-shell.xml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/manual/command-ref/nix-shell.xml b/doc/manual/command-ref/nix-shell.xml index 3f12dda040a..cb443c888d3 100644 --- a/doc/manual/command-ref/nix-shell.xml +++ b/doc/manual/command-ref/nix-shell.xml @@ -317,6 +317,20 @@ while (my $token = $p->get_tag("a")) { +Sometimes you need to pass a simple Nix expression to customize +a package like Terraform: + + + +You must use double quotes (") when +passing a simple Nix expression in a nix-shell shebang. + + Finally, using the merging of multiple nix-shell shebangs the following Haskell script uses a specific branch of Nixpkgs/NixOS (the 18.03 stable branch): From 1915862767331dba33bc0f40bc09a3c32a22ebea Mon Sep 17 00:00:00 2001 From: Anders Riutta Date: Mon, 17 Sep 2018 13:19:02 -0700 Subject: [PATCH 1226/2196] Upgrade docs: improve the upgrade command and make it more copy-pastable. 
--- doc/manual/installation/upgrading.xml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/manual/installation/upgrading.xml b/doc/manual/installation/upgrading.xml index a3f86ade95c..30670d7fec9 100644 --- a/doc/manual/installation/upgrading.xml +++ b/doc/manual/installation/upgrading.xml @@ -7,15 +7,16 @@ Upgrading Nix - Multi-user Nix users on macOS can upgrade Nix by running - sudo -i sh -c 'nix-channel --update && nix-env - -iA nixpkgs.nix'; sudo launchctl stop org.nixos.nix-daemon; sudo - launchctl start org.nixos.nix-daemon. + Multi-user Nix users on macOS can upgrade Nix by running: + sudo -i sh -c 'nix-channel --update && + nix-env -iA nixpkgs.nix && + launchctl remove org.nixos.nix-daemon && + launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist' + - Single-user installations of Nix should run nix-channel - --update; nix-env -iA nixpkgs.nix. + Single-user installations of Nix should run this: + nix-channel --update; nix-env -iA nixpkgs.nix - From fb72104b80eb5747788ac32bcef9fc1db00d9825 Mon Sep 17 00:00:00 2001 From: Luke Clifton Date: Thu, 20 Sep 2018 07:33:35 +0800 Subject: [PATCH 1227/2196] Search NIX_PROFILE for SSL CA --- scripts/nix-profile-daemon.sh.in | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in index 432100d1622..567a543d95e 100644 --- a/scripts/nix-profile-daemon.sh.in +++ b/scripts/nix-profile-daemon.sh.in @@ -61,10 +61,13 @@ elif [ -e /etc/ssl/certs/ca-bundle.crt ]; then # Old NixOS export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS export NIX_SSL_CERT_FILE=/etc/pki/tls/certs/ca-bundle.crt -elif [ -e "$NIX_USER_PROFILE_DIR/profile/etc/ssl/certs/ca-bundle.crt" ]; then # fall back to cacert in the user's Nix profile - export NIX_SSL_CERT_FILE=$NIX_USER_PROFILE_DIR/profile/etc/ssl/certs/ca-bundle.crt -elif [ -e "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" ]; then # fall back to cacert in the default Nix profile - export NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt +else + # Fall back to what is in the nix profiles, favouring whatever is defined last. 
+ for i in $NIX_PROFILES; do + if [ -e $i/etc/ssl/certs/ca-bundle.crt ]; then + export NIX_SSL_CERT_FILE=$i/etc/ssl/certs/ca-bundle.crt + fi + done fi export NIX_PATH="nixpkgs=@localstatedir@/nix/profiles/per-user/root/channels/nixpkgs:@localstatedir@/nix/profiles/per-user/root/channels" From 8df367a92dc417363f06a10f3e6da87d986d04f2 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 24 Sep 2018 11:58:40 -0500 Subject: [PATCH 1228/2196] nix-daemon: allow setting builders to "" by any user (untrusted) (cherry picked from commit a94a2eb1cb1c81e90a7529be5fecac27899a3442) --- src/nix-daemon/nix-daemon.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 644fa6681de..615c1f5dc06 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -557,7 +557,8 @@ static void performOp(TunnelLogger * logger, ref store, ; else if (trusted || name == settings.buildTimeout.name - || name == "connect-timeout") + || name == "connect-timeout" + || (name == "builders" && value == "")) settings.set(name, value); else if (setSubstituters(settings.substituters)) ; From 0ae8d4033d0b140b98e45c57e25b23d1970d356c Mon Sep 17 00:00:00 2001 From: Will Fancher Date: Tue, 25 Sep 2018 21:19:24 -0400 Subject: [PATCH 1229/2196] Fix overflow when verifying signatures of content addressable paths --- src/nix/verify.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 6540208a8a2..7ef571561a0 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -120,7 +120,7 @@ struct CmdVerify : StorePathsCommand for (auto sig : sigs) { if (sigsSeen.count(sig)) continue; sigsSeen.insert(sig); - if (info->checkSignature(publicKeys, sig)) + if (validSigs < ValidPathInfo::maxSigs && info->checkSignature(publicKeys, sig)) validSigs++; } }; From b7091ce41e0073b147678f05b9a620b9bb2903c1 Mon Sep 17 00:00:00 2001 From: Will Fancher Date: Tue, 25 Sep 2018 22:18:52 -0400 Subject: [PATCH 1230/2196] Add a test for signed content-addressed paths --- tests/signing.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/signing.sh b/tests/signing.sh index 46929639199..9e29e3fbf06 100644 --- a/tests/signing.sh +++ b/tests/signing.sh @@ -62,6 +62,10 @@ outPathCA=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build ./fixed.nix -A good.0 --no nix verify $outPathCA nix verify $outPathCA --sigs-needed 1000 +# Check that signing a content-addressed path doesn't overflow validSigs +nix sign-paths --key-file $TEST_ROOT/sk1 $outPathCA +nix verify -r $outPathCA --sigs-needed 1000 --trusted-public-keys $pk1 + # Copy to a binary cache. 
nix copy --to file://$cacheDir $outPath2 From 44e86304b611a955f4e934fc160f3f4a0a2b1c92 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Sep 2018 12:03:58 +0200 Subject: [PATCH 1231/2196] Make NAR header check more robust Changes std::bad_alloc into bad archive: input doesn't look like a Nix archive --- src/libutil/archive.cc | 2 +- src/libutil/serialise.cc | 5 +++-- src/libutil/serialise.hh | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 1be8934a2eb..bb68e82886d 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -283,7 +283,7 @@ void parseDump(ParseSink & sink, Source & source) { string version; try { - version = readString(source); + version = readString(source, narVersionMagic1.size()); } catch (SerialisationError & e) { /* This generally means the integer at the start couldn't be decoded. Ignore and throw the exception below. */ diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 17448f70efb..31df6fdfde8 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -268,16 +268,17 @@ void readPadding(size_t len, Source & source) size_t readString(unsigned char * buf, size_t max, Source & source) { auto len = readNum(source); - if (len > max) throw Error("string is too long"); + if (len > max) throw SerialisationError("string is too long"); source(buf, len); readPadding(len, source); return len; } -string readString(Source & source) +string readString(Source & source, size_t max) { auto len = readNum(source); + if (len > max) throw SerialisationError("string is too long"); std::string res(len, 0); source((unsigned char*) res.data(), len); readPadding(len, source); diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 4b6ad5da5b9..969e4dff383 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -284,7 +284,7 @@ inline uint64_t readLongLong(Source & source) void readPadding(size_t len, Source & source); size_t readString(unsigned char * buf, size_t max, Source & source); -string readString(Source & source); +string readString(Source & source, size_t max = std::numeric_limits::max()); template T readStrings(Source & source); Source & operator >> (Source & in, string & s); From ed78582847d3932763ad9b7b239f843306fa6fe9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Sep 2018 21:19:34 +0200 Subject: [PATCH 1232/2196] sinkToSource(): Start the coroutine lazily In particular this causes copyStorePath() from HttpBinaryCacheStore to only start a download if needed. E.g. if the destination LocalStore goes to sleep waiting for the path lock and another process creates the path, then LocalStore::addToStore() will never read from the source so we don't have to do the download. 
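The lazy-start behaviour described above can be illustrated in isolation: the coroutine driving the producer is only constructed on the first read, so if the consumer never pulls any data, the producer (here, the download) never runs. The following is a minimal standalone sketch, not the Nix implementation; it assumes only Boost.Coroutine2 and C++17, and all names in it are illustrative:

#include <boost/coroutine2/coroutine.hpp>
#include <functional>
#include <iostream>
#include <optional>
#include <string>

// Sketch of a pull-style source that defers starting its producer coroutine
// until the first read(), mirroring the lazy behaviour described in the
// commit message above.
struct LazySource
{
    using coro_t = boost::coroutines2::coroutine<std::string>;

    std::function<void(coro_t::push_type &)> producer;
    std::optional<coro_t::pull_type> coro;   // deliberately not created in the constructor

    explicit LazySource(std::function<void(coro_t::push_type &)> p)
        : producer(std::move(p)) { }

    std::optional<std::string> read()
    {
        if (!coro) coro.emplace(producer);   // first read: start the producer now
        if (!*coro) return std::nullopt;     // producer has finished
        std::string chunk = coro->get();     // currently yielded chunk
        (*coro)();                           // resume producer for the next one
        return chunk;
    }
};

int main()
{
    LazySource src([](LazySource::coro_t::push_type & yield) {
        std::cout << "producer started\n";   // printed only if data is actually pulled
        yield("hello ");
        yield("world\n");
    });

    // Nothing has been produced yet; the first read() triggers it.
    while (auto chunk = src.read())
        std::cout << *chunk;
}

(Boost.Coroutine2 is header-only but requires linking against Boost.Context.)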
--- src/libutil/serialise.cc | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 31df6fdfde8..0e75eeec2bf 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -169,17 +169,13 @@ std::unique_ptr sinkToSource( { typedef boost::coroutines2::coroutine coro_t; + std::function fun; std::function eof; - coro_t::pull_type coro; + std::experimental::optional coro; + bool started = false; SinkToSource(std::function fun, std::function eof) - : eof(eof) - , coro([&](coro_t::push_type & yield) { - LambdaSink sink([&](const unsigned char * data, size_t len) { - if (len) yield(std::string((const char *) data, len)); - }); - fun(sink); - }) + : fun(fun), eof(eof) { } @@ -188,11 +184,19 @@ std::unique_ptr sinkToSource( size_t read(unsigned char * data, size_t len) override { - if (!coro) { eof(); abort(); } + if (!coro) + coro = coro_t::pull_type([&](coro_t::push_type & yield) { + LambdaSink sink([&](const unsigned char * data, size_t len) { + if (len) yield(std::string((const char *) data, len)); + }); + fun(sink); + }); + + if (!*coro) { eof(); abort(); } if (pos == cur.size()) { - if (!cur.empty()) coro(); - cur = coro.get(); + if (!cur.empty()) (*coro)(); + cur = coro->get(); pos = 0; } From 98b2cc2e6e63bfa49b8f75169a39b751b3e2c32c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Sep 2018 21:39:06 +0200 Subject: [PATCH 1233/2196] Untabify --- src/libstore/store-api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 1f42097fccf..2c0f68651e4 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -610,7 +610,7 @@ void copyStorePath(ref srcStore, ref dstStore, }); srcStore->narFromPath({storePath}, wrapperSink); }, [&]() { - throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri()); + throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri()); }); dstStore->addToStore(*info, *source, repair, checkSigs); From 97504300032c7c57388d68bbe4a05b0a494e81aa Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 26 Sep 2018 21:43:17 +0200 Subject: [PATCH 1234/2196] Ensure download thread liveness * Don't wait forever for the client to remove data from the buffer. This does mean that the buffer can grow without bounds (e.g. when downloading is faster than writing to disk), but meh. * Don't hold the state lock while calling the sink. The sink could take any amount of time to process the data (in particular when it's actually a coroutine), so we don't want to block the download thread. --- src/libstore/download.cc | 45 +++++++++++++++++++++++----------------- 1 file changed, 26 insertions(+), 19 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 13913d031da..f44f1836b31 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -710,11 +710,12 @@ void Downloader::download(DownloadRequest && request, Sink & sink) /* If the buffer is full, then go to sleep until the calling thread wakes us up (i.e. when it has removed data from the - buffer). Note: this does stall the download thread. */ - while (state->data.size() > 1024 * 1024) { - if (state->quit) return; + buffer). We don't wait forever to prevent stalling the + download thread. (Hopefully sleeping will throttle the + sender.) 
*/ + if (state->data.size() > 1024 * 1024) { debug("download buffer is full; going to sleep"); - state.wait(state->request); + state.wait_for(state->request, std::chrono::seconds(10)); } /* Append data to the buffer and wake up the calling @@ -736,30 +737,36 @@ void Downloader::download(DownloadRequest && request, Sink & sink) state->request.notify_one(); }}); - auto state(_state->lock()); - while (true) { checkInterrupt(); - /* If no data is available, then wait for the download thread - to wake us up. */ - if (state->data.empty()) { + std::string chunk; + + /* Grab data if available, otherwise wait for the download + thread to wake us up. */ + { + auto state(_state->lock()); + + while (state->data.empty()) { - if (state->quit) { - if (state->exc) std::rethrow_exception(state->exc); - break; + if (state->quit) { + if (state->exc) std::rethrow_exception(state->exc); + return; + } + + state.wait(state->avail); } - state.wait(state->avail); - } + chunk = std::move(state->data); - /* If data is available, then flush it to the sink and wake up - the download thread if it's blocked on a full buffer. */ - if (!state->data.empty()) { - sink((unsigned char *) state->data.data(), state->data.size()); - state->data.clear(); state->request.notify_one(); } + + /* Flush the data to the sink and wake up the download thread + if it's blocked on a full buffer. We don't hold the state + lock while doing this to prevent blocking the download + thread if sink() takes a long time. */ + sink((unsigned char *) chunk.data(), chunk.size()); } } From d4f78a6b64a70c7135cd841eeaef8e698b0447f8 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Wed, 26 Sep 2018 15:37:13 -0500 Subject: [PATCH 1235/2196] bump base nixpkgs used by default 18.03 -> 18.09 --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index e359ebcb2ce..415c87da7cb 100644 --- a/release.nix +++ b/release.nix @@ -1,5 +1,5 @@ { nix ? builtins.fetchGit ./. -, nixpkgs ? builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-18.03"; } +, nixpkgs ? builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-18.09"; } , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] }: From ed25753501a7a3f77c097df01db299bdb60ede96 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Wed, 26 Sep 2018 22:59:41 +0200 Subject: [PATCH 1236/2196] nix doctor: reimplement profile warning without gcroot check Calculating roots seems significantly slower on darwin compared to linux. Checking for /profile/ links could show some false positives but should still catch most issues. 
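The heuristic described above can be shown in isolation. This is only an illustrative sketch (using std::filesystem rather than Nix's own path utilities, with a hypothetical example path, and without cycle detection): it follows a profile symlink chain one hop at a time and reports whether the chain ever passes through a .../profiles/... directory, which is where profile generations are registered and hence protected from garbage collection.

#include <filesystem>
#include <iostream>
#include <string>

namespace fs = std::filesystem;

// Illustrative only: follow symlinks one hop at a time, resolving relative
// targets against the link's parent directory, until the path mentions
// "/profiles/" or is no longer a symlink.
bool pointsIntoProfiles(fs::path dir)
{
    while (dir.string().find("/profiles/") == std::string::npos
           && fs::is_symlink(dir)) {
        fs::path target = fs::read_symlink(dir);
        dir = target.is_absolute() ? target : dir.parent_path() / target;
    }
    return dir.string().find("/profiles/") != std::string::npos;
}

int main()
{
    // Hypothetical example: ~/.nix-profile normally resolves into
    // /nix/var/nix/profiles/per-user/<user>/profile.
    std::cout << pointsIntoProfiles("/home/alice/.nix-profile") << "\n";
}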
--- src/nix/doctor.cc | 39 +++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 6112b1f650a..b608b9d59ef 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -42,7 +42,8 @@ struct CmdDoctor : StoreCommand checkStoreProtocol(store->getProtocol()); } - void checkNixInPath() { + void checkNixInPath() + { PathSet dirs; for (auto & dir : tokenizeString(getEnv("PATH"), ":")) @@ -58,32 +59,29 @@ struct CmdDoctor : StoreCommand } } - void checkProfileRoots(ref store) { + void checkProfileRoots(ref store) + { PathSet dirs; - Roots roots = store->findRoots(); - for (auto & dir : tokenizeString(getEnv("PATH"), ":")) + for (auto & dir : tokenizeString(getEnv("PATH"), ":")) { + Path profileDir = dirOf(dir); try { - auto profileDir = canonPath(dirOf(dir), true); - if (hasSuffix(profileDir, "user-environment") && - store->isValidPath(profileDir)) { - PathSet referrers; - store->computeFSClosure({profileDir}, referrers, true, - settings.gcKeepOutputs, settings.gcKeepDerivations); - bool found = false; - for (auto & i : roots) - if (referrers.find(i.second) != referrers.end()) - found = true; - if (!found) - dirs.insert(dir); + Path userEnv = canonPath(profileDir, true); + if (store->isStorePath(userEnv) && hasSuffix(userEnv, "user-environment")) { + while (profileDir.find("/profiles/") == std::string::npos && isLink(profileDir)) + profileDir = absPath(readLink(profileDir), dirOf(profileDir)); + + if (profileDir.find("/profiles/") == std::string::npos) + dirs.insert(dir); } } catch (SysError &) {} + } if (!dirs.empty()) { - std::cout << "Warning: found profiles without a gcroot." << std::endl; - std::cout << "The generation this profile points to will be deleted with the next gc, resulting" << std::endl; - std::cout << "in broken symlinks. Make sure your profiles are in " << settings.nixStateDir << "/profiles." << std::endl; + std::cout << "Warning: found profiles outside of " << settings.nixStateDir << "/profiles." << std::endl; + std::cout << "The generation this profile points to might not have a gcroot and could be" << std::endl; + std::cout << "garbage collected, resulting in broken symlinks." << std::endl; std::cout << std::endl; for (auto & dir : dirs) std::cout << " " << dir << std::endl; @@ -91,7 +89,8 @@ struct CmdDoctor : StoreCommand } } - void checkStoreProtocol(unsigned int storeProto) { + void checkStoreProtocol(unsigned int storeProto) + { auto clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) ? SERVE_PROTOCOL_VERSION : PROTOCOL_VERSION; From 7cba4214a83fc613b60bb3abfe7ba18ff8201d37 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Sep 2018 21:01:19 +0200 Subject: [PATCH 1237/2196] Don't talk about a "current folder build output" Presumably this refers to ./default.nix but the support for that in 'nix' is tenuous. Also folders are a Mac thing. 
--- src/nix/copy.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 91711c8b46d..96bd453d87b 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ -69,12 +69,12 @@ struct CmdCopy : StorePathsCommand }, #ifdef ENABLE_S3 Example{ - "To populate the current folder build output to a S3 binary cache:", - "nix copy --to s3://my-bucket?region=eu-west-1" + "To copy Hello to an S3 binary cache:", + "nix copy --to s3://my-bucket?region=eu-west-1 nixpkgs.hello" }, Example{ - "To populate the current folder build output to an S3-compatible binary cache:", - "nix copy --to s3://my-bucket?region=eu-west-1&endpoint=example.com" + "To copy Hello to an S3-compatible binary cache:", + "nix copy --to s3://my-bucket?region=eu-west-1&endpoint=example.com nixpkgs.hello" }, #endif }; From f11acbaf175ebb2c844cb40900adb912ff58f613 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 27 Sep 2018 12:40:19 -0400 Subject: [PATCH 1238/2196] nix-shell: add bashInteractive to the start of the PATH, set SHELL Tools which re-exec `$SHELL` or `$0` or `basename $SHELL` or even just `bash` will otherwise get the non-interactive bash, providing a broken shell for the same reasons described in https://github.com/NixOS/nixpkgs/issues/27493. Extends c94f3d5575d7af5403274d1e9e2f3c9d72989751 --- src/nix-build/nix-build.cc | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 94d3a27560f..b78f3d9e424 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -417,16 +417,20 @@ void mainWrapped(int argc, char * * argv) "dontAddDisableDepTrack=1; " "[ -e $stdenv/setup ] && source $stdenv/setup; " "%3%" + "PATH=\"%4%:$PATH\"; " + "SHELL=%5%; " "set +e; " R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s" "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; " "unset NIX_ENFORCE_PURITY; " "shopt -u nullglob; " - "unset TZ; %4%" - "%5%", + "unset TZ; %6%" + "%7%", (Path) tmpDir, (pure ? "" : "p=$PATH; "), (pure ? "" : "PATH=$PATH:$p; unset p; "), + dirOf(shell), + shell, (getenv("TZ") ? (string("export TZ='") + getenv("TZ") + "'; ") : ""), envCommand)); From 63786cbd3bc0a2d0651c09eac6ad5ae609b82902 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 27 Sep 2018 21:32:25 +0200 Subject: [PATCH 1239/2196] S3BinaryCacheStore: Don't create buckets This meant that making a typo in an s3:// URI would cause a bucket to be created. Also it didn't handle eventual consistency very well. Now it's up to the user to create the bucket. --- src/libstore/s3-binary-cache-store.cc | 28 --------------------------- 1 file changed, 28 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 7711388f05a..ba11ce6bb6d 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -19,8 +19,6 @@ #include #include #include -#include -#include #include #include #include @@ -202,32 +200,6 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { if (!diskCache->cacheExists(getUri(), wantMassQuery_, priority)) { - /* Create the bucket if it doesn't already exists. */ - // FIXME: HeadBucket would be more appropriate, but doesn't return - // an easily parsed 404 message. 
- auto res = s3Helper.client->GetBucketLocation( - Aws::S3::Model::GetBucketLocationRequest().WithBucket(bucketName)); - - if (!res.IsSuccess()) { - if (res.GetError().GetErrorType() != Aws::S3::S3Errors::NO_SUCH_BUCKET) - throw Error(format("AWS error checking bucket '%s': %s") % bucketName % res.GetError().GetMessage()); - - printInfo("creating S3 bucket '%s'...", bucketName); - - // Stupid S3 bucket locations. - auto bucketConfig = Aws::S3::Model::CreateBucketConfiguration(); - if (s3Helper.config->region != "us-east-1") - bucketConfig.SetLocationConstraint( - Aws::S3::Model::BucketLocationConstraintMapper::GetBucketLocationConstraintForName( - s3Helper.config->region)); - - checkAws(format("AWS error creating bucket '%s'") % bucketName, - s3Helper.client->CreateBucket( - Aws::S3::Model::CreateBucketRequest() - .WithBucket(bucketName) - .WithCreateBucketConfiguration(bucketConfig))); - } - BinaryCacheStore::init(); diskCache->createCache(getUri(), storeDir, wantMassQuery_, priority); From 51cbeec49ae78b0109dbed2e9efd641415853aa7 Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Thu, 27 Sep 2018 16:54:20 -0400 Subject: [PATCH 1240/2196] Update docs to describe how s3:// URLS does in fact support endpoint, region, and profile for upload --- doc/manual/packages/s3-substituter.xml | 202 +++++++++++++------------ 1 file changed, 107 insertions(+), 95 deletions(-) diff --git a/doc/manual/packages/s3-substituter.xml b/doc/manual/packages/s3-substituter.xml index bcd91cfdbcc..ea654392c6b 100644 --- a/doc/manual/packages/s3-substituter.xml +++ b/doc/manual/packages/s3-substituter.xml @@ -12,8 +12,49 @@ from Amazon S3 and S3 compatible services. This uses the same binary cache mechanism that Nix usually uses to fetch prebuilt binaries from cache.nixos.org. +The following options can be specified as URL parameters to +the S3 URL: + + + profile + + + The name of the AWS configuration profile to use. By default + Nix will use the default profile. + + + + + region + + + The region of the S3 bucket. us–east-1 by + default. + + + + If your bucket is not in us–east-1, you + should always explicitly specify the region parameter. + + + + + endpoint + + + The URL to your S3-compatible service, for when not using + Amazon S3. Do not specify this value if you're using Amazon + S3. + + This endpoint must support HTTPS and will use + path-based addressing instead of virtual host based + addressing. + + + + In this example we will use the bucket named -example-bucket. +example-nix-cache.
Anonymous Reads to your S3-compatible binary cache @@ -24,65 +65,56 @@ fetch prebuilt binaries from cache.nixos.org. cache. For AWS S3 the binary cache URL for example bucket will be - exactly https://example-bucket.s3.amazonaws.com. For S3 - compatible binary caches ago have to consult your software's - documentation. + exactly https://example-nix-cache.s3.amazonaws.com or + s3://example-nix-cache. For S3 compatible binary caches, + consult that cache's documentation. Your bucket will need the following bucket policy: - - - +]]>
Authenticated Reads to your S3 binary cache For AWS S3 the binary cache URL for example bucket will be - exactly s3://example-bucket. + exactly s3://example-nix-cache. Nix will use the default credential provider chain for authenticating requests to Amazon S3. - Nix supports authenticated writes to S3 compatible binary - caches but only supports Authenticated reads from Amazon S3. - Additionally, the following limitations are in place for - authenticated reads: - - - The bucket must actually be hosted by Amazon S3 and - not an S3 compatible - service. - - The bucket must be within the - us-east-1 region. - - The Amazon credentials, if stored in a credential - profile, must be stored in the default - profile. - + Nix supports authenticated reads from Amazon S3 and S3 + compatible binary caches. Your bucket will need a bucket policy allowing the desired - users to perform the s3:GetObject action on all - objects in the bucket. + users to perform the s3:GetObject and + s3:GetBucketLocation action on all objects in the + bucket. The anonymous policy in can be updated to + have a restricted Principal to support + this.
@@ -91,69 +123,49 @@ fetch prebuilt binaries from cache.nixos.org. Nix support fully supports writing to Amazon S3 and S3 compatible buckets. The binary cache URL for our example bucket will - be s3://example-bucket. + be s3://example-nix-cache. Nix will use the default credential provider chain for authenticating requests to Amazon S3. - The following options can be specified as URL parameters to - the S3 URL: - - profile - - - The name of the AWS configuration profile to use. By default - Nix will use the default profile. - - - - - region - - - The region of the S3 bucket. us–east-1 by - default. - - - - - endpoint - - - The URL to your S3-compatible service, for when not using - Amazon S3. Do not specify this value if you're using Amazon - S3. - - This endpoint must support HTTPS and will use - path-based addressing instead of virtual host based - addressing. - - - - - Uploading with non-default credential profile for Amazon S3 - nix copy --to ssh://machine nixpkgs.hello s3://example-bucket?profile=cache-upload + Your account will need the following IAM policy to + upload to the cache: + + + + + Uploading with a specific credential profile for Amazon S3 + nix copy --to 's3://example-nix-cache?profile=cache-upload&region=eu-west-2' nixpkgs.hello Uploading to an S3-Compatible Binary Cache - nix copy --to ssh://machine nixpkgs.hello s3://example-bucket?profile=cache-upload&endpoint=minio.example.com + nix copy --to 's3://example-nix-cache?profile=cache-upload&endpoint=minio.example.com' nixpkgs.hello - - The user writing to the bucket will need to perform the - following actions against the bucket: - - - s3:ListBucket - s3:GetBucketLocation - s3:ListObjects - s3:GetObject - s3:PutObject - s3:ListBucketMultipartUploads - s3:CreateMultipartUpload - s3:ListMultipartUploadParts - s3:AbortMultipartUpload -
From c9ba33870e6da73420317e9ef80b8c9dee693c3d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Sep 2018 12:43:01 +0200 Subject: [PATCH 1241/2196] Support special attributes in structured attributes derivations E.g. __noChroot and allowedReferences now work correctly. We also now check that the attribute type is correct. For instance, instead of allowedReferences = "out"; you have to write allowedReferences = [ "out" ]; Fixes #2453. --- src/libstore/build.cc | 269 +++++++++++++++++++++++++++--------------- 1 file changed, 175 insertions(+), 94 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1402bd097c3..727b8b3444a 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -740,6 +740,9 @@ class DerivationGoal : public Goal /* The derivation stored at drvPath. */ std::unique_ptr drv; + /* The contents of drv->env["__json"]. */ + std::experimental::optional structuredAttrs; + /* The remainder is state held during the build. */ /* Locks on the output paths. */ @@ -920,6 +923,13 @@ class DerivationGoal : public Goal /* Fill in the environment for the builder. */ void initEnv(); + /* Get an attribute from drv->env or from drv->env["__json"]. */ + std::experimental::optional getAttr(const std::string & name); + + bool getBoolAttr(const std::string & name, bool def = false); + + std::experimental::optional getStringsAttr(const std::string & name); + /* Write a JSON file containing the derivation attributes. */ void writeStructuredAttrs(); @@ -1139,6 +1149,16 @@ void DerivationGoal::haveDerivation() return; } + /* Parse the __json attribute, if any. */ + auto jsonAttr = drv->env.find("__json"); + if (jsonAttr != drv->env.end()) { + try { + structuredAttrs = nlohmann::json::parse(jsonAttr->second); + } catch (std::exception & e) { + throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what()); + } + } + /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build them. */ @@ -1644,7 +1664,7 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook about system features (beyond the system type) required from the build machine. (The hook could parse the drv file itself, but this is easier.) */ - Strings features = tokenizeString(get(drv->env, "requiredSystemFeatures")); + auto features = getStringsAttr("requiredSystemFeatures").value_or(Strings()); for (auto & i : features) checkStoreName(i); /* !!! abuse */ /* Send the request to the hook. */ @@ -1803,13 +1823,14 @@ void DerivationGoal::startBuilder() preloadNSS(); #if __APPLE__ - additionalSandboxProfile = get(drv->env, "__sandboxProfile"); + additionalSandboxProfile = getAttr("__sandboxProfile").value_or(""); #endif /* Are we doing a chroot build? */ { + auto noChroot = getBoolAttr("__noChroot"); if (settings.sandboxMode == smEnabled) { - if (get(drv->env, "__noChroot") == "1") + if (noChroot) throw Error(format("derivation '%1%' has '__noChroot' set, " "but that's not allowed when 'sandbox' is 'true'") % drvPath); #if __APPLE__ @@ -1822,7 +1843,7 @@ void DerivationGoal::startBuilder() else if (settings.sandboxMode == smDisabled) useChroot = false; else if (settings.sandboxMode == smRelaxed) - useChroot = !fixedOutput && get(drv->env, "__noChroot") != "1"; + useChroot = !fixedOutput && !noChroot; } if (worker.store.storeDir != worker.store.realStoreDir) { @@ -1873,7 +1894,7 @@ void DerivationGoal::startBuilder() writeStructuredAttrs(); /* Handle exportReferencesGraph(), if set. 
*/ - if (!drv->env.count("__json")) { + if (!structuredAttrs) { /* The `exportReferencesGraph' feature allows the references graph to be passed to a builder. This attribute should be a list of pairs [name1 path1 name2 path2 ...]. The references graph of @@ -1938,7 +1959,7 @@ void DerivationGoal::startBuilder() PathSet allowedPaths = settings.allowedImpureHostPrefixes; /* This works like the above, except on a per-derivation level */ - Strings impurePaths = tokenizeString(get(drv->env, "__impureHostDeps")); + auto impurePaths = getStringsAttr("__impureHostDeps").value_or(Strings()); for (auto & i : impurePaths) { bool found = false; @@ -2306,7 +2327,7 @@ void DerivationGoal::initEnv() passAsFile is ignored in structure mode because it's not needed (attributes are not passed through the environment, so there is no size constraint). */ - if (!drv->env.count("__json")) { + if (!structuredAttrs) { StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); int fileNr = 0; @@ -2353,8 +2374,8 @@ void DerivationGoal::initEnv() fixed-output derivations is by definition pure (since we already know the cryptographic hash of the output). */ if (fixedOutput) { - Strings varNames = tokenizeString(get(drv->env, "impureEnvVars")); - for (auto & i : varNames) env[i] = getEnv(i); + for (auto & i : getStringsAttr("impureEnvVars").value_or(Strings())) + env[i] = getEnv(i); } /* Currently structured log messages piggyback on stderr, but we @@ -2364,116 +2385,176 @@ void DerivationGoal::initEnv() } -static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*"); - - -void DerivationGoal::writeStructuredAttrs() +std::experimental::optional DerivationGoal::getAttr(const std::string & name) { - auto jsonAttr = drv->env.find("__json"); - if (jsonAttr == drv->env.end()) return; + if (structuredAttrs) { + auto i = structuredAttrs->find(name); + if (i == structuredAttrs->end()) + return {}; + else { + if (!i->is_string()) + throw Error("attribute '%s' of derivation '%s' must be a string", name, drvPath); + return i->get(); + } + } else { + auto i = drv->env.find(name); + if (i == drv->env.end()) + return {}; + else + return i->second; + } +} - try { - auto jsonStr = rewriteStrings(jsonAttr->second, inputRewrites); +bool DerivationGoal::getBoolAttr(const std::string & name, bool def) +{ + if (structuredAttrs) { + auto i = structuredAttrs->find(name); + if (i == structuredAttrs->end()) + return def; + else { + if (!i->is_boolean()) + throw Error("attribute '%s' of derivation '%s' must be a Boolean", name, drvPath); + return i->get(); + } + } else { + auto i = drv->env.find(name); + if (i == drv->env.end()) + return def; + else + return i->second == "1"; + } +} - auto json = nlohmann::json::parse(jsonStr); - /* Add an "outputs" object containing the output paths. */ - nlohmann::json outputs; - for (auto & i : drv->outputs) - outputs[i.first] = rewriteStrings(i.second.path, inputRewrites); - json["outputs"] = outputs; - - /* Handle exportReferencesGraph. 
*/ - auto e = json.find("exportReferencesGraph"); - if (e != json.end() && e->is_object()) { - for (auto i = e->begin(); i != e->end(); ++i) { - std::ostringstream str; - { - JSONPlaceholder jsonRoot(str, true); - PathSet storePaths; - for (auto & p : *i) - storePaths.insert(p.get()); - worker.store.pathInfoToJSON(jsonRoot, - exportReferences(storePaths), false, true); - } - json[i.key()] = nlohmann::json::parse(str.str()); // urgh +std::experimental::optional DerivationGoal::getStringsAttr(const std::string & name) +{ + if (structuredAttrs) { + auto i = structuredAttrs->find(name); + if (i == structuredAttrs->end()) + return {}; + else { + if (!i->is_array()) + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + Strings res; + for (auto j = i->begin(); j != i->end(); ++j) { + if (!j->is_string()) + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + res.push_back(j->get()); } + return res; } + } else { + auto i = drv->env.find(name); + if (i == drv->env.end()) + return {}; + else + return tokenizeString(i->second); + } +} + + +static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*"); - writeFile(tmpDir + "/.attrs.json", json.dump()); - /* As a convenience to bash scripts, write a shell file that - maps all attributes that are representable in bash - - namely, strings, integers, nulls, Booleans, and arrays and - objects consisting entirely of those values. (So nested - arrays or objects are not supported.) */ +void DerivationGoal::writeStructuredAttrs() +{ + if (!structuredAttrs) return; - auto handleSimpleType = [](const nlohmann::json & value) -> std::experimental::optional { - if (value.is_string()) - return shellEscape(value); + auto json = *structuredAttrs; - if (value.is_number()) { - auto f = value.get(); - if (std::ceil(f) == f) - return std::to_string(value.get()); + /* Add an "outputs" object containing the output paths. */ + nlohmann::json outputs; + for (auto & i : drv->outputs) + outputs[i.first] = rewriteStrings(i.second.path, inputRewrites); + json["outputs"] = outputs; + + /* Handle exportReferencesGraph. */ + auto e = json.find("exportReferencesGraph"); + if (e != json.end() && e->is_object()) { + for (auto i = e->begin(); i != e->end(); ++i) { + std::ostringstream str; + { + JSONPlaceholder jsonRoot(str, true); + PathSet storePaths; + for (auto & p : *i) + storePaths.insert(p.get()); + worker.store.pathInfoToJSON(jsonRoot, + exportReferences(storePaths), false, true); } + json[i.key()] = nlohmann::json::parse(str.str()); // urgh + } + } - if (value.is_null()) - return std::string("''"); + writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites)); - if (value.is_boolean()) - return value.get() ? std::string("1") : std::string(""); + /* As a convenience to bash scripts, write a shell file that + maps all attributes that are representable in bash - + namely, strings, integers, nulls, Booleans, and arrays and + objects consisting entirely of those values. (So nested + arrays or objects are not supported.) 
*/ - return {}; - }; + auto handleSimpleType = [](const nlohmann::json & value) -> std::experimental::optional { + if (value.is_string()) + return shellEscape(value); - std::string jsonSh; + if (value.is_number()) { + auto f = value.get(); + if (std::ceil(f) == f) + return std::to_string(value.get()); + } - for (auto i = json.begin(); i != json.end(); ++i) { + if (value.is_null()) + return std::string("''"); - if (!std::regex_match(i.key(), shVarName)) continue; + if (value.is_boolean()) + return value.get() ? std::string("1") : std::string(""); - auto & value = i.value(); + return {}; + }; - auto s = handleSimpleType(value); - if (s) - jsonSh += fmt("declare %s=%s\n", i.key(), *s); + std::string jsonSh; - else if (value.is_array()) { - std::string s2; - bool good = true; + for (auto i = json.begin(); i != json.end(); ++i) { - for (auto i = value.begin(); i != value.end(); ++i) { - auto s3 = handleSimpleType(i.value()); - if (!s3) { good = false; break; } - s2 += *s3; s2 += ' '; - } + if (!std::regex_match(i.key(), shVarName)) continue; - if (good) - jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2); - } + auto & value = i.value(); - else if (value.is_object()) { - std::string s2; - bool good = true; + auto s = handleSimpleType(value); + if (s) + jsonSh += fmt("declare %s=%s\n", i.key(), *s); - for (auto i = value.begin(); i != value.end(); ++i) { - auto s3 = handleSimpleType(i.value()); - if (!s3) { good = false; break; } - s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3); - } + else if (value.is_array()) { + std::string s2; + bool good = true; - if (good) - jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2); + for (auto i = value.begin(); i != value.end(); ++i) { + auto s3 = handleSimpleType(i.value()); + if (!s3) { good = false; break; } + s2 += *s3; s2 += ' '; } + + if (good) + jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2); } - writeFile(tmpDir + "/.attrs.sh", jsonSh); + else if (value.is_object()) { + std::string s2; + bool good = true; - } catch (std::exception & e) { - throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what()); + for (auto i = value.begin(); i != value.end(); ++i) { + auto s3 = handleSimpleType(i.value()); + if (!s3) { good = false; break; } + s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3); + } + + if (good) + jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2); + } } + + writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites)); } @@ -2917,7 +2998,7 @@ void DerivationGoal::runChild() writeFile(sandboxFile, sandboxProfile); - bool allowLocalNetworking = get(drv->env, "__darwinAllowLocalNetworking") == "1"; + bool allowLocalNetworking = getBoolAttr("__darwinAllowLocalNetworking"); /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */ @@ -2989,10 +3070,9 @@ void DerivationGoal::runChild() /* Parse a list of reference specifiers. Each element must either be a store path, or the symbolic name of the output of the derivation (such as `out'). 
*/ -PathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, string attr) +PathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, const Strings & paths) { PathSet result; - Paths paths = tokenizeString(attr); for (auto & i : paths) { if (store.isStorePath(i)) result.insert(i); @@ -3121,7 +3201,7 @@ void DerivationGoal::registerOutputs() the derivation to its content-addressed location. */ Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath); - Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]); + Path dest = worker.store.makeFixedOutputPath(recursive, h2, storePathToName(path)); if (h != h2) { @@ -3204,9 +3284,10 @@ void DerivationGoal::registerOutputs() /* Enforce `allowedReferences' and friends. */ auto checkRefs = [&](const string & attrName, bool allowed, bool recursive) { - if (drv->env.find(attrName) == drv->env.end()) return; + auto value = getStringsAttr(attrName); + if (!value) return; - PathSet spec = parseReferenceSpecifiers(worker.store, *drv, get(drv->env, attrName)); + PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); PathSet used; if (recursive) { From 99d4bb2d4cd94e0234e8cfb12887db155d98ac50 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Sep 2018 13:28:15 +0200 Subject: [PATCH 1242/2196] Remove obsolete check on system features --- src/libstore/build.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 727b8b3444a..69c3c2c1e04 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1665,7 +1665,6 @@ HookReply DerivationGoal::tryBuildHook() required from the build machine. (The hook could parse the drv file itself, but this is easier.) */ auto features = getStringsAttr("requiredSystemFeatures").value_or(Strings()); - for (auto & i : features) checkStoreName(i); /* !!! abuse */ /* Send the request to the hook. */ worker.hook->sink From 7ae7a38c9a7d0a5679e65c8213cd7b58dfdc1c52 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Sep 2018 14:31:16 +0200 Subject: [PATCH 1243/2196] Move structured attrs handling into a separate class This is primarily because Derivation::{can,will}BuildLocally() depends on attributes like preferLocalBuild and requiredSystemFeatures, but it can't handle them properly because it doesn't have access to the structured attributes. --- src/libstore/build.cc | 113 ++++------------------------- src/libstore/derivations.cc | 14 ---- src/libstore/derivations.hh | 4 - src/libstore/parsed-derivations.cc | 97 +++++++++++++++++++++++++ src/libstore/parsed-derivations.hh | 33 +++++++++ 5 files changed, 145 insertions(+), 116 deletions(-) create mode 100644 src/libstore/parsed-derivations.cc create mode 100644 src/libstore/parsed-derivations.hh diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 69c3c2c1e04..eb7f106a20c 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -11,6 +11,7 @@ #include "compression.hh" #include "json.hh" #include "nar-info.hh" +#include "parsed-derivations.hh" #include #include @@ -740,8 +741,7 @@ class DerivationGoal : public Goal /* The derivation stored at drvPath. */ std::unique_ptr drv; - /* The contents of drv->env["__json"]. */ - std::experimental::optional structuredAttrs; + std::unique_ptr parsedDrv; /* The remainder is state held during the build. */ @@ -923,13 +923,6 @@ class DerivationGoal : public Goal /* Fill in the environment for the builder. 
*/ void initEnv(); - /* Get an attribute from drv->env or from drv->env["__json"]. */ - std::experimental::optional getAttr(const std::string & name); - - bool getBoolAttr(const std::string & name, bool def = false); - - std::experimental::optional getStringsAttr(const std::string & name); - /* Write a JSON file containing the derivation attributes. */ void writeStructuredAttrs(); @@ -1149,15 +1142,7 @@ void DerivationGoal::haveDerivation() return; } - /* Parse the __json attribute, if any. */ - auto jsonAttr = drv->env.find("__json"); - if (jsonAttr != drv->env.end()) { - try { - structuredAttrs = nlohmann::json::parse(jsonAttr->second); - } catch (std::exception & e) { - throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what()); - } - } + parsedDrv = std::make_unique(drvPath, *drv); /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build @@ -1415,7 +1400,7 @@ void DerivationGoal::tryToBuild() /* Don't do a remote build if the derivation has the attribute `preferLocalBuild' set. Also, check and repair modes are only supported for local builds. */ - bool buildLocally = buildMode != bmNormal || drv->willBuildLocally(); + bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(); auto started = [&]() { auto msg = fmt( @@ -1664,7 +1649,7 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook about system features (beyond the system type) required from the build machine. (The hook could parse the drv file itself, but this is easier.) */ - auto features = getStringsAttr("requiredSystemFeatures").value_or(Strings()); + auto features = parsedDrv->getStringsAttr("requiredSystemFeatures").value_or(Strings()); /* Send the request to the hook. */ worker.hook->sink @@ -1812,7 +1797,7 @@ static void preloadNSS() { void DerivationGoal::startBuilder() { /* Right platform? */ - if (!drv->canBuildLocally()) { + if (!parsedDrv->canBuildLocally()) { throw Error( format("a '%1%' is required to build '%3%', but I am a '%2%'") % drv->platform % settings.thisSystem % drvPath); @@ -1822,12 +1807,12 @@ void DerivationGoal::startBuilder() preloadNSS(); #if __APPLE__ - additionalSandboxProfile = getAttr("__sandboxProfile").value_or(""); + additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or(""); #endif /* Are we doing a chroot build? */ { - auto noChroot = getBoolAttr("__noChroot"); + auto noChroot = parsedDrv->getBoolAttr("__noChroot"); if (settings.sandboxMode == smEnabled) { if (noChroot) throw Error(format("derivation '%1%' has '__noChroot' set, " @@ -1893,7 +1878,7 @@ void DerivationGoal::startBuilder() writeStructuredAttrs(); /* Handle exportReferencesGraph(), if set. */ - if (!structuredAttrs) { + if (!parsedDrv->getStructuredAttrs()) { /* The `exportReferencesGraph' feature allows the references graph to be passed to a builder. This attribute should be a list of pairs [name1 path1 name2 path2 ...]. 
The references graph of @@ -1958,7 +1943,7 @@ void DerivationGoal::startBuilder() PathSet allowedPaths = settings.allowedImpureHostPrefixes; /* This works like the above, except on a per-derivation level */ - auto impurePaths = getStringsAttr("__impureHostDeps").value_or(Strings()); + auto impurePaths = parsedDrv->getStringsAttr("__impureHostDeps").value_or(Strings()); for (auto & i : impurePaths) { bool found = false; @@ -2326,7 +2311,7 @@ void DerivationGoal::initEnv() passAsFile is ignored in structure mode because it's not needed (attributes are not passed through the environment, so there is no size constraint). */ - if (!structuredAttrs) { + if (!parsedDrv->getStructuredAttrs()) { StringSet passAsFile = tokenizeString(get(drv->env, "passAsFile")); int fileNr = 0; @@ -2373,7 +2358,7 @@ void DerivationGoal::initEnv() fixed-output derivations is by definition pure (since we already know the cryptographic hash of the output). */ if (fixedOutput) { - for (auto & i : getStringsAttr("impureEnvVars").value_or(Strings())) + for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) env[i] = getEnv(i); } @@ -2384,80 +2369,12 @@ void DerivationGoal::initEnv() } -std::experimental::optional DerivationGoal::getAttr(const std::string & name) -{ - if (structuredAttrs) { - auto i = structuredAttrs->find(name); - if (i == structuredAttrs->end()) - return {}; - else { - if (!i->is_string()) - throw Error("attribute '%s' of derivation '%s' must be a string", name, drvPath); - return i->get(); - } - } else { - auto i = drv->env.find(name); - if (i == drv->env.end()) - return {}; - else - return i->second; - } -} - - -bool DerivationGoal::getBoolAttr(const std::string & name, bool def) -{ - if (structuredAttrs) { - auto i = structuredAttrs->find(name); - if (i == structuredAttrs->end()) - return def; - else { - if (!i->is_boolean()) - throw Error("attribute '%s' of derivation '%s' must be a Boolean", name, drvPath); - return i->get(); - } - } else { - auto i = drv->env.find(name); - if (i == drv->env.end()) - return def; - else - return i->second == "1"; - } -} - - -std::experimental::optional DerivationGoal::getStringsAttr(const std::string & name) -{ - if (structuredAttrs) { - auto i = structuredAttrs->find(name); - if (i == structuredAttrs->end()) - return {}; - else { - if (!i->is_array()) - throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); - Strings res; - for (auto j = i->begin(); j != i->end(); ++j) { - if (!j->is_string()) - throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); - res.push_back(j->get()); - } - return res; - } - } else { - auto i = drv->env.find(name); - if (i == drv->env.end()) - return {}; - else - return tokenizeString(i->second); - } -} - - static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*"); void DerivationGoal::writeStructuredAttrs() { + auto & structuredAttrs = parsedDrv->getStructuredAttrs(); if (!structuredAttrs) return; auto json = *structuredAttrs; @@ -2997,7 +2914,7 @@ void DerivationGoal::runChild() writeFile(sandboxFile, sandboxProfile); - bool allowLocalNetworking = getBoolAttr("__darwinAllowLocalNetworking"); + bool allowLocalNetworking = parsedDrv->getBoolAttr("__darwinAllowLocalNetworking"); /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. 
*/ @@ -3283,7 +3200,7 @@ void DerivationGoal::registerOutputs() /* Enforce `allowedReferences' and friends. */ auto checkRefs = [&](const string & attrName, bool allowed, bool recursive) { - auto value = getStringsAttr(attrName); + auto value = parsedDrv->getStringsAttr(attrName); if (!value) return; PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 1e187ec5e95..3961126fff9 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -36,12 +36,6 @@ Path BasicDerivation::findOutput(const string & id) const } -bool BasicDerivation::willBuildLocally() const -{ - return get(env, "preferLocalBuild") == "1" && canBuildLocally(); -} - - bool BasicDerivation::substitutesAllowed() const { return get(env, "allowSubstitutes", "1") == "1"; @@ -54,14 +48,6 @@ bool BasicDerivation::isBuiltin() const } -bool BasicDerivation::canBuildLocally() const -{ - return platform == settings.thisSystem - || settings.extraPlatforms.get().count(platform) > 0 - || isBuiltin(); -} - - Path writeDerivation(ref store, const Derivation & drv, const string & name, RepairFlag repair) { diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 7b97730d3bf..9753e796db5 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -56,14 +56,10 @@ struct BasicDerivation the given derivation. */ Path findOutput(const string & id) const; - bool willBuildLocally() const; - bool substitutesAllowed() const; bool isBuiltin() const; - bool canBuildLocally() const; - /* Return true iff this is a fixed-output derivation. */ bool isFixedOutput() const; diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc new file mode 100644 index 00000000000..0d7acf046af --- /dev/null +++ b/src/libstore/parsed-derivations.cc @@ -0,0 +1,97 @@ +#include "parsed-derivations.hh" + +namespace nix { + +ParsedDerivation::ParsedDerivation(const Path & drvPath, BasicDerivation & drv) + : drvPath(drvPath), drv(drv) +{ + /* Parse the __json attribute, if any. 
*/ + auto jsonAttr = drv.env.find("__json"); + if (jsonAttr != drv.env.end()) { + try { + structuredAttrs = nlohmann::json::parse(jsonAttr->second); + } catch (std::exception & e) { + throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what()); + } + } +} + +std::experimental::optional ParsedDerivation::getStringAttr(const std::string & name) const +{ + if (structuredAttrs) { + auto i = structuredAttrs->find(name); + if (i == structuredAttrs->end()) + return {}; + else { + if (!i->is_string()) + throw Error("attribute '%s' of derivation '%s' must be a string", name, drvPath); + return i->get(); + } + } else { + auto i = drv.env.find(name); + if (i == drv.env.end()) + return {}; + else + return i->second; + } +} + +bool ParsedDerivation::getBoolAttr(const std::string & name, bool def) const +{ + if (structuredAttrs) { + auto i = structuredAttrs->find(name); + if (i == structuredAttrs->end()) + return def; + else { + if (!i->is_boolean()) + throw Error("attribute '%s' of derivation '%s' must be a Boolean", name, drvPath); + return i->get(); + } + } else { + auto i = drv.env.find(name); + if (i == drv.env.end()) + return def; + else + return i->second == "1"; + } +} + +std::experimental::optional ParsedDerivation::getStringsAttr(const std::string & name) const +{ + if (structuredAttrs) { + auto i = structuredAttrs->find(name); + if (i == structuredAttrs->end()) + return {}; + else { + if (!i->is_array()) + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + Strings res; + for (auto j = i->begin(); j != i->end(); ++j) { + if (!j->is_string()) + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + res.push_back(j->get()); + } + return res; + } + } else { + auto i = drv.env.find(name); + if (i == drv.env.end()) + return {}; + else + return tokenizeString(i->second); + } +} + +bool ParsedDerivation::canBuildLocally() const +{ + return drv.platform == settings.thisSystem + || settings.extraPlatforms.get().count(drv.platform) > 0 + || drv.isBuiltin(); +} + +bool ParsedDerivation::willBuildLocally() const +{ + return getBoolAttr("preferLocalBuild") && canBuildLocally(); +} + +} diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh new file mode 100644 index 00000000000..0c7dc32e1e0 --- /dev/null +++ b/src/libstore/parsed-derivations.hh @@ -0,0 +1,33 @@ +#include "derivations.hh" + +#include + +namespace nix { + +class ParsedDerivation +{ + Path drvPath; + BasicDerivation & drv; + std::experimental::optional structuredAttrs; + +public: + + ParsedDerivation(const Path & drvPath, BasicDerivation & drv); + + const std::experimental::optional & getStructuredAttrs() const + { + return structuredAttrs; + } + + std::experimental::optional getStringAttr(const std::string & name) const; + + bool getBoolAttr(const std::string & name, bool def = false) const; + + std::experimental::optional getStringsAttr(const std::string & name) const; + + bool canBuildLocally() const; + + bool willBuildLocally() const; +}; + +} From 1e7b8deea7e052ed9ebf47d1411bcaf542054b41 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 28 Sep 2018 15:57:27 +0200 Subject: [PATCH 1244/2196] Check requiredSystemFeatures for local builds For example, this prevents a "kvm" build on machines that don't have KVM. Fixes #2012. 
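The enforcement itself is small: a derivation may only build locally when its platform matches and every entry of requiredSystemFeatures is present in the machine's system-features set (see parsed-derivations.cc below). The following is an illustrative, self-contained sketch of that check with simplified names — the real canBuildLocally() additionally accepts extra-platforms and builtin builders:

    // Condensed sketch of the feature check this patch enforces for local
    // builds. Simplified stand-ins; not the actual ParsedDerivation/Settings.
    #include <iostream>
    #include <set>
    #include <string>

    using StringSet = std::set<std::string>;

    static bool canBuildLocally(const std::string & drvPlatform,
                                const StringSet & requiredFeatures,
                                const std::string & thisSystem,
                                const StringSet & systemFeatures)
    {
        if (drvPlatform != thisSystem) return false;
        for (auto & feature : requiredFeatures)
            if (!systemFeatures.count(feature)) return false;
        return true;
    }

    int main()
    {
        StringSet machine{"nixos-test", "benchmark", "big-parallel"}; // no "kvm"
        std::cout << canBuildLocally("x86_64-linux", {"kvm"},
                                     "x86_64-linux", machine)
                  << "\n"; // prints 0: route this build to a machine with kvm
    }

By default the machine's feature set contains nixos-test, benchmark and big-parallel, plus kvm when /dev/kvm is accessible on Linux (see getDefaultSystemFeatures() below).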
--- doc/manual/command-ref/conf-file.xml | 27 ++++++++++++++++++++++ doc/manual/release-notes/release-notes.xml | 1 + src/libstore/build.cc | 21 ++++++++--------- src/libstore/globals.cc | 15 ++++++++++++ src/libstore/globals.hh | 6 +++++ src/libstore/parsed-derivations.cc | 20 +++++++++++++--- src/libstore/parsed-derivations.hh | 2 ++ tests/build-remote.sh | 3 ++- 8 files changed, 79 insertions(+), 16 deletions(-) diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml index fd09883be40..e9947ebc673 100644 --- a/doc/manual/command-ref/conf-file.xml +++ b/doc/manual/command-ref/conf-file.xml @@ -757,6 +757,33 @@ password my-password + system-features + + A set of system “features” supported by this + machine, e.g. kvm. Derivations can express a + dependency on such features through the derivation attribute + requiredSystemFeatures. For example, the + attribute + + +requiredSystemFeatures = [ "kvm" ]; + + + ensures that the derivation can only be built on a machine with + the kvm feature. + + This setting by default includes kvm if + /dev/kvm is accessible, and the + pseudo-features nixos-test, + benchmark and big-parallel + that are used in Nixpkgs to route builds to specific + machines. + + + + + + timeout diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml index ff4085cb792..e8ff586fa43 100644 --- a/doc/manual/release-notes/release-notes.xml +++ b/doc/manual/release-notes/release-notes.xml @@ -12,6 +12,7 @@ --> + diff --git a/src/libstore/build.cc b/src/libstore/build.cc index eb7f106a20c..0073b9b727e 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1646,18 +1646,13 @@ HookReply DerivationGoal::tryBuildHook() try { - /* Tell the hook about system features (beyond the system type) - required from the build machine. (The hook could parse the - drv file itself, but this is easier.) */ - auto features = parsedDrv->getStringsAttr("requiredSystemFeatures").value_or(Strings()); - /* Send the request to the hook. */ worker.hook->sink << "try" << (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 1 : 0) << drv->platform << drvPath - << features; + << parsedDrv->getRequiredSystemFeatures(); worker.hook->sink.flush(); /* Read the first line of input, which should be a word indicating @@ -1797,11 +1792,13 @@ static void preloadNSS() { void DerivationGoal::startBuilder() { /* Right platform? 
*/ - if (!parsedDrv->canBuildLocally()) { - throw Error( - format("a '%1%' is required to build '%3%', but I am a '%2%'") - % drv->platform % settings.thisSystem % drvPath); - } + if (!parsedDrv->canBuildLocally()) + throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}", + drv->platform, + concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()), + drvPath, + settings.thisSystem, + concatStringsSep(", ", settings.systemFeatures)); if (drv->isBuiltin()) preloadNSS(); @@ -2625,7 +2622,7 @@ void DerivationGoal::runChild() createDirs(chrootRootDir + "/dev/shm"); createDirs(chrootRootDir + "/dev/pts"); ss.push_back("/dev/full"); - if (pathExists("/dev/kvm")) + if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm")) ss.push_back("/dev/kvm"); ss.push_back("/dev/null"); ss.push_back("/dev/random"); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index d95db56726c..a9c07b23a6f 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -86,6 +86,21 @@ unsigned int Settings::getDefaultCores() return std::max(1U, std::thread::hardware_concurrency()); } +StringSet Settings::getDefaultSystemFeatures() +{ + /* For backwards compatibility, accept some "features" that are + used in Nixpkgs to route builds to certain machines but don't + actually require anything special on the machines. */ + StringSet features{"nixos-test", "benchmark", "big-parallel"}; + + #if __linux__ + if (access("/dev/kvm", R_OK | W_OK) == 0) + features.insert("kvm"); + #endif + + return features; +} + const string nixVersion = PACKAGE_VERSION; template<> void BaseSetting::set(const std::string & str) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index f589078dbb9..cf4ae63cdc2 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -32,6 +32,8 @@ class Settings : public Config { unsigned int getDefaultCores(); + StringSet getDefaultSystemFeatures(); + public: Settings(); @@ -261,6 +263,10 @@ public: "These may be supported natively (e.g. armv7 on some aarch64 CPUs " "or using hacks like qemu-user."}; + Setting systemFeatures{this, getDefaultSystemFeatures(), + "system-features", + "Optional features that this system implements (like \"kvm\")."}; + Setting substituters{this, nixStore == "/nix/store" ? 
Strings{"https://cache.nixos.org/"} : Strings(), "substituters", diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 0d7acf046af..dc328648273 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -82,11 +82,25 @@ std::experimental::optional ParsedDerivation::getStringsAttr(const std: } } +StringSet ParsedDerivation::getRequiredSystemFeatures() const +{ + StringSet res; + for (auto & i : getStringsAttr("requiredSystemFeatures").value_or(Strings())) + res.insert(i); + return res; +} + bool ParsedDerivation::canBuildLocally() const { - return drv.platform == settings.thisSystem - || settings.extraPlatforms.get().count(drv.platform) > 0 - || drv.isBuiltin(); + if (drv.platform != settings.thisSystem.get() + && !settings.extraPlatforms.get().count(drv.platform) + && !drv.isBuiltin()) + return false; + + for (auto & feature : getRequiredSystemFeatures()) + if (!settings.systemFeatures.get().count(feature)) return false; + + return true; } bool ParsedDerivation::willBuildLocally() const diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh index 0c7dc32e1e0..0a82c146172 100644 --- a/src/libstore/parsed-derivations.hh +++ b/src/libstore/parsed-derivations.hh @@ -25,6 +25,8 @@ public: std::experimental::optional getStringsAttr(const std::string & name) const; + StringSet getRequiredSystemFeatures() const; + bool canBuildLocally() const; bool willBuildLocally() const; diff --git a/tests/build-remote.sh b/tests/build-remote.sh index 9bca0f4a385..ddd68f327a1 100644 --- a/tests/build-remote.sh +++ b/tests/build-remote.sh @@ -11,7 +11,8 @@ rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1 nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \ --sandbox-paths /nix/store --sandbox-build-dir /build-tmp \ - --builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" + --builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \ + --system-features foo outPath=$TEST_ROOT/result From 5b7cfa487e42d0ee198f628a2f2a17bcedc81dc1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 1 Oct 2018 11:58:54 +0200 Subject: [PATCH 1245/2196] Add missing file --- doc/manual/release-notes/rl-2.2.xml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 doc/manual/release-notes/rl-2.2.xml diff --git a/doc/manual/release-notes/rl-2.2.xml b/doc/manual/release-notes/rl-2.2.xml new file mode 100644 index 00000000000..bc28a56c940 --- /dev/null +++ b/doc/manual/release-notes/rl-2.2.xml @@ -0,0 +1,25 @@ +
+ +Release 2.2 (201?-??-??) + +This release has the following changes: + + + + + The derivation attribute + requiredSystemFeatures is now enforced for + local builds, and not just to route builds to remote builders. + The supported features of a machine can be specified through the + configuration setting system-features. + + + + + +
+ From 9cc876fb11dd28081d74c74834e75b05f08af8ef Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Mon, 1 Oct 2018 13:26:59 -0500 Subject: [PATCH 1246/2196] nix-profile-daemon: remove cruft This removes part of the PATH that were being added automatically in multi-user installs: - $HOME/.nix-profile/lib/kde4/libexec - shouldn't be needed anymore, we are now using kde5 - @localstatedir@/nix/profiles/default/lib/kde4/libexec - same as above - @localstatedir@/nix/profiles/default - shouldn't ever contain binaries --- scripts/nix-profile-daemon.sh.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in index 1be9a0755d8..87d9fe5061a 100644 --- a/scripts/nix-profile-daemon.sh.in +++ b/scripts/nix-profile-daemon.sh.in @@ -68,4 +68,4 @@ elif [ -e "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" ]; then # fi export NIX_PATH="nixpkgs=@localstatedir@/nix/profiles/per-user/root/channels/nixpkgs:@localstatedir@/nix/profiles/per-user/root/channels" -export PATH="$HOME/.nix-profile/bin:$HOME/.nix-profile/lib/kde4/libexec:@localstatedir@/nix/profiles/default/bin:@localstatedir@/nix/profiles/default:@localstatedir@/nix/profiles/default/lib/kde4/libexec:$PATH" +export PATH="$HOME/.nix-profile/bin:@localstatedir@/nix/profiles/default/bin:$PATH" From 05cfc71cab4dd4c53f919d7451c36c8b5876919d Mon Sep 17 00:00:00 2001 From: Profpatsch Date: Tue, 2 Oct 2018 15:38:16 +0200 Subject: [PATCH 1247/2196] manual: add XML IDs to all builtin list entries --- doc/manual/expressions/builtins.xml | 364 +++++++++++++++++----------- 1 file changed, 217 insertions(+), 147 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 873f30b062e..8d12da9b135 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -21,7 +21,8 @@ available as builtins.derivation. - abort s + + abort s Abort Nix expression evaluation, print error message s. @@ -29,8 +30,10 @@ available as builtins.derivation. - builtins.add - e1 e2 + + builtins.add + e1 e2 + Return the sum of the numbers e1 and @@ -39,8 +42,9 @@ available as builtins.derivation. - builtins.all - pred list + + builtins.all + pred list Return true if the function pred returns true @@ -50,8 +54,9 @@ available as builtins.derivation. - builtins.any - pred list + + builtins.any + pred list Return true if the function pred returns true @@ -61,8 +66,9 @@ available as builtins.derivation. - builtins.attrNames - set + + builtins.attrNames + set Return the names of the attributes in the set set in an alphabetically sorted list. For instance, @@ -72,8 +78,9 @@ available as builtins.derivation. - builtins.attrValues - set + + builtins.attrValues + set Return the values of the attributes in the set set in the order corresponding to the @@ -82,7 +89,8 @@ available as builtins.derivation. - baseNameOf s + + baseNameOf s Return the base name of the string s, that is, everything following @@ -92,8 +100,9 @@ available as builtins.derivation. - builtins.bitAnd - e1 e2 + + builtins.bitAnd + e1 e2 Return the bitwise AND of the integers e1 and @@ -102,8 +111,9 @@ available as builtins.derivation. - builtins.bitOr - e1 e2 + + builtins.bitOr + e1 e2 Return the bitwise OR of the integers e1 and @@ -112,8 +122,9 @@ available as builtins.derivation. - builtins.bitXor - e1 e2 + + builtins.bitXor + e1 e2 Return the bitwise XOR of the integers e1 and @@ -122,7 +133,8 @@ available as builtins.derivation. 
- builtins + + builtins The set builtins contains all the built-in functions and values. You can use @@ -139,8 +151,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.compareVersions - s1 s2 + + builtins.compareVersions + s1 s2 Compare two strings representing versions and return -1 if version @@ -156,8 +169,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.splitVersion - s + + builtins.splitVersion + s Split a string representing a version into its components, by the same version splitting logic underlying the @@ -167,16 +181,18 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.concatLists - lists + + builtins.concatLists + lists Concatenate a list of lists into a single list. - builtins.concatStringsSep - separator list + + builtins.concatStringsSep + separator list Concatenate a list of strings with a separator between each element, e.g. concatStringsSep "/" @@ -184,8 +200,8 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.currentSystem + + builtins.currentSystem The built-in value currentSystem evaluates to the Nix platform identifier for the Nix installation @@ -218,8 +234,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" --> - builtins.deepSeq - e1 e2 + + builtins.deepSeq + e1 e2 This is like seq e1 @@ -231,8 +248,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - derivation - attrs + + derivation + attrs derivation is described in . @@ -240,7 +258,8 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - dirOf s + + dirOf s Return the directory part of the string s, that is, everything before the final @@ -250,8 +269,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.div - e1 e2 + + builtins.div + e1 e2 Return the quotient of the numbers e1 and @@ -259,8 +279,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.elem - x xs + + builtins.elem + x xs Return true if a value equal to x occurs in the list @@ -270,8 +291,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.elemAt - xs n + + builtins.elemAt + xs n Return element n from the list xs. Elements are counted @@ -281,8 +303,9 @@ if builtins ? getEnv then builtins.getEnv "PATH" else "" - builtins.fetchurl - url + + builtins.fetchurl + url Download the specified URL and return the path of the downloaded file. This function is not available if - fetchTarball - url + + fetchTarball + url Download the specified URL, unpack it and return the path of the unpacked tree. The file must be a tape archive @@ -346,7 +370,7 @@ stdenv.mkDerivation { … } - + builtins.fetchGit args @@ -546,7 +570,8 @@ stdenv.mkDerivation { - builtins.foldl’ + + builtins.foldl’ op nul list Reduce a list by applying a binary operator, from @@ -559,7 +584,8 @@ stdenv.mkDerivation { - builtins.functionArgs + + builtins.functionArgs f @@ -577,7 +603,8 @@ stdenv.mkDerivation { - builtins.fromJSON e + + builtins.fromJSON e Convert a JSON string to a Nix value. 
For example, @@ -592,8 +619,9 @@ builtins.fromJSON ''{"x": [1, 2, 3], "y": null}'' - builtins.genList - generator length + + builtins.genList + generator length Generate list of size length, with each element @@ -610,8 +638,9 @@ builtins.genList (x: x * x) 5 - builtins.getAttr - s set + + builtins.getAttr + s set getAttr returns the attribute named s from @@ -623,8 +652,9 @@ builtins.genList (x: x * x) 5 - builtins.getEnv - s + + builtins.getEnv + s getEnv returns the value of the environment variable s, or an empty @@ -641,8 +671,9 @@ builtins.genList (x: x * x) 5 - builtins.hasAttr - s set + + builtins.hasAttr + s set hasAttr returns true if set has an @@ -655,8 +686,9 @@ builtins.genList (x: x * x) 5 - builtins.hashString - type s + + builtins.hashString + type s Return a base-16 representation of the cryptographic hash of string s. The @@ -667,8 +699,9 @@ builtins.genList (x: x * x) 5 - builtins.head - list + + builtins.head + list Return the first element of a list; abort evaluation if the argument isn’t a list or is an empty list. You @@ -678,8 +711,9 @@ builtins.genList (x: x * x) 5 - import - path + + import + path Load, parse and return the Nix expression in the file path. If path @@ -733,8 +767,9 @@ x: x + 456 - builtins.intersectAttrs - e1 e2 + + builtins.intersectAttrs + e1 e2 Return a set consisting of the attributes in the set e2 that also exist in the set @@ -743,8 +778,9 @@ x: x + 456 - builtins.isAttrs - e + + builtins.isAttrs + e Return true if e evaluates to a set, and @@ -753,8 +789,9 @@ x: x + 456 - builtins.isList - e + + builtins.isList + e Return true if e evaluates to a list, and @@ -763,7 +800,7 @@ x: x + 456 - builtins.isFunction + builtins.isFunction e Return true if @@ -773,8 +810,9 @@ x: x + 456 - builtins.isString - e + + builtins.isString + e Return true if e evaluates to a string, and @@ -783,8 +821,9 @@ x: x + 456 - builtins.isInt - e + + builtins.isInt + e Return true if e evaluates to an int, and @@ -793,8 +832,9 @@ x: x + 456 - builtins.isFloat - e + + builtins.isFloat + e Return true if e evaluates to a float, and @@ -803,8 +843,9 @@ x: x + 456 - builtins.isBool - e + + builtins.isBool + e Return true if e evaluates to a bool, and @@ -813,8 +854,9 @@ x: x + 456 - isNull - e + + isNull + e Return true if e evaluates to null, @@ -828,8 +870,9 @@ x: x + 456 - builtins.length - e + + builtins.length + e Return the length of the list e. @@ -837,8 +880,9 @@ x: x + 456 - builtins.lessThan - e1 e2 + + builtins.lessThan + e1 e2 Return true if the number e1 is less than the number @@ -850,8 +894,9 @@ x: x + 456 - builtins.listToAttrs - e + + builtins.listToAttrs + e Construct a set from a list specifying the names and values of each attribute. Each element of the list should be @@ -877,8 +922,9 @@ builtins.listToAttrs - map - f list + + map + f list Apply the function f to each element in the list list. For @@ -893,14 +939,15 @@ map (x: "foo" + x) [ "bar" "bla" "abc" ] - builtins.match - regex str + + builtins.match + regex str - Returns a list if the extended - POSIX regular expression regex - matches str precisely, otherwise returns - null. Each item in the list is a regex group. + Returns a list if the extended + POSIX regular expression regex + matches str precisely, otherwise returns + null. Each item in the list is a regex group. builtins.match "ab" "abc" @@ -926,11 +973,12 @@ builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO " Evaluates to [ "foo" ]. 
- + - builtins.mul - e1 e2 + + builtins.mul + e1 e2 Return the product of the numbers e1 and @@ -939,8 +987,9 @@ Evaluates to [ "foo" ]. - builtins.parseDrvName - s + + builtins.parseDrvName + s Split the string s into a package name and version. The package name is everything up to @@ -953,7 +1002,7 @@ Evaluates to [ "foo" ]. - + builtins.path args @@ -1023,8 +1072,9 @@ Evaluates to [ "foo" ]. - builtins.pathExists - path + + builtins.pathExists + path Return true if the path path exists, and @@ -1047,8 +1097,9 @@ in config.someSetting - builtins.readDir - path + + builtins.readDir + path Return the contents of the directory path as a set mapping directory entries @@ -1069,8 +1120,9 @@ in config.someSetting - builtins.readFile - path + + builtins.readFile + path Return the contents of the file path as a string. @@ -1078,8 +1130,9 @@ in config.someSetting - removeAttrs - set list + + removeAttrs + set list Remove the attributes listed in list from @@ -1094,8 +1147,9 @@ removeAttrs { x = 1; y = 2; z = 3; } [ "a" "x" "z" ] - builtins.replaceStrings - from to s + + builtins.replaceStrings + from to s Given string s, replace every occurrence of the strings in from @@ -1111,8 +1165,9 @@ builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar" - builtins.seq - e1 e2 + + builtins.seq + e1 e2 Evaluate e1, then evaluate and return e2. This ensures @@ -1122,8 +1177,9 @@ builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar" - builtins.sort - comparator list + + builtins.sort + comparator list Return list in sorted order. It repeatedly calls the function @@ -1145,15 +1201,16 @@ builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ] - builtins.split - regex str + + builtins.split + regex str - Returns a list composed of non matched strings interleaved - with the lists of the extended - POSIX regular expression regex matches - of str. Each item in the lists of matched - sequences is a regex group. + Returns a list composed of non matched strings interleaved + with the lists of the extended + POSIX regular expression regex matches + of str. Each item in the lists of matched + sequences is a regex group. builtins.split "(a)b" "abc" @@ -1179,11 +1236,12 @@ builtins.split "([[:upper:]]+)" " FOO " Evaluates to [ " " [ "FOO" ] " " ]. - + - builtins.stringLength - e + + builtins.stringLength + e Return the length of the string e. If e is @@ -1192,8 +1250,9 @@ Evaluates to [ " " [ "FOO" ] " " ]. - builtins.sub - e1 e2 + + builtins.sub + e1 e2 Return the difference between the numbers e1 and @@ -1202,9 +1261,10 @@ Evaluates to [ " " [ "FOO" ] " " ]. - builtins.substring - start len - s + + builtins.substring + start len + s Return the substring of s from character position @@ -1227,8 +1287,9 @@ builtins.substring 0 3 "nixos" - builtins.tail - list + + builtins.tail + list Return the second to last elements of a list; abort evaluation if the argument isn’t a list or is an empty @@ -1237,8 +1298,9 @@ builtins.substring 0 3 "nixos" - throw - s + + throw + s Throw an error message s. This usually aborts Nix expression @@ -1251,9 +1313,10 @@ builtins.substring 0 3 "nixos" - builtins.toFile - name s + + builtins.toFile + name + s Store the string s in a file in the Nix store and return its path. The file has suffix @@ -1329,7 +1392,8 @@ in foo - builtins.toJSON e + + builtins.toJSON e Return a string containing a JSON representation of e. Strings, integers, floats, booleans, @@ -1342,7 +1406,8 @@ in foo - builtins.toPath s + + builtins.toPath s Convert the string value s into a path value. 
The string @@ -1355,7 +1420,8 @@ in foo - toString e + + toString e Convert the expression e to a string. @@ -1374,7 +1440,8 @@ in foo - builtins.toXML e + + builtins.toXML e Return a string containing an XML representation of e. The main application for @@ -1489,8 +1556,9 @@ stdenv.mkDerivation (rec { - builtins.trace - e1 e2 + + builtins.trace + e1 e2 Evaluate e1 and print its abstract syntax representation on standard error. Then return @@ -1499,8 +1567,9 @@ stdenv.mkDerivation (rec { - builtins.tryEval - e + + builtins.tryEval + e Try to evaluate e. Return a set containing the attributes success @@ -1513,8 +1582,9 @@ stdenv.mkDerivation (rec { - builtins.typeOf - e + + builtins.typeOf + e Return a string representing the type of the value e, namely "int", From 65453e2d777dac8d90f98e9d24b6428fce9c2ab5 Mon Sep 17 00:00:00 2001 From: Benjamin Hipple Date: Thu, 4 Oct 2018 02:37:15 +0000 Subject: [PATCH 1248/2196] Docs: update fixed output hashes `fetchurl` will now throw if given an `md5`, and the hashes have generally been upgraded to avoid it and use `sha256` as a default. This updates the documentation examples in the manual accordingly. --- doc/manual/expressions/advanced-attributes.xml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/manual/expressions/advanced-attributes.xml b/doc/manual/expressions/advanced-attributes.xml index 9422e82ff36..2af7a51acfb 100644 --- a/doc/manual/expressions/advanced-attributes.xml +++ b/doc/manual/expressions/advanced-attributes.xml @@ -216,7 +216,7 @@ fetchurl { { stdenv, curl }: # The curl program is used for downloading. -{ url, md5 }: +{ url, sha256 }: stdenv.mkDerivation { name = baseNameOf (toString url); @@ -224,10 +224,10 @@ stdenv.mkDerivation { buildInputs = [ curl ]; # This is a fixed-output derivation; the output must be a regular - # file with MD5 hash md5. + # file with SHA256 hash sha256. outputHashMode = "flat"; - outputHashAlgo = "md5"; - outputHash = md5; + outputHashAlgo = "sha256"; + outputHash = sha256; inherit url; } @@ -237,8 +237,8 @@ stdenv.mkDerivation { The outputHashAlgo attribute specifies the hash algorithm used to compute the hash. It can currently be - "md5", "sha1" or - "sha256". + "sha1", "sha256" or + "sha512". The outputHashMode attribute determines how the hash is computed. It must be one of the following two @@ -251,7 +251,7 @@ stdenv.mkDerivation { The output must be a non-executable regular file. If it isn’t, the build fails. The hash is simply computed over the contents of that file (so it’s equal to what - Unix commands like md5sum or + Unix commands like sha256sum or sha1sum produce). This is the default. From 0fda9b22c7cc0f0191a51d168c4b03d6c4e98d71 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 4 Oct 2018 13:16:30 +0200 Subject: [PATCH 1249/2196] Remove unnecessary typecast --- src/nix-daemon/nix-daemon.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 615c1f5dc06..423ba15e2ad 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -709,7 +709,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); // FIXME: race if addToStore doesn't read source? - store.cast()->addToStore(info, *source, (RepairFlag) repair, + store->addToStore(info, *source, (RepairFlag) repair, dontCheckSigs ? 
NoCheckSigs : CheckSigs, nullptr); logger->stopWork(); From ea41838ae049817fd996ead14e040d922bb7c067 Mon Sep 17 00:00:00 2001 From: "Graham Christensen (Target)" <39596207+graham-at-target@users.noreply.github.com> Date: Tue, 16 Oct 2018 10:22:36 -0400 Subject: [PATCH 1250/2196] install script: remove unportable command check, fixup errant escape `which` isn't necessarily portable, but `command -v` is an equivalent form. Additionally, the `\'` is not necessary, as it is already quoted by `"`. --- scripts/install.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/install.in b/scripts/install.in index 26ab85ba099..7bff7b216d9 100644 --- a/scripts/install.in +++ b/scripts/install.in @@ -11,14 +11,14 @@ oops() { } tmpDir="$(mktemp -d -t nix-binary-tarball-unpack.XXXXXXXXXX || \ - oops "Can\'t create temporary directory for downloading the Nix binary tarball")" + oops "Can't create temporary directory for downloading the Nix binary tarball")" cleanup() { rm -rf "$tmpDir" } trap cleanup EXIT INT QUIT TERM require_util() { - type "$1" > /dev/null 2>&1 || which "$1" > /dev/null 2>&1 || + type "$1" > /dev/null 2>&1 || command -v "$1" > /dev/null 2>&1 || oops "you do not have '$1' installed, which I need to $2" } From ba51100d64c18f627f97e606c4884ba2fb78dfa0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Oct 2018 22:58:01 +0200 Subject: [PATCH 1251/2196] Get rid of UDSRemoteStore::Connection Since its superclass RemoteStore::Connection contains 'to' and 'from' fields that refer to the file descriptor maintained in the subclass, it was possible for the flush() call in Connection::~Connection() to write to a closed file descriptor (or worse, a file descriptor now referencing another file). So make sure that the file descriptor survives 'to' and 'from'. --- src/libstore/remote-store.hh | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index b488e34ce26..9d768576bad 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -103,6 +103,7 @@ protected: struct Connection { + AutoCloseFD fd; FdSink to; FdSource from; unsigned int daemonVersion; @@ -140,11 +141,6 @@ public: private: - struct Connection : RemoteStore::Connection - { - AutoCloseFD fd; - }; - ref openConnection() override; std::experimental::optional path; }; From 79e358ce6d2a2c34f365c24d68ddbab7461380f8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Oct 2018 23:36:15 +0200 Subject: [PATCH 1252/2196] RemoteStore: Close connection if an exception occurs Fixes #2075. 
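The fix hinges on an RAII wrapper (ConnectionHandle, below): if any exception other than a daemon-reported error unwinds past the handle, the connection is marked bad so the pool closes it instead of handing a desynchronized socket to the next caller. A stripped-down sketch of that pattern with generic names — the real class wraps the pooled RemoteStore::Connection handle and uses the older std::uncaught_exception():

    // Minimal sketch of the "mark bad during stack unwinding" pattern.
    #include <exception>
    #include <iostream>
    #include <stdexcept>

    struct FakePool {
        bool dropped = false;
        void putBack() { /* connection is safe to reuse */ }
        void drop() { dropped = true; /* close the underlying socket */ }
    };

    struct ConnectionHandle {
        FakePool & pool;
        bool daemonException = false; // set when the daemon itself reported the error

        ConnectionHandle(FakePool & p) : pool(p) { }

        ~ConnectionHandle()
        {
            // Any other exception may have left the protocol out of sync,
            // so the connection must not be reused.
            if (!daemonException && std::uncaught_exceptions() > 0)
                pool.drop();
            else
                pool.putBack();
        }
    };

    int main()
    {
        FakePool pool;
        try {
            ConnectionHandle h(pool);
            throw std::runtime_error("I/O error halfway through a request");
        } catch (...) { }
        std::cout << pool.dropped << "\n"; // prints 1: the connection was discarded
    }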
--- src/libstore/remote-store.cc | 171 ++++++++++++++++++++++------------- src/libstore/remote-store.hh | 7 +- src/libutil/pool.hh | 6 +- 3 files changed, 121 insertions(+), 63 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index ea86ef052f5..ef8b0e53b80 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -161,7 +161,8 @@ void RemoteStore::initConnection(Connection & conn) if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 11) conn.to << false; - conn.processStderr(); + auto ex = conn.processStderr(); + if (ex) std::rethrow_exception(ex); } catch (Error & e) { throw Error("cannot open connection to remote store '%s': %s", getUri(), e.what()); @@ -195,22 +196,68 @@ void RemoteStore::setOptions(Connection & conn) conn.to << i.first << i.second.value; } - conn.processStderr(); + auto ex = conn.processStderr(); + if (ex) std::rethrow_exception(ex); +} + + +/* A wrapper around Pool::Handle that marks + the connection as bad (causing it to be closed) if a non-daemon + exception is thrown before the handle is closed. Such an exception + causes a deviation from the expected protocol and therefore a + desynchronization between the client and daemon. */ +struct ConnectionHandle +{ + Pool::Handle handle; + bool daemonException = false; + + ConnectionHandle(Pool::Handle && handle) + : handle(std::move(handle)) + { } + + ConnectionHandle(ConnectionHandle && h) + : handle(std::move(h.handle)) + { } + + ~ConnectionHandle() + { + if (!daemonException && std::uncaught_exception()) { + handle.markBad(); + debug("closing daemon connection because of an exception"); + } + } + + RemoteStore::Connection * operator -> () { return &*handle; } + + void processStderr(Sink * sink = 0, Source * source = 0) + { + auto ex = handle->processStderr(sink, source); + if (ex) { + daemonException = true; + std::rethrow_exception(ex); + } + } +}; + + +ConnectionHandle RemoteStore::getConnection() +{ + return ConnectionHandle(connections->get()); } bool RemoteStore::isValidPathUncached(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopIsValidPath << path; - conn->processStderr(); + conn.processStderr(); return readInt(conn->from); } PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute) { - auto conn(connections->get()); + auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { PathSet res; for (auto & i : paths) @@ -218,7 +265,7 @@ PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybe return res; } else { conn->to << wopQueryValidPaths << paths; - conn->processStderr(); + conn.processStderr(); return readStorePaths(*this, conn->from); } } @@ -226,27 +273,27 @@ PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybe PathSet RemoteStore::queryAllValidPaths() { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryAllValidPaths; - conn->processStderr(); + conn.processStderr(); return readStorePaths(*this, conn->from); } PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths) { - auto conn(connections->get()); + auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { PathSet res; for (auto & i : paths) { conn->to << wopHasSubstitutes << i; - conn->processStderr(); + conn.processStderr(); if (readInt(conn->from)) res.insert(i); } return res; } else { conn->to << wopQuerySubstitutablePaths << paths; - conn->processStderr(); + conn.processStderr(); 
return readStorePaths(*this, conn->from); } } @@ -257,14 +304,14 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths, { if (paths.empty()) return; - auto conn(connections->get()); + auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) { for (auto & i : paths) { SubstitutablePathInfo info; conn->to << wopQuerySubstitutablePathInfo << i; - conn->processStderr(); + conn.processStderr(); unsigned int reply = readInt(conn->from); if (reply == 0) continue; info.deriver = readString(conn->from); @@ -278,7 +325,7 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths, } else { conn->to << wopQuerySubstitutablePathInfos << paths; - conn->processStderr(); + conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { Path path = readStorePath(*this, conn->from); @@ -300,10 +347,10 @@ void RemoteStore::queryPathInfoUncached(const Path & path, try { std::shared_ptr info; { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryPathInfo << path; try { - conn->processStderr(); + conn.processStderr(); } catch (Error & e) { // Ugly backwards compatibility hack. if (e.msg().find("is not valid") != std::string::npos) @@ -335,9 +382,9 @@ void RemoteStore::queryPathInfoUncached(const Path & path, void RemoteStore::queryReferrers(const Path & path, PathSet & referrers) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryReferrers << path; - conn->processStderr(); + conn.processStderr(); PathSet referrers2 = readStorePaths(*this, conn->from); referrers.insert(referrers2.begin(), referrers2.end()); } @@ -345,36 +392,36 @@ void RemoteStore::queryReferrers(const Path & path, PathSet RemoteStore::queryValidDerivers(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryValidDerivers << path; - conn->processStderr(); + conn.processStderr(); return readStorePaths(*this, conn->from); } PathSet RemoteStore::queryDerivationOutputs(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryDerivationOutputs << path; - conn->processStderr(); + conn.processStderr(); return readStorePaths(*this, conn->from); } PathSet RemoteStore::queryDerivationOutputNames(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryDerivationOutputNames << path; - conn->processStderr(); + conn.processStderr(); return readStrings(conn->from); } Path RemoteStore::queryPathFromHashPart(const string & hashPart) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopQueryPathFromHashPart << hashPart; - conn->processStderr(); + conn.processStderr(); Path path = readString(conn->from); if (!path.empty()) assertStorePath(path); return path; @@ -384,7 +431,7 @@ Path RemoteStore::queryPathFromHashPart(const string & hashPart) void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr accessor) { - auto conn(connections->get()); + auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 18) { conn->to << wopImportPaths; @@ -403,7 +450,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, ; }); - conn->processStderr(0, source2.get()); + conn.processStderr(0, source2.get()); auto importedPaths = readStorePaths(*this, conn->from); assert(importedPaths.size() <= 1); @@ -417,7 +464,7 @@ void RemoteStore::addToStore(const 
ValidPathInfo & info, Source & source, << repair << !checkSigs; bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21; if (!tunnel) copyNAR(source, conn->to); - conn->processStderr(0, tunnel ? &source : nullptr); + conn.processStderr(0, tunnel ? &source : nullptr); } } @@ -427,7 +474,7 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath, { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); - auto conn(connections->get()); + auto conn(getConnection()); Path srcPath(absPath(_srcPath)); @@ -445,13 +492,13 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath, dumpPath(srcPath, conn->to, filter); } conn->to.warn = false; - conn->processStderr(); + conn.processStderr(); } catch (SysError & e) { /* Daemon closed while we were sending the path. Probably OOM or I/O error. */ if (e.errNo == EPIPE) try { - conn->processStderr(); + conn.processStderr(); } catch (EndOfFile & e) { } throw; } @@ -465,17 +512,17 @@ Path RemoteStore::addTextToStore(const string & name, const string & s, { if (repair) throw Error("repairing is not supported when building through the Nix daemon"); - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopAddTextToStore << name << s << references; - conn->processStderr(); + conn.processStderr(); return readStorePath(*this, conn->from); } void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopBuildPaths; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13) { conn->to << drvPaths; @@ -494,7 +541,7 @@ void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) drvPaths2.insert(string(i, 0, i.find('!'))); conn->to << drvPaths2; } - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } @@ -502,9 +549,9 @@ void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDerivation & drv, BuildMode buildMode) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopBuildDerivation << drvPath << drv << buildMode; - conn->processStderr(); + conn.processStderr(); BuildResult res; unsigned int status; conn->from >> status >> res.errorMsg; @@ -515,45 +562,45 @@ BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDeriva void RemoteStore::ensurePath(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopEnsurePath << path; - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } void RemoteStore::addTempRoot(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopAddTempRoot << path; - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } void RemoteStore::addIndirectRoot(const Path & path) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopAddIndirectRoot << path; - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } void RemoteStore::syncWithGC() { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopSyncWithGC; - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } Roots RemoteStore::findRoots() { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopFindRoots; - conn->processStderr(); + conn.processStderr(); size_t count = readNum(conn->from); Roots result; while (count--) { @@ -567,7 +614,7 
@@ Roots RemoteStore::findRoots() void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness @@ -575,7 +622,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) /* removed options */ << 0 << 0 << 0; - conn->processStderr(); + conn.processStderr(); results.paths = readStrings(conn->from); results.bytesFreed = readLongLong(conn->from); @@ -590,27 +637,27 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) void RemoteStore::optimiseStore() { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopOptimiseStore; - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopVerifyStore << checkContents << repair; - conn->processStderr(); + conn.processStderr(); return readInt(conn->from); } void RemoteStore::addSignatures(const Path & storePath, const StringSet & sigs) { - auto conn(connections->get()); + auto conn(getConnection()); conn->to << wopAddSignatures << storePath << sigs; - conn->processStderr(); + conn.processStderr(); readInt(conn->from); } @@ -620,13 +667,13 @@ void RemoteStore::queryMissing(const PathSet & targets, unsigned long long & downloadSize, unsigned long long & narSize) { { - auto conn(connections->get()); + auto conn(getConnection()); if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 19) // Don't hold the connection handle in the fallback case // to prevent a deadlock. goto fallback; conn->to << wopQueryMissing << targets; - conn->processStderr(); + conn.processStderr(); willBuild = readStorePaths(*this, conn->from); willSubstitute = readStorePaths(*this, conn->from); unknown = readStorePaths(*this, conn->from); @@ -642,7 +689,7 @@ void RemoteStore::queryMissing(const PathSet & targets, void RemoteStore::connect() { - auto conn(connections->get()); + auto conn(getConnection()); } @@ -679,7 +726,7 @@ static Logger::Fields readFields(Source & from) } -void RemoteStore::Connection::processStderr(Sink * sink, Source * source) +std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source * source) { to.flush(); @@ -704,7 +751,7 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source) else if (msg == STDERR_ERROR) { string error = readString(from); unsigned int status = readInt(from); - throw Error(status, error); + return std::make_exception_ptr(Error(status, error)); } else if (msg == STDERR_NEXT) @@ -738,6 +785,8 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source) else throw Error("got unknown message type %x from Nix daemon", msg); } + + return nullptr; } static std::string uriScheme = "unix://"; diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 9d768576bad..7f9d7d1f56d 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -14,6 +14,7 @@ class Pid; struct FdSink; struct FdSource; template class Pool; +struct ConnectionHandle; /* FIXME: RemoteStore is a misnomer - should be something like @@ -111,7 +112,7 @@ protected: virtual ~Connection(); - void processStderr(Sink * sink = 0, Source * source = 0); + std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0); }; ref openConnectionWrapper(); @@ -124,6 +125,10 @@ protected: 
virtual void setOptions(Connection & conn); + ConnectionHandle getConnection(); + + friend class ConnectionHandle; + private: std::atomic_bool failed{false}; diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh index 0b142b0597c..d49067bb95d 100644 --- a/src/libutil/pool.hh +++ b/src/libutil/pool.hh @@ -97,6 +97,7 @@ public: private: Pool & pool; std::shared_ptr r; + bool bad = false; friend Pool; @@ -112,7 +113,8 @@ public: if (!r) return; { auto state_(pool.state.lock()); - state_->idle.push_back(ref(r)); + if (!bad) + state_->idle.push_back(ref(r)); assert(state_->inUse); state_->inUse--; } @@ -121,6 +123,8 @@ public: R * operator -> () { return &*r; } R & operator * () { return *r; } + + void markBad() { bad = true; } }; Handle get() From bd78544f66a001f00623d3d0e9d50b019ada6a9a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Oct 2018 23:39:36 +0200 Subject: [PATCH 1253/2196] Fix assertion failure in Store::queryPathInfo() $ nix-store -qR /nix/store/fnord nix-store: src/libstore/store-api.cc:80: std::__cxx11::string nix::storePathToHash(const Path&): Assertion `base.size() >= storePathHashLen' failed. Aborted --- src/libstore/store-api.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 2c0f68651e4..92e2685f7f6 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -320,6 +320,8 @@ ref Store::queryPathInfo(const Path & storePath) void Store::queryPathInfo(const Path & storePath, Callback> callback) { + assertStorePath(storePath); + auto hashPart = storePathToHash(storePath); try { From 73c2ae43f08ca35cbb8f86ec7c2efc15ad8686b9 Mon Sep 17 00:00:00 2001 From: Antoine Eiche Date: Sat, 29 Sep 2018 09:42:11 +0200 Subject: [PATCH 1254/2196] Add --graphml option to the nix-store --query command This prints the references graph of the store paths in the graphML format [1]. The graphML format is supported by several graph tools such as the Python Networkx library or the Apache Thinkerpop project. [1] http://graphml.graphdrawing.org --- doc/manual/command-ref/nix-store.xml | 12 ++++ src/nix-store/graphml.cc | 90 ++++++++++++++++++++++++++++ src/nix-store/graphml.hh | 11 ++++ src/nix-store/nix-store.cc | 14 ++++- 4 files changed, 126 insertions(+), 1 deletion(-) create mode 100644 src/nix-store/graphml.cc create mode 100644 src/nix-store/graphml.hh diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml index c827d85b381..41a04f265d7 100644 --- a/doc/manual/command-ref/nix-store.xml +++ b/doc/manual/command-ref/nix-store.xml @@ -679,6 +679,18 @@ query is applied to the target of the symlink. + + + Prints the references graph of the store paths + paths in the GraphML file format. + This can be used to visualise dependency graphs. To obtain a + build-time dependency graph, apply this to a store derivation. To + obtain a runtime dependency graph, apply it to an output + path. + + + name name diff --git a/src/nix-store/graphml.cc b/src/nix-store/graphml.cc new file mode 100644 index 00000000000..670fbe227a4 --- /dev/null +++ b/src/nix-store/graphml.cc @@ -0,0 +1,90 @@ +#include "graphml.hh" +#include "util.hh" +#include "store-api.hh" +#include "derivations.hh" + +#include + + +using std::cout; + +namespace nix { + + +static inline const string & xmlQuote(const string & s) +{ + // Luckily, store paths shouldn't contain any character that needs to be + // quoted. 
+ return s; +} + + +static string symbolicName(const string & path) +{ + string p = baseNameOf(path); + return string(p, p.find('-') + 1); +} + + +static string makeEdge(const string & src, const string & dst) +{ + return fmt(" \n", + xmlQuote(src), xmlQuote(dst)); +} + + +static string makeNode(const ValidPathInfo & info) +{ + return fmt( + " \n" + " %2%\n" + " %3%\n" + " %4%\n" + " \n", + info.path, + info.narSize, + symbolicName(info.path), + (isDerivation(info.path) ? "derivation" : "output-path")); +} + + +void printGraphML(ref store, const PathSet & roots) +{ + PathSet workList(roots); + PathSet doneSet; + std::pair ret; + + cout << "\n" + << "\n" + << "" + << "" + << "" + << "\n"; + + while (!workList.empty()) { + Path path = *(workList.begin()); + workList.erase(path); + + ret = doneSet.insert(path); + if (ret.second == false) continue; + + ValidPathInfo info = *(store->queryPathInfo(path)); + cout << makeNode(info); + + for (auto & p : store->queryPathInfo(path)->references) { + if (p != path) { + workList.insert(p); + cout << makeEdge(path, p); + } + } + + } + + cout << "\n"; + cout << "\n"; +} + + +} diff --git a/src/nix-store/graphml.hh b/src/nix-store/graphml.hh new file mode 100644 index 00000000000..b78df1e49a6 --- /dev/null +++ b/src/nix-store/graphml.hh @@ -0,0 +1,11 @@ +#pragma once + +#include "types.hh" + +namespace nix { + +class Store; + +void printGraphML(ref store, const PathSet & roots); + +} diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index fe68f681ae2..4051fdbe166 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -9,6 +9,7 @@ #include "util.hh" #include "worker-protocol.hh" #include "xmlgraph.hh" +#include "graphml.hh" #include #include @@ -273,7 +274,7 @@ static void opQuery(Strings opFlags, Strings opArgs) enum QueryType { qDefault, qOutputs, qRequisites, qReferences, qReferrers , qReferrersClosure, qDeriver, qBinding, qHash, qSize - , qTree, qGraph, qXml, qResolve, qRoots }; + , qTree, qGraph, qXml, qGraphML, qResolve, qRoots }; QueryType query = qDefault; bool useOutput = false; bool includeOutputs = false; @@ -300,6 +301,7 @@ static void opQuery(Strings opFlags, Strings opArgs) else if (i == "--tree") query = qTree; else if (i == "--graph") query = qGraph; else if (i == "--xml") query = qXml; + else if (i == "--graphml") query = qGraphML; else if (i == "--resolve") query = qResolve; else if (i == "--roots") query = qRoots; else if (i == "--use-output" || i == "-u") useOutput = true; @@ -413,6 +415,16 @@ static void opQuery(Strings opFlags, Strings opArgs) break; } + case qGraphML: { + PathSet roots; + for (auto & i : opArgs) { + PathSet paths = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); + roots.insert(paths.begin(), paths.end()); + } + printGraphML(ref(store), roots); + break; + } + case qResolve: { for (auto & i : opArgs) cout << format("%1%\n") % store->followLinksToStorePath(i); From d506342aa2b6945899988878b7c58de683cb573a Mon Sep 17 00:00:00 2001 From: Antoine Eiche Date: Sat, 20 Oct 2018 09:48:53 +0200 Subject: [PATCH 1255/2196] Remove the `--xml` query command option The `--graphml` option can be used instead. 
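
For example, a dependency graph that used to be produced with the removed
option can be emitted as GraphML instead (the store path below is only a
placeholder):

    $ nix-store --query --graphml /nix/store/<hash>-hello-2.10 > hello.graphml
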
--- src/nix-store/nix-store.cc | 14 +------- src/nix-store/xmlgraph.cc | 66 -------------------------------------- src/nix-store/xmlgraph.hh | 11 ------- 3 files changed, 1 insertion(+), 90 deletions(-) delete mode 100644 src/nix-store/xmlgraph.cc delete mode 100644 src/nix-store/xmlgraph.hh diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 4051fdbe166..e245bd643d4 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -8,7 +8,6 @@ #include "shared.hh" #include "util.hh" #include "worker-protocol.hh" -#include "xmlgraph.hh" #include "graphml.hh" #include @@ -274,7 +273,7 @@ static void opQuery(Strings opFlags, Strings opArgs) enum QueryType { qDefault, qOutputs, qRequisites, qReferences, qReferrers , qReferrersClosure, qDeriver, qBinding, qHash, qSize - , qTree, qGraph, qXml, qGraphML, qResolve, qRoots }; + , qTree, qGraph, qGraphML, qResolve, qRoots }; QueryType query = qDefault; bool useOutput = false; bool includeOutputs = false; @@ -300,7 +299,6 @@ static void opQuery(Strings opFlags, Strings opArgs) else if (i == "--size") query = qSize; else if (i == "--tree") query = qTree; else if (i == "--graph") query = qGraph; - else if (i == "--xml") query = qXml; else if (i == "--graphml") query = qGraphML; else if (i == "--resolve") query = qResolve; else if (i == "--roots") query = qRoots; @@ -405,16 +403,6 @@ static void opQuery(Strings opFlags, Strings opArgs) break; } - case qXml: { - PathSet roots; - for (auto & i : opArgs) { - PathSet paths = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise); - roots.insert(paths.begin(), paths.end()); - } - printXmlGraph(ref(store), roots); - break; - } - case qGraphML: { PathSet roots; for (auto & i : opArgs) { diff --git a/src/nix-store/xmlgraph.cc b/src/nix-store/xmlgraph.cc deleted file mode 100644 index 0f7be7f7a02..00000000000 --- a/src/nix-store/xmlgraph.cc +++ /dev/null @@ -1,66 +0,0 @@ -#include "xmlgraph.hh" -#include "util.hh" -#include "store-api.hh" - -#include - - -using std::cout; - -namespace nix { - - -static inline const string & xmlQuote(const string & s) -{ - // Luckily, store paths shouldn't contain any character that needs to be - // quoted. 
- return s; -} - - -static string makeEdge(const string & src, const string & dst) -{ - format f = format(" \n") - % xmlQuote(src) % xmlQuote(dst); - return f.str(); -} - - -static string makeNode(const string & id) -{ - format f = format(" \n") % xmlQuote(id); - return f.str(); -} - - -void printXmlGraph(ref store, const PathSet & roots) -{ - PathSet workList(roots); - PathSet doneSet; - - cout << "\n" - << "\n"; - - while (!workList.empty()) { - Path path = *(workList.begin()); - workList.erase(path); - - if (doneSet.find(path) != doneSet.end()) continue; - doneSet.insert(path); - - cout << makeNode(path); - - for (auto & p : store->queryPathInfo(path)->references) { - if (p != path) { - workList.insert(p); - cout << makeEdge(p, path); - } - } - - } - - cout << "\n"; -} - - -} diff --git a/src/nix-store/xmlgraph.hh b/src/nix-store/xmlgraph.hh deleted file mode 100644 index a6e7d4e2805..00000000000 --- a/src/nix-store/xmlgraph.hh +++ /dev/null @@ -1,11 +0,0 @@ -#pragma once - -#include "types.hh" - -namespace nix { - -class Store; - -void printXmlGraph(ref store, const PathSet & roots); - -} From 3cd15c5b1f5a8e6de87d5b7e8cc2f1326b420c88 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 22 Oct 2018 21:49:56 +0200 Subject: [PATCH 1256/2196] Per-output reference and closure size checks In structured-attributes derivations, you can now specify per-output checks such as: outputChecks."out" = { # The closure of 'out' must not be larger than 256 MiB. maxClosureSize = 256 * 1024 * 1024; # It must not refer to C compiler or to the 'dev' output. disallowedRequisites = [ stdenv.cc "dev" ]; }; outputChecks."dev" = { # The 'dev' output must not be larger than 128 KiB. maxSize = 128 * 1024; }; Also fixed a bug in allowedRequisites that caused it to ignore self-references. --- src/libstore/build.cc | 219 ++++++++++++++++++++++++++++++++---------- tests/check-reqs.nix | 2 +- 2 files changed, 169 insertions(+), 52 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 0073b9b727e..cf4218a261f 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -21,6 +21,7 @@ #include #include #include +#include #include #include @@ -857,7 +858,7 @@ class DerivationGoal : public Goal building multiple times. Since this contains the hash, it allows us to compare whether two rounds produced the same result. */ - ValidPathInfos prevInfos; + std::map prevInfos; const uid_t sandboxUid = 1000; const gid_t sandboxGid = 100; @@ -938,6 +939,11 @@ class DerivationGoal : public Goal as valid. */ void registerOutputs(); + /* Check that an output meets the requirements specified by the + 'outputChecks' attribute (or the legacy + '{allowed,disallowed}{References,Requisites}' attributes). */ + void checkOutputs(const std::map & outputs); + /* Open a log file and a pipe to it. */ Path openLogFile(); @@ -3010,7 +3016,7 @@ void DerivationGoal::registerOutputs() if (allValid) return; } - ValidPathInfos infos; + std::map infos; /* Set of inodes seen during calls to canonicalisePathMetaData() for this build's outputs. This needs to be shared between @@ -3195,49 +3201,6 @@ void DerivationGoal::registerOutputs() debug(format("referenced input: '%1%'") % i); } - /* Enforce `allowedReferences' and friends. 
*/ - auto checkRefs = [&](const string & attrName, bool allowed, bool recursive) { - auto value = parsedDrv->getStringsAttr(attrName); - if (!value) return; - - PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); - - PathSet used; - if (recursive) { - /* Our requisites are the union of the closures of our references. */ - for (auto & i : references) - /* Don't call computeFSClosure on ourselves. */ - if (path != i) - worker.store.computeFSClosure(i, used); - } else - used = references; - - PathSet badPaths; - - for (auto & i : used) - if (allowed) { - if (spec.find(i) == spec.end()) - badPaths.insert(i); - } else { - if (spec.find(i) != spec.end()) - badPaths.insert(i); - } - - if (!badPaths.empty()) { - string badPathsStr; - for (auto & i : badPaths) { - badPathsStr += "\n\t"; - badPathsStr += i; - } - throw BuildError(format("output '%1%' is not allowed to refer to the following paths:%2%") % actualPath % badPathsStr); - } - }; - - checkRefs("allowedReferences", true, false); - checkRefs("allowedRequisites", true, true); - checkRefs("disallowedReferences", false, false); - checkRefs("disallowedRequisites", false, true); - if (curRound == nrRounds) { worker.store.optimisePath(actualPath); // FIXME: combine with scanForReferences() worker.markContentsGood(path); @@ -3253,11 +3216,14 @@ void DerivationGoal::registerOutputs() if (!info.references.empty()) info.ca.clear(); - infos.push_back(info); + infos[i.first] = info; } if (buildMode == bmCheck) return; + /* Apply output checks. */ + checkOutputs(infos); + /* Compare the result with the previous round, and report which path is different, if any.*/ if (curRound > 1 && prevInfos != infos) { @@ -3265,16 +3231,16 @@ void DerivationGoal::registerOutputs() for (auto i = prevInfos.begin(), j = infos.begin(); i != prevInfos.end(); ++i, ++j) if (!(*i == *j)) { result.isNonDeterministic = true; - Path prev = i->path + checkSuffix; + Path prev = i->second.path + checkSuffix; bool prevExists = keepPreviousRound && pathExists(prev); auto msg = prevExists - ? fmt("output '%1%' of '%2%' differs from '%3%' from previous round", i->path, drvPath, prev) - : fmt("output '%1%' of '%2%' differs from previous round", i->path, drvPath); + ? fmt("output '%1%' of '%2%' differs from '%3%' from previous round", i->second.path, drvPath, prev) + : fmt("output '%1%' of '%2%' differs from previous round", i->second.path, drvPath); auto diffHook = settings.diffHook; if (prevExists && diffHook != "" && runDiffHook) { try { - auto diff = runProgram(diffHook, true, {prev, i->path}); + auto diff = runProgram(diffHook, true, {prev, i->second.path}); if (diff != "") printError(chomp(diff)); } catch (Error & error) { @@ -3319,7 +3285,11 @@ void DerivationGoal::registerOutputs() /* Register each output path as valid, and register the sets of paths referenced by each of them. If there are cycles in the outputs, this will fail. */ - worker.store.registerValidPaths(infos); + { + ValidPathInfos infos2; + for (auto & i : infos) infos2.push_back(i.second); + worker.store.registerValidPaths(infos2); + } /* In case of a fixed-output derivation hash mismatch, throw an exception now that we have registered the output as valid. 
*/ @@ -3328,6 +3298,153 @@ void DerivationGoal::registerOutputs() } +void DerivationGoal::checkOutputs(const std::map & outputs) +{ + std::map outputsByPath; + for (auto & output : outputs) + outputsByPath.emplace(output.second.path, output.second); + + for (auto & output : outputs) { + auto & outputName = output.first; + auto & info = output.second; + + struct Checks + { + std::experimental::optional maxSize, maxClosureSize; + std::experimental::optional allowedReferences, allowedRequisites, disallowedReferences, disallowedRequisites; + }; + + /* Compute the closure and closure size of some output. This + is slightly tricky because some of its references (namely + other outputs) may not be valid yet. */ + auto getClosure = [&](const Path & path) + { + uint64_t closureSize = 0; + PathSet pathsDone; + std::queue pathsLeft; + pathsLeft.push(path); + + while (!pathsLeft.empty()) { + auto path = pathsLeft.front(); + pathsLeft.pop(); + if (!pathsDone.insert(path).second) continue; + + auto i = outputsByPath.find(path); + if (i != outputsByPath.end()) { + closureSize += i->second.narSize; + for (auto & ref : i->second.references) + pathsLeft.push(ref); + } else { + auto info = worker.store.queryPathInfo(path); + closureSize += info->narSize; + for (auto & ref : info->references) + pathsLeft.push(ref); + } + } + + return std::make_pair(pathsDone, closureSize); + }; + + auto checkRefs = [&](const std::experimental::optional & value, bool allowed, bool recursive) + { + if (!value) return; + + PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); + + PathSet used = recursive ? getClosure(info.path).first : info.references; + + PathSet badPaths; + + for (auto & i : used) + if (allowed) { + if (spec.find(i) == spec.end()) + badPaths.insert(i); + } else { + if (spec.find(i) != spec.end()) + badPaths.insert(i); + } + + if (!badPaths.empty()) { + string badPathsStr; + for (auto & i : badPaths) { + badPathsStr += "\n "; + badPathsStr += i; + } + throw BuildError("output '%s' is not allowed to refer to the following paths:%s", info.path, badPathsStr); + } + }; + + auto applyChecks = [&](const Checks & checks) + { + if (checks.maxSize && info.narSize > *checks.maxSize) + throw BuildError("path '%s' is too large at %d bytes; limit is %d bytes", + info.path, info.narSize, *checks.maxSize); + + if (checks.maxClosureSize) { + uint64_t closureSize = getClosure(info.path).second; + if (closureSize > *checks.maxClosureSize) + throw BuildError("closure of path '%s' is too large at %d bytes; limit is %d bytes", + info.path, closureSize, *checks.maxClosureSize); + } + + checkRefs(checks.allowedReferences, true, false); + checkRefs(checks.allowedRequisites, true, true); + checkRefs(checks.disallowedReferences, false, false); + checkRefs(checks.disallowedRequisites, false, true); + }; + + if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) { + auto outputChecks = structuredAttrs->find("outputChecks"); + if (outputChecks != structuredAttrs->end()) { + auto output = outputChecks->find(outputName); + + if (output != outputChecks->end()) { + Checks checks; + + auto maxSize = output->find("maxSize"); + if (maxSize != output->end()) + checks.maxSize = maxSize->get(); + + auto maxClosureSize = output->find("maxClosureSize"); + if (maxClosureSize != output->end()) + checks.maxClosureSize = maxClosureSize->get(); + + auto get = [&](const std::string & name) -> std::experimental::optional { + auto i = output->find(name); + if (i != output->end()) { + Strings res; + for (auto j = i->begin(); j != i->end(); 
++j) { + if (!j->is_string()) + throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath); + res.push_back(j->get()); + } + checks.disallowedRequisites = res; + return res; + } + return {}; + }; + + checks.allowedReferences = get("allowedReferences"); + checks.allowedRequisites = get("allowedRequisites"); + checks.disallowedReferences = get("disallowedReferences"); + checks.disallowedRequisites = get("disallowedRequisites"); + + applyChecks(checks); + } + } + } else { + // legacy non-structured-attributes case + Checks checks; + checks.allowedReferences = parsedDrv->getStringsAttr("allowedReferences"); + checks.allowedRequisites = parsedDrv->getStringsAttr("allowedRequisites"); + checks.disallowedReferences = parsedDrv->getStringsAttr("disallowedReferences"); + checks.disallowedRequisites = parsedDrv->getStringsAttr("disallowedRequisites"); + applyChecks(checks); + } + } +} + + Path DerivationGoal::openLogFile() { logSize = 0; diff --git a/tests/check-reqs.nix b/tests/check-reqs.nix index 41436cb48e0..47b5b3d9c72 100644 --- a/tests/check-reqs.nix +++ b/tests/check-reqs.nix @@ -33,7 +33,7 @@ rec { }; # When specifying all the requisites, the build succeeds. - test1 = makeTest 1 [ dep1 dep2 deps ]; + test1 = makeTest 1 [ "out" dep1 dep2 deps ]; # But missing anything it fails. test2 = makeTest 2 [ dep2 deps ]; From a25abe823fd2719eee09c28ccddf6d9b1c13a2e1 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Mon, 22 Oct 2018 12:14:02 +0200 Subject: [PATCH 1257/2196] Promote log-lines to a fully-qualified option This allows commands like nix build --log-lines 30 nixpkgs.hello in order to obtain more information in case of a failure. --- src/libstore/globals.hh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index cf4ae63cdc2..6b3e204536f 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -82,9 +82,9 @@ public: /* Whether to show build log output in real time. */ bool verboseBuild = true; - /* If verboseBuild is false, the number of lines of the tail of - the log to show if a build fails. */ - size_t logLines = 10; + Setting logLines{this, 10, "log-lines", + "If verbose-build is false, the number of lines of the tail of " + "the log to show if a build fails."}; MaxBuildJobsSetting maxBuildJobs{this, 1, "max-jobs", "Maximum number of parallel build jobs. \"auto\" means use number of cores.", From f6a3dfe4e06980b2d060fd1a646cb5ca20f29779 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 26 Oct 2018 11:35:46 +0200 Subject: [PATCH 1258/2196] Merge all nix-* binaries into nix These are all symlinks to 'nix' now, reducing the installed size by about ~1.7 MiB. 
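
For illustration, after this change the classic commands are expected to be
symlinks that all resolve to the single 'nix' binary, which dispatches on the
name it was invoked as (exact locations depend on the installation prefix):

    $ readlink "$(command -v nix-store)"   # expected to point at the 'nix' binary
    $ nix-store --version                  # still behaves like the old nix-store
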
--- Makefile | 10 --------- local.mk | 2 +- src/build-remote/build-remote.cc | 19 +++++++++-------- src/build-remote/local.mk | 9 -------- src/nix-build/local.mk | 9 -------- src/nix-build/nix-build.cc | 14 ++++--------- src/nix-channel/local.mk | 7 ------- src/nix-channel/nix-channel.cc | 16 ++++++++------ src/nix-collect-garbage/local.mk | 7 ------- .../nix-collect-garbage.cc | 15 +++++++------ src/nix-copy-closure/local.mk | 7 ------- src/nix-copy-closure/nix-copy-closure.cc | 13 +++++++----- src/nix-daemon/local.mk | 13 ------------ src/nix-daemon/nix-daemon.cc | 15 +++++++------ src/nix-env/local.mk | 7 ------- src/nix-env/nix-env.cc | 14 +++++++------ src/nix-instantiate/local.mk | 7 ------- src/nix-instantiate/nix-instantiate.cc | 16 +++++++------- src/nix-prefetch-url/local.mk | 7 ------- src/nix-prefetch-url/nix-prefetch-url.cc | 14 +++++++------ src/nix-store/local.mk | 9 -------- src/nix-store/nix-store.cc | 13 +++++++----- src/nix/local.mk | 21 ++++++++++++++++--- src/nix/main.cc | 6 +++--- 24 files changed, 105 insertions(+), 165 deletions(-) delete mode 100644 src/build-remote/local.mk delete mode 100644 src/nix-build/local.mk delete mode 100644 src/nix-channel/local.mk delete mode 100644 src/nix-collect-garbage/local.mk delete mode 100644 src/nix-copy-closure/local.mk delete mode 100644 src/nix-daemon/local.mk delete mode 100644 src/nix-env/local.mk delete mode 100644 src/nix-instantiate/local.mk delete mode 100644 src/nix-prefetch-url/local.mk delete mode 100644 src/nix-store/local.mk diff --git a/Makefile b/Makefile index 834f84b286b..45a3338ed21 100644 --- a/Makefile +++ b/Makefile @@ -5,17 +5,7 @@ makefiles = \ src/libmain/local.mk \ src/libexpr/local.mk \ src/nix/local.mk \ - src/nix-store/local.mk \ - src/nix-instantiate/local.mk \ - src/nix-env/local.mk \ - src/nix-daemon/local.mk \ - src/nix-collect-garbage/local.mk \ - src/nix-copy-closure/local.mk \ - src/nix-prefetch-url/local.mk \ src/resolve-system-dependencies/local.mk \ - src/nix-channel/local.mk \ - src/nix-build/local.mk \ - src/build-remote/local.mk \ scripts/local.mk \ corepkgs/local.mk \ misc/systemd/local.mk \ diff --git a/local.mk b/local.mk index 5d7e0fb2e42..4b380176f2e 100644 --- a/local.mk +++ b/local.mk @@ -6,7 +6,7 @@ dist-files += configure config.h.in nix.spec perl/configure clean-files += Makefile.config -GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr +GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr -I src/nix $(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644))) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 38dbe3e58b2..abf3669b5b3 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -17,6 +17,7 @@ #include "store-api.hh" #include "derivations.hh" #include "local-store.hh" +#include "legacy.hh" using namespace nix; using std::cin; @@ -37,11 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, unsigned long long slot) return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true); } -int main (int argc, char * * argv) +static int _main(int argc, char * * argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - + { logger = makeJSONLogger(*logger); /* Ensure we don't get any SSH passphrase or host key popups. 
*/ @@ -80,7 +79,7 @@ int main (int argc, char * * argv) if (machines.empty()) { std::cerr << "# decline-permanently\n"; - return; + return 0; } string drvPath; @@ -90,8 +89,8 @@ int main (int argc, char * * argv) try { auto s = readString(source); - if (s != "try") return; - } catch (EndOfFile &) { return; } + if (s != "try") return 0; + } catch (EndOfFile &) { return 0; } auto amWilling = readInt(source); auto neededSystem = readString(source); @@ -253,6 +252,8 @@ int main (int argc, char * * argv) copyPaths(ref(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute); } - return; - }); + return 0; + } } + +static RegisterLegacyCommand s1("build-remote", _main); diff --git a/src/build-remote/local.mk b/src/build-remote/local.mk deleted file mode 100644 index 50b0409d188..00000000000 --- a/src/build-remote/local.mk +++ /dev/null @@ -1,9 +0,0 @@ -programs += build-remote - -build-remote_DIR := $(d) - -build-remote_INSTALL_DIR := $(libexecdir)/nix - -build-remote_LIBS = libmain libformat libstore libutil - -build-remote_SOURCES := $(d)/build-remote.cc diff --git a/src/nix-build/local.mk b/src/nix-build/local.mk deleted file mode 100644 index a2d1c91dfd9..00000000000 --- a/src/nix-build/local.mk +++ /dev/null @@ -1,9 +0,0 @@ -programs += nix-build - -nix-build_DIR := $(d) - -nix-build_SOURCES := $(d)/nix-build.cc - -nix-build_LIBS = libmain libexpr libstore libutil libformat - -$(eval $(call install-symlink, nix-build, $(bindir)/nix-shell)) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index b78f3d9e424..618895d387d 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -16,6 +16,7 @@ #include "get-drvs.hh" #include "common-eval-args.hh" #include "attr-path.hh" +#include "legacy.hh" using namespace nix; using namespace std::string_literals; @@ -66,11 +67,8 @@ std::vector shellwords(const string & s) return res; } -void mainWrapped(int argc, char * * argv) +static void _main(int argc, char * * argv) { - initNix(); - initGC(); - auto dryRun = false; auto runEnv = std::regex_search(argv[0], std::regex("nix-shell$")); auto pure = false; @@ -504,9 +502,5 @@ void mainWrapped(int argc, char * * argv) } } -int main(int argc, char * * argv) -{ - return handleExceptions(argv[0], [&]() { - return mainWrapped(argc, argv); - }); -} +static RegisterLegacyCommand s1("nix-build", _main); +static RegisterLegacyCommand s2("nix-shell", _main); diff --git a/src/nix-channel/local.mk b/src/nix-channel/local.mk deleted file mode 100644 index c14e8c359ca..00000000000 --- a/src/nix-channel/local.mk +++ /dev/null @@ -1,7 +0,0 @@ -programs += nix-channel - -nix-channel_DIR := $(d) - -nix-channel_LIBS = libmain libformat libstore libutil - -nix-channel_SOURCES := $(d)/nix-channel.cc diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 2083d3df5ca..8b66cc7e314 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -1,9 +1,11 @@ #include "shared.hh" #include "globals.hh" #include "download.hh" +#include "store-api.hh" +#include "legacy.hh" + #include #include -#include "store-api.hh" #include using namespace nix; @@ -157,11 +159,9 @@ static void update(const StringSet & channelNames) replaceSymlink(profile, channelLink); } -int main(int argc, char ** argv) +static int _main(int argc, char ** argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - + { // Figure out the name of the `.nix-channels' file to use auto home = getHome(); channelsList = home + "/.nix-channels"; @@ -255,5 +255,9 @@ int main(int 
argc, char ** argv) runProgram(settings.nixBinDir + "/nix-env", false, envArgs); break; } - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-channel", _main); diff --git a/src/nix-collect-garbage/local.mk b/src/nix-collect-garbage/local.mk deleted file mode 100644 index 02d14cf6219..00000000000 --- a/src/nix-collect-garbage/local.mk +++ /dev/null @@ -1,7 +0,0 @@ -programs += nix-collect-garbage - -nix-collect-garbage_DIR := $(d) - -nix-collect-garbage_SOURCES := $(d)/nix-collect-garbage.cc - -nix-collect-garbage_LIBS = libmain libstore libutil libformat diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index 37fe22f4813..d4060ac937f 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -2,6 +2,7 @@ #include "profiles.hh" #include "shared.hh" #include "globals.hh" +#include "legacy.hh" #include #include @@ -48,12 +49,10 @@ void removeOldGenerations(std::string dir) } } -int main(int argc, char * * argv) +static int _main(int argc, char * * argv) { - bool removeOld = false; - - return handleExceptions(argv[0], [&]() { - initNix(); + { + bool removeOld = false; GCOptions options; @@ -90,5 +89,9 @@ int main(int argc, char * * argv) PrintFreed freed(true, results); store->collectGarbage(options, results); } - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-collect-garbage", _main); diff --git a/src/nix-copy-closure/local.mk b/src/nix-copy-closure/local.mk deleted file mode 100644 index 5018ab975b4..00000000000 --- a/src/nix-copy-closure/local.mk +++ /dev/null @@ -1,7 +0,0 @@ -programs += nix-copy-closure - -nix-copy-closure_DIR := $(d) - -nix-copy-closure_LIBS = libmain libformat libstore libutil - -nix-copy-closure_SOURCES := $(d)/nix-copy-closure.cc diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc index dfb1b8fc5dc..fdcde8b076b 100755 --- a/src/nix-copy-closure/nix-copy-closure.cc +++ b/src/nix-copy-closure/nix-copy-closure.cc @@ -1,13 +1,12 @@ #include "shared.hh" #include "store-api.hh" +#include "legacy.hh" using namespace nix; -int main(int argc, char ** argv) +static int _main(int argc, char ** argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - + { auto gzip = false; auto toMode = true; auto includeOutputs = false; @@ -61,5 +60,9 @@ int main(int argc, char ** argv) from->computeFSClosure(storePaths2, closure, false, includeOutputs); copyPaths(from, to, closure, NoRepair, NoCheckSigs, useSubstitutes); - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-copy-closure", _main); diff --git a/src/nix-daemon/local.mk b/src/nix-daemon/local.mk deleted file mode 100644 index 5a4474465b3..00000000000 --- a/src/nix-daemon/local.mk +++ /dev/null @@ -1,13 +0,0 @@ -programs += nix-daemon - -nix-daemon_DIR := $(d) - -nix-daemon_SOURCES := $(d)/nix-daemon.cc - -nix-daemon_LIBS = libmain libstore libutil libformat - -nix-daemon_LDFLAGS = -pthread - -ifeq ($(OS), SunOS) - nix-daemon_LDFLAGS += -lsocket -endif diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc index 423ba15e2ad..8368c326614 100644 --- a/src/nix-daemon/nix-daemon.cc +++ b/src/nix-daemon/nix-daemon.cc @@ -9,6 +9,7 @@ #include "monitor-fd.hh" #include "derivations.hh" #include "finally.hh" +#include "legacy.hh" #include @@ -1058,11 +1059,9 @@ static void daemonLoop(char * * argv) } -int main(int argc, char * * argv) +static int _main(int argc, char * * argv) { - return handleExceptions(argv[0], 
[&]() { - initNix(); - + { auto stdio = false; parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { @@ -1122,7 +1121,7 @@ int main(int argc, char * * argv) if (res == -1) throw SysError("splicing data from stdin to daemon socket"); else if (res == 0) - return; + return 0; } } } else { @@ -1131,5 +1130,9 @@ int main(int argc, char * * argv) } else { daemonLoop(argv); } - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-daemon", _main); diff --git a/src/nix-env/local.mk b/src/nix-env/local.mk deleted file mode 100644 index e80719cd76f..00000000000 --- a/src/nix-env/local.mk +++ /dev/null @@ -1,7 +0,0 @@ -programs += nix-env - -nix-env_DIR := $(d) - -nix-env_SOURCES := $(wildcard $(d)/*.cc) - -nix-env_LIBS = libexpr libmain libstore libutil libformat diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index f9c8a8d313e..56ed75daee4 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -13,6 +13,7 @@ #include "json.hh" #include "value-to-json.hh" #include "xml-writer.hh" +#include "legacy.hh" #include #include @@ -1311,12 +1312,9 @@ static void opVersion(Globals & globals, Strings opFlags, Strings opArgs) } -int main(int argc, char * * argv) +static int _main(int argc, char * * argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - initGC(); - + { Strings opFlags, opArgs; Operation op = 0; RepairFlag repair = NoRepair; @@ -1428,5 +1426,9 @@ int main(int argc, char * * argv) op(globals, opFlags, opArgs); globals.state->printStats(); - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-env", _main); diff --git a/src/nix-instantiate/local.mk b/src/nix-instantiate/local.mk deleted file mode 100644 index 7d1bc5ec9df..00000000000 --- a/src/nix-instantiate/local.mk +++ /dev/null @@ -1,7 +0,0 @@ -programs += nix-instantiate - -nix-instantiate_DIR := $(d) - -nix-instantiate_SOURCES := $(d)/nix-instantiate.cc - -nix-instantiate_LIBS = libexpr libmain libstore libutil libformat diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index eb6d34dd821..a736caa8f05 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -9,6 +9,7 @@ #include "util.hh" #include "store-api.hh" #include "common-eval-args.hh" +#include "legacy.hh" #include #include @@ -83,12 +84,9 @@ void processExpr(EvalState & state, const Strings & attrPaths, } -int main(int argc, char * * argv) +static int _main(int argc, char * * argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - initGC(); - + { Strings files; bool readStdin = false; bool fromArgs = false; @@ -171,7 +169,7 @@ int main(int argc, char * * argv) if (p == "") throw Error(format("unable to find '%1%'") % i); std::cout << p << std::endl; } - return; + return 0; } if (readStdin) { @@ -190,5 +188,9 @@ int main(int argc, char * * argv) } state->printStats(); - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-instantiate", _main); diff --git a/src/nix-prefetch-url/local.mk b/src/nix-prefetch-url/local.mk deleted file mode 100644 index 3e7735406af..00000000000 --- a/src/nix-prefetch-url/local.mk +++ /dev/null @@ -1,7 +0,0 @@ -programs += nix-prefetch-url - -nix-prefetch-url_DIR := $(d) - -nix-prefetch-url_SOURCES := $(d)/nix-prefetch-url.cc - -nix-prefetch-url_LIBS = libmain libexpr libstore libutil libformat diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index a3b025723cf..ddb72491321 100644 --- 
a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -6,6 +6,7 @@ #include "eval-inline.hh" #include "common-eval-args.hh" #include "attr-path.hh" +#include "legacy.hh" #include @@ -44,12 +45,9 @@ string resolveMirrorUri(EvalState & state, string uri) } -int main(int argc, char * * argv) +static int _main(int argc, char * * argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - initGC(); - + { HashType ht = htSHA256; std::vector args; bool printPath = getEnv("PRINT_PATH") != ""; @@ -221,5 +219,9 @@ int main(int argc, char * * argv) std::cout << printHash16or32(hash) << std::endl; if (printPath) std::cout << storePath << std::endl; - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-prefetch-url", _main); diff --git a/src/nix-store/local.mk b/src/nix-store/local.mk deleted file mode 100644 index ade0b233adf..00000000000 --- a/src/nix-store/local.mk +++ /dev/null @@ -1,9 +0,0 @@ -programs += nix-store - -nix-store_DIR := $(d) - -nix-store_SOURCES := $(wildcard $(d)/*.cc) - -nix-store_LIBS = libmain libstore libutil libformat - -nix-store_LDFLAGS = -lbz2 -pthread $(SODIUM_LIBS) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index e245bd643d4..a9ad14762e6 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -9,6 +9,7 @@ #include "util.hh" #include "worker-protocol.hh" #include "graphml.hh" +#include "legacy.hh" #include #include @@ -993,11 +994,9 @@ static void opVersion(Strings opFlags, Strings opArgs) /* Scan the arguments; find the operation, set global flags, put all other flags in a list, and put all other arguments in another list. */ -int main(int argc, char * * argv) +static int _main(int argc, char * * argv) { - return handleExceptions(argv[0], [&]() { - initNix(); - + { Strings opFlags, opArgs; Operation op = 0; @@ -1084,5 +1083,9 @@ int main(int argc, char * * argv) store = openStore(); op(opFlags, opArgs); - }); + + return 0; + } } + +static RegisterLegacyCommand s1("nix-store", _main); diff --git a/src/nix/local.mk b/src/nix/local.mk index f76da194467..168936314dc 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -2,10 +2,25 @@ programs += nix nix_DIR := $(d) -nix_SOURCES := $(wildcard $(d)/*.cc) $(wildcard src/linenoise/*.cpp) +nix_SOURCES := \ + $(wildcard $(d)/*.cc) \ + $(wildcard src/linenoise/*.cpp) \ + $(wildcard src/build-remote/*.cc) \ + $(wildcard src/nix-build/*.cc) \ + $(wildcard src/nix-channel/*.cc) \ + $(wildcard src/nix-collect-garbage/*.cc) \ + $(wildcard src/nix-copy-closure/*.cc) \ + $(wildcard src/nix-daemon/*.cc) \ + $(wildcard src/nix-env/*.cc) \ + $(wildcard src/nix-instantiate/*.cc) \ + $(wildcard src/nix-prefetch-url/*.cc) \ + $(wildcard src/nix-store/*.cc) \ nix_LIBS = libexpr libmain libstore libutil libformat -nix_LDFLAGS = -pthread +nix_LDFLAGS = -pthread $(SODIUM_LIBS) -$(eval $(call install-symlink, nix, $(bindir)/nix-hash)) +$(foreach name, \ + nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-rul nix-shell nix-shore, \ + $(eval $(call install-symlink, nix, $(bindir)/$(name)))) +$(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote)) diff --git a/src/nix/main.cc b/src/nix/main.cc index 69791e223c2..64c1dc35787 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -67,9 +67,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs void mainWrapped(int argc, char * * argv) { - verbosity = lvlError; - settings.verboseBuild = false; - /* The 
chroot helper needs to be run before any threads have been started. */ if (argc > 0 && argv[0] == chrootHelperName) { @@ -88,6 +85,9 @@ void mainWrapped(int argc, char * * argv) if (legacy) return legacy(argc, argv); } + verbosity = lvlError; + settings.verboseBuild = false; + NixArgs args; args.parseCmdline(argvToStrings(argc, argv)); From 27c2fcd4c067a176491317e47ca448772f8fcc92 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Fri, 26 Oct 2018 12:38:06 -0500 Subject: [PATCH 1259/2196] src/nix/local.mk: fix typos in names of symlinks --- src/nix/local.mk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/local.mk b/src/nix/local.mk index 168936314dc..bdcca33d2a6 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -21,6 +21,6 @@ nix_LIBS = libexpr libmain libstore libutil libformat nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(foreach name, \ - nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-rul nix-shell nix-shore, \ + nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \ $(eval $(call install-symlink, nix, $(bindir)/$(name)))) $(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote)) From 3283c0dc45e69f1a8e180cfcb226b47d3c826649 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Fri, 26 Oct 2018 12:34:29 -0500 Subject: [PATCH 1260/2196] remote-store.hh: ConnectionHandle is struct, minor fix warning --- src/libstore/remote-store.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 7f9d7d1f56d..3686dc360fe 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -127,7 +127,7 @@ protected: ConnectionHandle getConnection(); - friend class ConnectionHandle; + friend struct ConnectionHandle; private: From 9d24b5d56e024b51240fe1010810f1c343de01ff Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Sat, 20 Oct 2018 01:11:22 -0500 Subject: [PATCH 1261/2196] nix-prefetch-url: progressbar --- src/nix-prefetch-url/nix-prefetch-url.cc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index ddb72491321..54a402241e3 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -7,6 +7,8 @@ #include "common-eval-args.hh" #include "attr-path.hh" #include "legacy.hh" +#include "finally.hh" +#include "progress-bar.hh" #include @@ -96,6 +98,11 @@ static int _main(int argc, char * * argv) if (args.size() > 2) throw UsageError("too many arguments"); + Finally f([]() { stopProgressBar(); }); + + if (isatty(STDERR_FILENO)) + startProgressBar(); + auto store = openStore(); auto state = std::make_unique(myArgs.searchPath, store); From 6a5bf9b1438ed0b721657568fbb3a7c0b829e89e Mon Sep 17 00:00:00 2001 From: Guillaume Maudoux Date: Tue, 14 Jun 2016 17:42:46 +0200 Subject: [PATCH 1262/2196] simplify handling of extra '}' --- src/libexpr/lexer.l | 33 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index a052447d3dc..c34e5c38392 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -6,9 +6,9 @@ %option nounput noyy_top_state +%s DEFAULT %x STRING %x IND_STRING -%x INSIDE_DOLLAR_CURLY %{ @@ -99,8 +99,6 @@ URI [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~ %% -{ - if { return IF; 
} then { return THEN; } @@ -140,17 +138,19 @@ or { return OR_KW; } return FLOAT; } -\$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; } -} +\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } -\} { return '}'; } -\} { POP_STATE(); return '}'; } -\{ { return '{'; } -\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return '{'; } +\} { /* State INITIAL only exists at the bottom of the stack and is + used as a marker. DEFAULT replaces it everywhere else. + Popping when in INITIAL state causes an empty stack exception, + so don't */ + if (YYSTATE != INITIAL) + POP_STATE(); + return '}'; + } +\{ { PUSH_STATE(DEFAULT); return '{'; } -\" { - PUSH_STATE(STRING); return '"'; - } +\" { PUSH_STATE(STRING); return '"'; } ([^\$\"\\]|\$[^\{\"\\]|\\{ANY}|\$\\{ANY})*\$/\" | ([^\$\"\\]|\$[^\{\"\\]|\\{ANY}|\$\\{ANY})+ { /* It is impossible to match strings ending with '$' with one @@ -159,7 +159,7 @@ or { return OR_KW; } yylval->e = unescapeStr(data->symbols, yytext, yyleng); return STR; } -\$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; } +\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } \" { POP_STATE(); return '"'; } \$|\\|\$\\ { /* This can only occur when we reach EOF, otherwise the above @@ -169,7 +169,7 @@ or { return OR_KW; } return STR; } -\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } +\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } ([^\$\']|\$[^\{\']|\'[^\'\$])+ { yylval->e = new ExprIndStr(yytext); return IND_STR; @@ -187,14 +187,13 @@ or { return OR_KW; } yylval->e = unescapeStr(data->symbols, yytext + 2, yyleng - 2); return IND_STR; } -\$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; } +\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } \'\' { POP_STATE(); return IND_STRING_CLOSE; } \' { yylval->e = new ExprIndStr("'"); return IND_STR; } -{ {PATH} { if (yytext[yyleng-1] == '/') throw ParseError("path '%s' has a trailing slash", yytext); @@ -219,7 +218,5 @@ or { return OR_KW; } return (unsigned char) yytext[0]; } -} - %% From 0c61515be1a247fd42c2290b9ac0f36b2f7b9b14 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 27 Oct 2018 13:07:26 +0200 Subject: [PATCH 1263/2196] Fix signedness warning --- src/nix/doctor.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index b608b9d59ef..1ce3efcb041 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -91,7 +91,7 @@ struct CmdDoctor : StoreCommand void checkStoreProtocol(unsigned int storeProto) { - auto clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) + unsigned int clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) ? SERVE_PROTOCOL_VERSION : PROTOCOL_VERSION; From 18b4c53f71dfc626f5f5ffa0282afd1b9faad6a4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 27 Oct 2018 15:40:09 +0200 Subject: [PATCH 1264/2196] Restore old (dis)allowedRequisites behaviour for self-references stdenv relies on this. So ignore self-references (but only in legacy non-structured attributes mode). 
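
A rough way to see the restored behaviour (assuming a nixpkgs checkout, and
that runCommand forwards the attribute to the underlying derivation) is a
build whose output contains nothing but a self-reference; with the legacy
attribute this succeeds again:

    $ nix-build -E 'with import <nixpkgs> {}; runCommand "self-ref" { allowedRequisites = [ ]; } "mkdir $out; echo $out > $out/self"'
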
--- src/libstore/build.cc | 63 +++++++++++++++++++++++-------------------- tests/check-refs.sh | 2 ++ tests/check-reqs.nix | 2 +- tests/check-reqs.sh | 2 ++ 4 files changed, 39 insertions(+), 30 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index cf4218a261f..676ad5856b1 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3310,6 +3310,7 @@ void DerivationGoal::checkOutputs(const std::map & outputs) struct Checks { + bool ignoreSelfRefs = false; std::experimental::optional maxSize, maxClosureSize; std::experimental::optional allowedReferences, allowedRequisites, disallowedReferences, disallowedRequisites; }; @@ -3345,35 +3346,6 @@ void DerivationGoal::checkOutputs(const std::map & outputs) return std::make_pair(pathsDone, closureSize); }; - auto checkRefs = [&](const std::experimental::optional & value, bool allowed, bool recursive) - { - if (!value) return; - - PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); - - PathSet used = recursive ? getClosure(info.path).first : info.references; - - PathSet badPaths; - - for (auto & i : used) - if (allowed) { - if (spec.find(i) == spec.end()) - badPaths.insert(i); - } else { - if (spec.find(i) != spec.end()) - badPaths.insert(i); - } - - if (!badPaths.empty()) { - string badPathsStr; - for (auto & i : badPaths) { - badPathsStr += "\n "; - badPathsStr += i; - } - throw BuildError("output '%s' is not allowed to refer to the following paths:%s", info.path, badPathsStr); - } - }; - auto applyChecks = [&](const Checks & checks) { if (checks.maxSize && info.narSize > *checks.maxSize) @@ -3387,6 +3359,38 @@ void DerivationGoal::checkOutputs(const std::map & outputs) info.path, closureSize, *checks.maxClosureSize); } + auto checkRefs = [&](const std::experimental::optional & value, bool allowed, bool recursive) + { + if (!value) return; + + PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value); + + PathSet used = recursive ? 
getClosure(info.path).first : info.references; + + if (recursive && checks.ignoreSelfRefs) + used.erase(info.path); + + PathSet badPaths; + + for (auto & i : used) + if (allowed) { + if (!spec.count(i)) + badPaths.insert(i); + } else { + if (spec.count(i)) + badPaths.insert(i); + } + + if (!badPaths.empty()) { + string badPathsStr; + for (auto & i : badPaths) { + badPathsStr += "\n "; + badPathsStr += i; + } + throw BuildError("output '%s' is not allowed to refer to the following paths:%s", info.path, badPathsStr); + } + }; + checkRefs(checks.allowedReferences, true, false); checkRefs(checks.allowedRequisites, true, true); checkRefs(checks.disallowedReferences, false, false); @@ -3435,6 +3439,7 @@ void DerivationGoal::checkOutputs(const std::map & outputs) } else { // legacy non-structured-attributes case Checks checks; + checks.ignoreSelfRefs = true; checks.allowedReferences = parsedDrv->getStringsAttr("allowedReferences"); checks.allowedRequisites = parsedDrv->getStringsAttr("allowedRequisites"); checks.disallowedReferences = parsedDrv->getStringsAttr("disallowedReferences"); diff --git a/tests/check-refs.sh b/tests/check-refs.sh index 34ee22cfc8f..16bbabc4098 100644 --- a/tests/check-refs.sh +++ b/tests/check-refs.sh @@ -1,5 +1,7 @@ source common.sh +clearStore + RESULT=$TEST_ROOT/result dep=$(nix-build -o $RESULT check-refs.nix -A dep) diff --git a/tests/check-reqs.nix b/tests/check-reqs.nix index 47b5b3d9c72..41436cb48e0 100644 --- a/tests/check-reqs.nix +++ b/tests/check-reqs.nix @@ -33,7 +33,7 @@ rec { }; # When specifying all the requisites, the build succeeds. - test1 = makeTest 1 [ "out" dep1 dep2 deps ]; + test1 = makeTest 1 [ dep1 dep2 deps ]; # But missing anything it fails. test2 = makeTest 2 [ dep2 deps ]; diff --git a/tests/check-reqs.sh b/tests/check-reqs.sh index 77689215def..e9f65fc2a6d 100644 --- a/tests/check-reqs.sh +++ b/tests/check-reqs.sh @@ -1,5 +1,7 @@ source common.sh +clearStore + RESULT=$TEST_ROOT/result nix-build -o $RESULT check-reqs.nix -A test1 From 8e6bf492971347e18c1b5800e9e8fa5191a0839b Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Mon, 29 Oct 2018 12:09:22 +0000 Subject: [PATCH 1265/2196] nix doctor: return nonzero exitcode if a check fails This makes it easier to use this when testing the installer or when running the checks with other automated tooling. 
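
For example, an installer test or CI job can now fail fast on a broken setup
with something like the following (the failing case is expected to exit with
status 2):

    $ nix doctor || { echo "Nix installation check failed"; exit 1; }
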
--- src/nix/doctor.cc | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 1ce3efcb041..7b544461947 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -18,6 +18,8 @@ std::string formatProtocol(unsigned int proto) struct CmdDoctor : StoreCommand { + bool success = true; + std::string name() override { return "doctor"; @@ -36,13 +38,16 @@ struct CmdDoctor : StoreCommand auto type = getStoreType(); if (type < tOther) { - checkNixInPath(); - checkProfileRoots(store); + success &= checkNixInPath(); + success &= checkProfileRoots(store); } - checkStoreProtocol(store->getProtocol()); + success &= checkStoreProtocol(store->getProtocol()); + + if (!success) + throw Exit(2); } - void checkNixInPath() + bool checkNixInPath() { PathSet dirs; @@ -56,10 +61,13 @@ struct CmdDoctor : StoreCommand for (auto & dir : dirs) std::cout << " " << dir << std::endl; std::cout << std::endl; + return false; } + + return true; } - void checkProfileRoots(ref store) + bool checkProfileRoots(ref store) { PathSet dirs; @@ -86,10 +94,13 @@ struct CmdDoctor : StoreCommand for (auto & dir : dirs) std::cout << " " << dir << std::endl; std::cout << std::endl; + return false; } + + return true; } - void checkStoreProtocol(unsigned int storeProto) + bool checkStoreProtocol(unsigned int storeProto) { unsigned int clientProto = GET_PROTOCOL_MAJOR(SERVE_PROTOCOL_VERSION) == GET_PROTOCOL_MAJOR(storeProto) ? SERVE_PROTOCOL_VERSION @@ -103,7 +114,10 @@ struct CmdDoctor : StoreCommand std::cout << "Client protocol: " << formatProtocol(clientProto) << std::endl; std::cout << "Store protocol: " << formatProtocol(storeProto) << std::endl; std::cout << std::endl; + return false; } + + return true; } }; From 3d974d31facefe8eaf59af56b0187e6a63fdd0cc Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 29 Oct 2018 08:44:58 -0500 Subject: [PATCH 1266/2196] editline: wip --- Makefile.config.in | 1 + configure.ac | 2 + doc/manual/introduction/about-nix.xml | 6 - editline.nix | 24 + release-common.nix | 4 +- src/linenoise/ConvertUTF.cpp | 542 ---- src/linenoise/ConvertUTF.h | 162 -- src/linenoise/LICENSE | 66 - src/linenoise/linenoise.cpp | 3450 ------------------------- src/linenoise/linenoise.h | 73 - src/linenoise/wcwidth.cpp | 315 --- src/nix/local.mk | 3 +- src/nix/repl.cc | 89 +- 13 files changed, 89 insertions(+), 4648 deletions(-) create mode 100644 editline.nix delete mode 100644 src/linenoise/ConvertUTF.cpp delete mode 100755 src/linenoise/ConvertUTF.h delete mode 100644 src/linenoise/LICENSE delete mode 100644 src/linenoise/linenoise.cpp delete mode 100644 src/linenoise/linenoise.h delete mode 100644 src/linenoise/wcwidth.cpp diff --git a/Makefile.config.in b/Makefile.config.in index 08edcb863ef..fad077d869b 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -15,6 +15,7 @@ SODIUM_LIBS = @SODIUM_LIBS@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ +EDITLINE_LIBS = @EDITLINE_LIBS@ bash = @bash@ bindir = @bindir@ brotli = @brotli@ diff --git a/configure.ac b/configure.ac index 9c85182efbf..85fec77a29c 100644 --- a/configure.ac +++ b/configure.ac @@ -164,6 +164,8 @@ PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CX # Look for libcurl, a required dependency. PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"]) +# Look for editline, a required dependency. 
+PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"]) # Look for libsodium, an optional dependency. PKG_CHECK_MODULES([SODIUM], [libsodium], diff --git a/doc/manual/introduction/about-nix.xml b/doc/manual/introduction/about-nix.xml index e8c0a29753a..c21ed34ddc7 100644 --- a/doc/manual/introduction/about-nix.xml +++ b/doc/manual/introduction/about-nix.xml @@ -262,12 +262,6 @@ xlink:href="http://nixos.org/">NixOS homepage. xlink:href="http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html">GNU LGPLv2.1 or (at your option) any later version. -Nix uses the linenoise-ng -library, which has the following license: - - -
diff --git a/editline.nix b/editline.nix new file mode 100644 index 00000000000..d9b5a2b64d6 --- /dev/null +++ b/editline.nix @@ -0,0 +1,24 @@ +{ stdenv, fetchFromGitHub, autoreconfHook }: + +stdenv.mkDerivation rec { + name = "editline-${version}"; + version = "1.15.3"; + src = fetchFromGitHub { + owner = "troglobit"; + repo = "editline"; + rev = version; + sha256 = "0dm5fgq0acpprr938idwml64nclg9l6c6avirsd8r6f40qicbgma"; + }; + + nativeBuildInputs = [ autoreconfHook ]; + + dontDisableStatic = true; + + meta = with stdenv.lib; { + homepage = http://troglobit.com/editline.html; + description = "A readline() replacement for UNIX without termcap (ncurses)"; + license = licenses.bsdOriginal; + maintainers = with maintainers; [ dtzWill ]; + platforms = platforms.all; + }; +} diff --git a/release-common.nix b/release-common.nix index ace2a4f9b91..690860abfdc 100644 --- a/release-common.nix +++ b/release-common.nix @@ -29,6 +29,8 @@ rec { ''; }); + editline = pkgs.editline or (pkgs.callPackage ./editline.nix {}); + configureFlags = [ "--enable-gc" @@ -49,7 +51,7 @@ rec { buildDeps = [ curl - bzip2 xz brotli + bzip2 xz brotli editline openssl pkgconfig sqlite boehmgc boost diff --git a/src/linenoise/ConvertUTF.cpp b/src/linenoise/ConvertUTF.cpp deleted file mode 100644 index f7e5915d5e8..00000000000 --- a/src/linenoise/ConvertUTF.cpp +++ /dev/null @@ -1,542 +0,0 @@ -/* - * Copyright 2001-2004 Unicode, Inc. - * - * Disclaimer - * - * This source code is provided as is by Unicode, Inc. No claims are - * made as to fitness for any particular purpose. No warranties of any - * kind are expressed or implied. The recipient agrees to determine - * applicability of information provided. If this file has been - * purchased on magnetic or optical media from Unicode, Inc., the - * sole remedy for any claim will be exchange of defective media - * within 90 days of receipt. - * - * Limitations on Rights to Redistribute This Code - * - * Unicode, Inc. hereby grants the right to freely use the information - * supplied in this file in the creation of products supporting the - * Unicode Standard, and to make copies of this file in any form - * for internal or external distribution as long as this notice - * remains attached. - */ - -/* --------------------------------------------------------------------- - - Conversions between UTF32, UTF-16, and UTF-8. Source code file. - Author: Mark E. Davis, 1994. - Rev History: Rick McGowan, fixes & updates May 2001. - Sept 2001: fixed const & error conditions per - mods suggested by S. Parent & A. Lillich. - June 2002: Tim Dodd added detection and handling of incomplete - source sequences, enhanced error detection, added casts - to eliminate compiler warnings. - July 2003: slight mods to back out aggressive FFFE detection. - Jan 2004: updated switches in from-UTF8 conversions. - Oct 2004: updated to use UNI_MAX_LEGAL_UTF32 in UTF-32 conversions. - - See the header file "ConvertUTF.h" for complete documentation. 
- ------------------------------------------------------------------------- */ - -#include "ConvertUTF.h" -#ifdef CVTUTF_DEBUG -#include -#endif - -namespace linenoise_ng { - -static const int halfShift = 10; /* used for shifting by 10 bits */ - -static const UTF32 halfBase = 0x0010000UL; -static const UTF32 halfMask = 0x3FFUL; - -#define UNI_SUR_HIGH_START (UTF32)0xD800 -#define UNI_SUR_HIGH_END (UTF32)0xDBFF -#define UNI_SUR_LOW_START (UTF32)0xDC00 -#define UNI_SUR_LOW_END (UTF32)0xDFFF -#define false 0 -#define true 1 - -/* --------------------------------------------------------------------- */ - -ConversionResult ConvertUTF32toUTF16 ( - const UTF32** sourceStart, const UTF32* sourceEnd, - char16_t** targetStart, char16_t* targetEnd, ConversionFlags flags) { - ConversionResult result = conversionOK; - const UTF32* source = *sourceStart; - char16_t* target = *targetStart; - while (source < sourceEnd) { - UTF32 ch; - if (target >= targetEnd) { - result = targetExhausted; break; - } - ch = *source++; - if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ - /* UTF-16 surrogate values are illegal in UTF-32; 0xffff or 0xfffe are both reserved values */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { - if (flags == strictConversion) { - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } else { - *target++ = UNI_REPLACEMENT_CHAR; - } - } else { - *target++ = (UTF16)ch; /* normal case */ - } - } else if (ch > UNI_MAX_LEGAL_UTF32) { - if (flags == strictConversion) { - result = sourceIllegal; - } else { - *target++ = UNI_REPLACEMENT_CHAR; - } - } else { - /* target is a character in range 0xFFFF - 0x10FFFF. */ - if (target + 1 >= targetEnd) { - --source; /* Back up source pointer! */ - result = targetExhausted; break; - } - ch -= halfBase; - *target++ = (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START); - *target++ = (UTF16)((ch & halfMask) + UNI_SUR_LOW_START); - } - } - *sourceStart = source; - *targetStart = target; - return result; -} - -/* --------------------------------------------------------------------- */ - -ConversionResult ConvertUTF16toUTF32 ( - const UTF16** sourceStart, const UTF16* sourceEnd, - UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags) { - ConversionResult result = conversionOK; - const UTF16* source = *sourceStart; - UTF32* target = *targetStart; - UTF32 ch, ch2; - while (source < sourceEnd) { - const UTF16* oldSource = source; /* In case we have to back up because of target overflow. */ - ch = *source++; - /* If we have a surrogate pair, convert to UTF32 first. */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) { - /* If the 16 bits following the high surrogate are in the source buffer... */ - if (source < sourceEnd) { - ch2 = *source; - /* If it's a low surrogate, convert to UTF32. */ - if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) { - ch = ((ch - UNI_SUR_HIGH_START) << halfShift) - + (ch2 - UNI_SUR_LOW_START) + halfBase; - ++source; - } else if (flags == strictConversion) { /* it's an unpaired high surrogate */ - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } else { /* We don't have the 16 bits following the high surrogate. 
*/ - --source; /* return to the high surrogate */ - result = sourceExhausted; - break; - } - } else if (flags == strictConversion) { - /* UTF-16 surrogate values are illegal in UTF-32 */ - if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) { - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } - if (target >= targetEnd) { - source = oldSource; /* Back up source pointer! */ - result = targetExhausted; break; - } - *target++ = ch; - } - *sourceStart = source; - *targetStart = target; -#ifdef CVTUTF_DEBUG -if (result == sourceIllegal) { - fprintf(stderr, "ConvertUTF16toUTF32 illegal seq 0x%04x,%04x\n", ch, ch2); - fflush(stderr); -} -#endif - return result; -} - -/* --------------------------------------------------------------------- */ - -/* - * Index into the table below with the first byte of a UTF-8 sequence to - * get the number of trailing bytes that are supposed to follow it. - * Note that *legal* UTF-8 values can't have 4 or 5-bytes. The table is - * left as-is for anyone who may want to do such conversion, which was - * allowed in earlier algorithms. - */ -static const char trailingBytesForUTF8[256] = { - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, - 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 -}; - -/* - * Magic values subtracted from a buffer value during UTF8 conversion. - * This table contains as many values as there might be trailing bytes - * in a UTF-8 sequence. - */ -static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, - 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; - -/* - * Once the bits are split out into bytes of UTF-8, this is a mask OR-ed - * into the first byte, depending on how many bytes follow. There are - * as many entries in this table as there are UTF-8 sequence types. - * (I.e., one byte sequence, two byte... etc.). Remember that sequencs - * for *legal* UTF-8 will be 4 or fewer bytes total. - */ -static const UTF8 firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; - -/* --------------------------------------------------------------------- */ - -/* The interface converts a whole buffer to avoid function-call overhead. - * Constants have been gathered. Loops & conditionals have been removed as - * much as possible for efficiency, in favor of drop-through switches. - * (See "Note A" at the bottom of the file for equivalent code.) - * If your compiler supports it, the "isLegalUTF8" call can be turned - * into an inline function. - */ - -/* --------------------------------------------------------------------- */ - -ConversionResult ConvertUTF16toUTF8 ( - const UTF16** sourceStart, const UTF16* sourceEnd, - UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags) { - ConversionResult result = conversionOK; - const UTF16* source = *sourceStart; - UTF8* target = *targetStart; - while (source < sourceEnd) { - UTF32 ch; - unsigned short bytesToWrite = 0; - const UTF32 byteMask = 0xBF; - const UTF32 byteMark = 0x80; - const UTF16* oldSource = source; /* In case we have to back up because of target overflow. 
*/ - ch = *source++; - /* If we have a surrogate pair, convert to UTF32 first. */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) { - /* If the 16 bits following the high surrogate are in the source buffer... */ - if (source < sourceEnd) { - UTF32 ch2 = *source; - /* If it's a low surrogate, convert to UTF32. */ - if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) { - ch = ((ch - UNI_SUR_HIGH_START) << halfShift) - + (ch2 - UNI_SUR_LOW_START) + halfBase; - ++source; - } else if (flags == strictConversion) { /* it's an unpaired high surrogate */ - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } else { /* We don't have the 16 bits following the high surrogate. */ - --source; /* return to the high surrogate */ - result = sourceExhausted; - break; - } - } else if (flags == strictConversion) { - /* UTF-16 surrogate values are illegal in UTF-32 */ - if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) { - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } - /* Figure out how many bytes the result will require */ - if (ch < (UTF32)0x80) { bytesToWrite = 1; - } else if (ch < (UTF32)0x800) { bytesToWrite = 2; - } else if (ch < (UTF32)0x10000) { bytesToWrite = 3; - } else if (ch < (UTF32)0x110000) { bytesToWrite = 4; - } else { bytesToWrite = 3; - ch = UNI_REPLACEMENT_CHAR; - } - - target += bytesToWrite; - if (target > targetEnd) { - source = oldSource; /* Back up source pointer! */ - target -= bytesToWrite; result = targetExhausted; break; - } - switch (bytesToWrite) { /* note: everything falls through. */ - case 4: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; - case 3: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; - case 2: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; - case 1: *--target = (UTF8)(ch | firstByteMark[bytesToWrite]); - } - target += bytesToWrite; - } - *sourceStart = source; - *targetStart = target; - return result; -} - -/* --------------------------------------------------------------------- */ - -/* - * Utility routine to tell whether a sequence of bytes is legal UTF-8. - * This must be called with the length pre-determined by the first byte. - * If not calling this from ConvertUTF8to*, then the length can be set by: - * length = trailingBytesForUTF8[*source]+1; - * and the sequence is illegal right away if there aren't that many bytes - * available. - * If presented with a length > 4, this returns false. The Unicode - * definition of UTF-8 goes up to 4-byte sequences. - */ - -static Boolean isLegalUTF8(const UTF8 *source, int length) { - UTF8 a; - const UTF8 *srcptr = source+length; - switch (length) { - default: return false; - /* Everything else falls through when "true"... */ - case 4: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return false; - case 3: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return false; - case 2: if ((a = (*--srcptr)) > 0xBF) return false; - - switch (*source) { - /* no fall-through in this inner switch */ - case 0xE0: if (a < 0xA0) return false; break; - case 0xED: if (a > 0x9F) return false; break; - case 0xF0: if (a < 0x90) return false; break; - case 0xF4: if (a > 0x8F) return false; break; - default: if (a < 0x80) return false; - } - - case 1: if (*source >= 0x80 && *source < 0xC2) return false; - } - if (*source > 0xF4) return false; - return true; -} - -/* --------------------------------------------------------------------- */ - -/* - * Exported function to return whether a UTF-8 sequence is legal or not. 
- * This is not used here; it's just exported. - */ -Boolean isLegalUTF8Sequence(const UTF8 *source, const UTF8 *sourceEnd) { - int length = trailingBytesForUTF8[*source]+1; - if (source+length > sourceEnd) { - return false; - } - return isLegalUTF8(source, length); -} - -/* --------------------------------------------------------------------- */ - -ConversionResult ConvertUTF8toUTF16 ( - const UTF8** sourceStart, const UTF8* sourceEnd, - UTF16** targetStart, UTF16* targetEnd, ConversionFlags flags) { - ConversionResult result = conversionOK; - const UTF8* source = *sourceStart; - UTF16* target = *targetStart; - while (source < sourceEnd) { - UTF32 ch = 0; - unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; - if (source + extraBytesToRead >= sourceEnd) { - result = sourceExhausted; break; - } - /* Do this check whether lenient or strict */ - if (! isLegalUTF8(source, extraBytesToRead+1)) { - result = sourceIllegal; - break; - } - /* - * The cases all fall through. See "Note A" below. - */ - switch (extraBytesToRead) { - case 5: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ - case 4: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ - case 3: ch += *source++; ch <<= 6; - case 2: ch += *source++; ch <<= 6; - case 1: ch += *source++; ch <<= 6; - case 0: ch += *source++; - } - ch -= offsetsFromUTF8[extraBytesToRead]; - - if (target >= targetEnd) { - source -= (extraBytesToRead+1); /* Back up source pointer! */ - result = targetExhausted; break; - } - if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ - /* UTF-16 surrogate values are illegal in UTF-32 */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { - if (flags == strictConversion) { - source -= (extraBytesToRead+1); /* return to the illegal value itself */ - result = sourceIllegal; - break; - } else { - *target++ = UNI_REPLACEMENT_CHAR; - } - } else { - *target++ = (UTF16)ch; /* normal case */ - } - } else if (ch > UNI_MAX_UTF16) { - if (flags == strictConversion) { - result = sourceIllegal; - source -= (extraBytesToRead+1); /* return to the start */ - break; /* Bail out; shouldn't continue */ - } else { - *target++ = UNI_REPLACEMENT_CHAR; - } - } else { - /* target is a character in range 0xFFFF - 0x10FFFF. */ - if (target + 1 >= targetEnd) { - source -= (extraBytesToRead+1); /* Back up source pointer! */ - result = targetExhausted; break; - } - ch -= halfBase; - *target++ = (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START); - *target++ = (UTF16)((ch & halfMask) + UNI_SUR_LOW_START); - } - } - *sourceStart = source; - *targetStart = target; - return result; -} - -/* --------------------------------------------------------------------- */ - -ConversionResult ConvertUTF32toUTF8 ( - const UTF32** sourceStart, const UTF32* sourceEnd, - UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags) { - ConversionResult result = conversionOK; - const UTF32* source = *sourceStart; - UTF8* target = *targetStart; - while (source < sourceEnd) { - UTF32 ch; - unsigned short bytesToWrite = 0; - const UTF32 byteMask = 0xBF; - const UTF32 byteMark = 0x80; - ch = *source++; - if (flags == strictConversion ) { - /* UTF-16 surrogate values are illegal in UTF-32 */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { - --source; /* return to the illegal value itself */ - result = sourceIllegal; - break; - } - } - /* - * Figure out how many bytes the result will require. Turn any - * illegally large UTF32 things (> Plane 17) into replacement chars. 
- */ - if (ch < (UTF32)0x80) { bytesToWrite = 1; - } else if (ch < (UTF32)0x800) { bytesToWrite = 2; - } else if (ch < (UTF32)0x10000) { bytesToWrite = 3; - } else if (ch <= UNI_MAX_LEGAL_UTF32) { bytesToWrite = 4; - } else { bytesToWrite = 3; - ch = UNI_REPLACEMENT_CHAR; - result = sourceIllegal; - } - - target += bytesToWrite; - if (target > targetEnd) { - --source; /* Back up source pointer! */ - target -= bytesToWrite; result = targetExhausted; break; - } - switch (bytesToWrite) { /* note: everything falls through. */ - case 4: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; - case 3: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; - case 2: *--target = (UTF8)((ch | byteMark) & byteMask); ch >>= 6; - case 1: *--target = (UTF8) (ch | firstByteMark[bytesToWrite]); - } - target += bytesToWrite; - } - *sourceStart = source; - *targetStart = target; - return result; -} - -/* --------------------------------------------------------------------- */ - -ConversionResult ConvertUTF8toUTF32 ( - const UTF8** sourceStart, const UTF8* sourceEnd, - UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags) { - ConversionResult result = conversionOK; - const UTF8* source = *sourceStart; - UTF32* target = *targetStart; - while (source < sourceEnd) { - UTF32 ch = 0; - unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; - if (source + extraBytesToRead >= sourceEnd) { - result = sourceExhausted; break; - } - /* Do this check whether lenient or strict */ - if (! isLegalUTF8(source, extraBytesToRead+1)) { - result = sourceIllegal; - break; - } - /* - * The cases all fall through. See "Note A" below. - */ - switch (extraBytesToRead) { - case 5: ch += *source++; ch <<= 6; - case 4: ch += *source++; ch <<= 6; - case 3: ch += *source++; ch <<= 6; - case 2: ch += *source++; ch <<= 6; - case 1: ch += *source++; ch <<= 6; - case 0: ch += *source++; - } - ch -= offsetsFromUTF8[extraBytesToRead]; - - if (target >= targetEnd) { - source -= (extraBytesToRead+1); /* Back up the source pointer! */ - result = targetExhausted; break; - } - if (ch <= UNI_MAX_LEGAL_UTF32) { - /* - * UTF-16 surrogate values are illegal in UTF-32, and anything - * over Plane 17 (> 0x10FFFF) is illegal. - */ - if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { - if (flags == strictConversion) { - source -= (extraBytesToRead+1); /* return to the illegal value itself */ - result = sourceIllegal; - break; - } else { - *target++ = UNI_REPLACEMENT_CHAR; - } - } else { - *target++ = ch; - } - } else { /* i.e., ch > UNI_MAX_LEGAL_UTF32 */ - result = sourceIllegal; - *target++ = UNI_REPLACEMENT_CHAR; - } - } - *sourceStart = source; - *targetStart = target; - return result; -} - -} - -/* --------------------------------------------------------------------- - - Note A. - The fall-through switches in UTF-8 reading code save a - temp variable, some decrements & conditionals. The switches - are equivalent to the following loop: - { - int tmpBytesToRead = extraBytesToRead+1; - do { - ch += *source++; - --tmpBytesToRead; - if (tmpBytesToRead) ch <<= 6; - } while (tmpBytesToRead > 0); - } - In UTF-8 writing code, the switches on "bytesToWrite" are - similarly unrolled loops. - - --------------------------------------------------------------------- */ diff --git a/src/linenoise/ConvertUTF.h b/src/linenoise/ConvertUTF.h deleted file mode 100755 index 8a296235dcd..00000000000 --- a/src/linenoise/ConvertUTF.h +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2001-2004 Unicode, Inc. 
- * - * Disclaimer - * - * This source code is provided as is by Unicode, Inc. No claims are - * made as to fitness for any particular purpose. No warranties of any - * kind are expressed or implied. The recipient agrees to determine - * applicability of information provided. If this file has been - * purchased on magnetic or optical media from Unicode, Inc., the - * sole remedy for any claim will be exchange of defective media - * within 90 days of receipt. - * - * Limitations on Rights to Redistribute This Code - * - * Unicode, Inc. hereby grants the right to freely use the information - * supplied in this file in the creation of products supporting the - * Unicode Standard, and to make copies of this file in any form - * for internal or external distribution as long as this notice - * remains attached. - */ - -/* --------------------------------------------------------------------- - - Conversions between UTF32, UTF-16, and UTF-8. Header file. - - Several funtions are included here, forming a complete set of - conversions between the three formats. UTF-7 is not included - here, but is handled in a separate source file. - - Each of these routines takes pointers to input buffers and output - buffers. The input buffers are const. - - Each routine converts the text between *sourceStart and sourceEnd, - putting the result into the buffer between *targetStart and - targetEnd. Note: the end pointers are *after* the last item: e.g. - *(sourceEnd - 1) is the last item. - - The return result indicates whether the conversion was successful, - and if not, whether the problem was in the source or target buffers. - (Only the first encountered problem is indicated.) - - After the conversion, *sourceStart and *targetStart are both - updated to point to the end of last text successfully converted in - the respective buffers. - - Input parameters: - sourceStart - pointer to a pointer to the source buffer. - The contents of this are modified on return so that - it points at the next thing to be converted. - targetStart - similarly, pointer to pointer to the target buffer. - sourceEnd, targetEnd - respectively pointers to the ends of the - two buffers, for overflow checking only. - - These conversion functions take a ConversionFlags argument. When this - flag is set to strict, both irregular sequences and isolated surrogates - will cause an error. When the flag is set to lenient, both irregular - sequences and isolated surrogates are converted. - - Whether the flag is strict or lenient, all illegal sequences will cause - an error return. This includes sequences such as: , , - or in UTF-8, and values above 0x10FFFF in UTF-32. Conformant code - must check for illegal sequences. - - When the flag is set to lenient, characters over 0x10FFFF are converted - to the replacement character; otherwise (when the flag is set to strict) - they constitute an error. - - Output parameters: - The value "sourceIllegal" is returned from some routines if the input - sequence is malformed. When "sourceIllegal" is returned, the source - value will point to the illegal value that caused the problem. E.g., - in UTF-8 when a sequence is malformed, it points to the start of the - malformed sequence. - - Author: Mark E. Davis, 1994. - Rev History: Rick McGowan, fixes & updates May 2001. - Fixes & updates, Sept 2001. - ------------------------------------------------------------------------- */ - -/* --------------------------------------------------------------------- - The following 4 definitions are compiler-specific. 
- The C standard does not guarantee that wchar_t has at least - 16 bits, so wchar_t is no less portable than unsigned short! - All should be unsigned values to avoid sign extension during - bit mask & shift operations. ------------------------------------------------------------------------- */ - -#if 0 -typedef unsigned long UTF32; /* at least 32 bits */ -typedef unsigned short UTF16; /* at least 16 bits */ -typedef unsigned char UTF8; /* typically 8 bits */ -#endif - -#include -#include - -namespace linenoise_ng { - -typedef uint32_t UTF32; -typedef uint16_t UTF16; -typedef uint8_t UTF8; -typedef unsigned char Boolean; /* 0 or 1 */ - -/* Some fundamental constants */ -#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD -#define UNI_MAX_BMP (UTF32)0x0000FFFF -#define UNI_MAX_UTF16 (UTF32)0x0010FFFF -#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF -#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF - -typedef enum { - conversionOK, /* conversion successful */ - sourceExhausted, /* partial character in source, but hit end */ - targetExhausted, /* insuff. room in target for conversion */ - sourceIllegal /* source sequence is illegal/malformed */ -} ConversionResult; - -typedef enum { - strictConversion = 0, - lenientConversion -} ConversionFlags; - -// /* This is for C++ and does no harm in C */ -// #ifdef __cplusplus -// extern "C" { -// #endif - -ConversionResult ConvertUTF8toUTF16 ( - const UTF8** sourceStart, const UTF8* sourceEnd, - UTF16** targetStart, UTF16* targetEnd, ConversionFlags flags); - -ConversionResult ConvertUTF16toUTF8 ( - const UTF16** sourceStart, const UTF16* sourceEnd, - UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags); - -ConversionResult ConvertUTF8toUTF32 ( - const UTF8** sourceStart, const UTF8* sourceEnd, - UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags); - -ConversionResult ConvertUTF32toUTF8 ( - const UTF32** sourceStart, const UTF32* sourceEnd, - UTF8** targetStart, UTF8* targetEnd, ConversionFlags flags); - -ConversionResult ConvertUTF16toUTF32 ( - const UTF16** sourceStart, const UTF16* sourceEnd, - UTF32** targetStart, UTF32* targetEnd, ConversionFlags flags); - -ConversionResult ConvertUTF32toUTF16 ( - const UTF32** sourceStart, const UTF32* sourceEnd, - char16_t** targetStart, char16_t* targetEnd, ConversionFlags flags); - -Boolean isLegalUTF8Sequence(const UTF8 *source, const UTF8 *sourceEnd); - -// #ifdef __cplusplus -// } -// #endif - -} - -/* --------------------------------------------------------------------- */ diff --git a/src/linenoise/LICENSE b/src/linenoise/LICENSE deleted file mode 100644 index b7c58c44586..00000000000 --- a/src/linenoise/LICENSE +++ /dev/null @@ -1,66 +0,0 @@ -linenoise.cpp -============= - -Copyright (c) 2010, Salvatore Sanfilippo -Copyright (c) 2010, Pieter Noordhuis - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of Redis nor the names of its contributors may be used - to endorse or promote products derived from this software without - specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. - - -wcwidth.cpp -=========== - -Markus Kuhn -- 2007-05-26 (Unicode 5.0) - -Permission to use, copy, modify, and distribute this software -for any purpose and without fee is hereby granted. The author -disclaims all warranties with regard to this software. - - - -ConvertUTF.cpp -============== - -Copyright 2001-2004 Unicode, Inc. - -Disclaimer - -This source code is provided as is by Unicode, Inc. No claims are -made as to fitness for any particular purpose. No warranties of any -kind are expressed or implied. The recipient agrees to determine -applicability of information provided. If this file has been -purchased on magnetic or optical media from Unicode, Inc., the -sole remedy for any claim will be exchange of defective media -within 90 days of receipt. - -Limitations on Rights to Redistribute This Code - -Unicode, Inc. hereby grants the right to freely use the information -supplied in this file in the creation of products supporting the -Unicode Standard, and to make copies of this file in any form -for internal or external distribution as long as this notice -remains attached. diff --git a/src/linenoise/linenoise.cpp b/src/linenoise/linenoise.cpp deleted file mode 100644 index c57505d2fa9..00000000000 --- a/src/linenoise/linenoise.cpp +++ /dev/null @@ -1,3450 +0,0 @@ -/* linenoise.c -- guerrilla line editing library against the idea that a - * line editing lib needs to be 20,000 lines of C code. - * - * Copyright (c) 2010, Salvatore Sanfilippo - * Copyright (c) 2010, Pieter Noordhuis - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * Neither the name of Redis nor the names of its contributors may be used - * to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * - * line editing lib needs to be 20,000 lines of C code. - * - * You can find the latest source code at: - * - * http://github.com/antirez/linenoise - * - * Does a number of crazy assumptions that happen to be true in 99.9999% of - * the 2010 UNIX computers around. - * - * References: - * - http://invisible-island.net/xterm/ctlseqs/ctlseqs.html - * - http://www.3waylabs.com/nw/WWW/products/wizcon/vt220.html - * - * Todo list: - * - Switch to gets() if $TERM is something we can't support. - * - Filter bogus Ctrl+ combinations. - * - Win32 support - * - * Bloat: - * - Completion? - * - History search like Ctrl+r in readline? - * - * List of escape sequences used by this program, we do everything just - * with three sequences. In order to be so cheap we may have some - * flickering effect with some slow terminal, but the lesser sequences - * the more compatible. - * - * CHA (Cursor Horizontal Absolute) - * Sequence: ESC [ n G - * Effect: moves cursor to column n (1 based) - * - * EL (Erase Line) - * Sequence: ESC [ n K - * Effect: if n is 0 or missing, clear from cursor to end of line - * Effect: if n is 1, clear from beginning of line to cursor - * Effect: if n is 2, clear entire line - * - * CUF (Cursor Forward) - * Sequence: ESC [ n C - * Effect: moves cursor forward of n chars - * - * The following are used to clear the screen: ESC [ H ESC [ 2 J - * This is actually composed of two sequences: - * - * cursorhome - * Sequence: ESC [ H - * Effect: moves the cursor to upper left corner - * - * ED2 (Clear entire screen) - * Sequence: ESC [ 2 J - * Effect: clear the whole screen - * - */ - -#ifdef _WIN32 - -#include -#include -#include - -#if defined(_MSC_VER) && _MSC_VER < 1900 -#define snprintf _snprintf // Microsoft headers use underscores in some names -#endif - -#if !defined GNUC -#define strcasecmp _stricmp -#endif - -#define strdup _strdup -#define isatty _isatty -#define write _write -#define STDIN_FILENO 0 - -#else /* _WIN32 */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#endif /* _WIN32 */ - -#include -#include -#include - -#include "linenoise.h" -#include "ConvertUTF.h" - -#include -#include -#include - -using std::string; -using std::vector; -using std::unique_ptr; -using namespace linenoise_ng; - -typedef unsigned char char8_t; - -static ConversionResult copyString8to32(char32_t* dst, size_t dstSize, - size_t& dstCount, const char* src) { - const UTF8* sourceStart = reinterpret_cast(src); - const UTF8* sourceEnd = sourceStart + strlen(src); - UTF32* targetStart = reinterpret_cast(dst); - UTF32* targetEnd = targetStart + dstSize; - - ConversionResult res = ConvertUTF8toUTF32( - &sourceStart, sourceEnd, &targetStart, targetEnd, lenientConversion); - - if (res == conversionOK) { - dstCount = targetStart - reinterpret_cast(dst); - - if (dstCount < dstSize) { - *targetStart = 0; - } - } - - return res; -} - -static ConversionResult copyString8to32(char32_t* dst, size_t dstSize, - size_t& dstCount, 
const char8_t* src) { - return copyString8to32(dst, dstSize, dstCount, - reinterpret_cast(src)); -} - -static size_t strlen32(const char32_t* str) { - const char32_t* ptr = str; - - while (*ptr) { - ++ptr; - } - - return ptr - str; -} - -static size_t strlen8(const char8_t* str) { - return strlen(reinterpret_cast(str)); -} - -static char8_t* strdup8(const char* src) { - return reinterpret_cast(strdup(src)); -} - -#ifdef _WIN32 -static const int FOREGROUND_WHITE = - FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE; -static const int BACKGROUND_WHITE = - BACKGROUND_RED | BACKGROUND_GREEN | BACKGROUND_BLUE; -static const int INTENSITY = FOREGROUND_INTENSITY | BACKGROUND_INTENSITY; - -class WinAttributes { - public: - WinAttributes() { - CONSOLE_SCREEN_BUFFER_INFO info; - GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE), &info); - _defaultAttribute = info.wAttributes & INTENSITY; - _defaultColor = info.wAttributes & FOREGROUND_WHITE; - _defaultBackground = info.wAttributes & BACKGROUND_WHITE; - - _consoleAttribute = _defaultAttribute; - _consoleColor = _defaultColor | _defaultBackground; - } - - public: - int _defaultAttribute; - int _defaultColor; - int _defaultBackground; - - int _consoleAttribute; - int _consoleColor; -}; - -static WinAttributes WIN_ATTR; - -static void copyString32to16(char16_t* dst, size_t dstSize, size_t* dstCount, - const char32_t* src, size_t srcSize) { - const UTF32* sourceStart = reinterpret_cast(src); - const UTF32* sourceEnd = sourceStart + srcSize; - char16_t* targetStart = reinterpret_cast(dst); - char16_t* targetEnd = targetStart + dstSize; - - ConversionResult res = ConvertUTF32toUTF16( - &sourceStart, sourceEnd, &targetStart, targetEnd, lenientConversion); - - if (res == conversionOK) { - *dstCount = targetStart - reinterpret_cast(dst); - - if (*dstCount < dstSize) { - *targetStart = 0; - } - } -} -#endif - -static void copyString32to8(char* dst, size_t dstSize, size_t* dstCount, - const char32_t* src, size_t srcSize) { - const UTF32* sourceStart = reinterpret_cast(src); - const UTF32* sourceEnd = sourceStart + srcSize; - UTF8* targetStart = reinterpret_cast(dst); - UTF8* targetEnd = targetStart + dstSize; - - ConversionResult res = ConvertUTF32toUTF8( - &sourceStart, sourceEnd, &targetStart, targetEnd, lenientConversion); - - if (res == conversionOK) { - *dstCount = targetStart - reinterpret_cast(dst); - - if (*dstCount < dstSize) { - *targetStart = 0; - } - } -} - -static void copyString32to8(char* dst, size_t dstLen, const char32_t* src) { - size_t dstCount = 0; - copyString32to8(dst, dstLen, &dstCount, src, strlen32(src)); -} - -static void copyString32(char32_t* dst, const char32_t* src, size_t len) { - while (0 < len && *src) { - *dst++ = *src++; - --len; - } - - *dst = 0; -} - -static int strncmp32(const char32_t* left, const char32_t* right, size_t len) { - while (0 < len && *left) { - if (*left != *right) { - return *left - *right; - } - - ++left; - ++right; - --len; - } - - return 0; -} - -#ifdef _WIN32 -#include - -static size_t OutputWin(char16_t* text16, char32_t* text32, size_t len32) { - size_t count16 = 0; - - copyString32to16(text16, len32, &count16, text32, len32); - WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), text16, - static_cast(count16), nullptr, nullptr); - - return count16; -} - -static char32_t* HandleEsc(char32_t* p, char32_t* end) { - if (*p == '[') { - int code = 0; - - for (++p; p < end; ++p) { - char32_t c = *p; - - if ('0' <= c && c <= '9') { - code = code * 10 + (c - '0'); - } else if (c == 'm' || c == ';') { - 
switch (code) { - case 0: - WIN_ATTR._consoleAttribute = WIN_ATTR._defaultAttribute; - WIN_ATTR._consoleColor = - WIN_ATTR._defaultColor | WIN_ATTR._defaultBackground; - break; - - case 1: // BOLD - case 5: // BLINK - WIN_ATTR._consoleAttribute = - (WIN_ATTR._defaultAttribute ^ FOREGROUND_INTENSITY) & INTENSITY; - break; - - case 30: - WIN_ATTR._consoleColor = BACKGROUND_WHITE; - break; - - case 31: - WIN_ATTR._consoleColor = - FOREGROUND_RED | WIN_ATTR._defaultBackground; - break; - - case 32: - WIN_ATTR._consoleColor = - FOREGROUND_GREEN | WIN_ATTR._defaultBackground; - break; - - case 33: - WIN_ATTR._consoleColor = - FOREGROUND_RED | FOREGROUND_GREEN | WIN_ATTR._defaultBackground; - break; - - case 34: - WIN_ATTR._consoleColor = - FOREGROUND_BLUE | WIN_ATTR._defaultBackground; - break; - - case 35: - WIN_ATTR._consoleColor = - FOREGROUND_BLUE | FOREGROUND_RED | WIN_ATTR._defaultBackground; - break; - - case 36: - WIN_ATTR._consoleColor = FOREGROUND_BLUE | FOREGROUND_GREEN | - WIN_ATTR._defaultBackground; - break; - - case 37: - WIN_ATTR._consoleColor = FOREGROUND_GREEN | FOREGROUND_RED | - FOREGROUND_BLUE | - WIN_ATTR._defaultBackground; - break; - } - - code = 0; - } - - if (*p == 'm') { - ++p; - break; - } - } - } else { - ++p; - } - - auto handle = GetStdHandle(STD_OUTPUT_HANDLE); - SetConsoleTextAttribute(handle, - WIN_ATTR._consoleAttribute | WIN_ATTR._consoleColor); - - return p; -} - -static size_t WinWrite32(char16_t* text16, char32_t* text32, size_t len32) { - char32_t* p = text32; - char32_t* q = p; - char32_t* e = text32 + len32; - size_t count16 = 0; - - while (p < e) { - if (*p == 27) { - if (q < p) { - count16 += OutputWin(text16, q, p - q); - } - - q = p = HandleEsc(p + 1, e); - } else { - ++p; - } - } - - if (q < p) { - count16 += OutputWin(text16, q, p - q); - } - - return count16; -} -#endif - -static int write32(int fd, char32_t* text32, int len32) { -#ifdef _WIN32 - if (isatty(fd)) { - size_t len16 = 2 * len32 + 1; - unique_ptr text16(new char16_t[len16]); - size_t count16 = WinWrite32(text16.get(), text32, len32); - - return static_cast(count16); - } else { - size_t len8 = 4 * len32 + 1; - unique_ptr text8(new char[len8]); - size_t count8 = 0; - - copyString32to8(text8.get(), len8, &count8, text32, len32); - - return write(fd, text8.get(), static_cast(count8)); - } -#else - size_t len8 = 4 * len32 + 1; - unique_ptr text8(new char[len8]); - size_t count8 = 0; - - copyString32to8(text8.get(), len8, &count8, text32, len32); - - return write(fd, text8.get(), count8); -#endif -} - -class Utf32String { - public: - Utf32String() : _length(0), _data(nullptr) { - // note: parens intentional, _data must be properly initialized - _data = new char32_t[1](); - } - - explicit Utf32String(const char* src) : _length(0), _data(nullptr) { - size_t len = strlen(src); - // note: parens intentional, _data must be properly initialized - _data = new char32_t[len + 1](); - copyString8to32(_data, len + 1, _length, src); - } - - explicit Utf32String(const char8_t* src) : _length(0), _data(nullptr) { - size_t len = strlen(reinterpret_cast(src)); - // note: parens intentional, _data must be properly initialized - _data = new char32_t[len + 1](); - copyString8to32(_data, len + 1, _length, src); - } - - explicit Utf32String(const char32_t* src) : _length(0), _data(nullptr) { - for (_length = 0; src[_length] != 0; ++_length) { - } - - // note: parens intentional, _data must be properly initialized - _data = new char32_t[_length + 1](); - memcpy(_data, src, _length * sizeof(char32_t)); - } - - 
explicit Utf32String(const char32_t* src, int len) : _length(len), _data(nullptr) { - // note: parens intentional, _data must be properly initialized - _data = new char32_t[len + 1](); - memcpy(_data, src, len * sizeof(char32_t)); - } - - explicit Utf32String(int len) : _length(0), _data(nullptr) { - // note: parens intentional, _data must be properly initialized - _data = new char32_t[len](); - } - - explicit Utf32String(const Utf32String& that) : _length(that._length), _data(nullptr) { - // note: parens intentional, _data must be properly initialized - _data = new char32_t[_length + 1](); - memcpy(_data, that._data, sizeof(char32_t) * _length); - } - - Utf32String& operator=(const Utf32String& that) { - if (this != &that) { - delete[] _data; - _data = new char32_t[that._length](); - _length = that._length; - memcpy(_data, that._data, sizeof(char32_t) * _length); - } - - return *this; - } - - ~Utf32String() { delete[] _data; } - - public: - char32_t* get() const { return _data; } - - size_t length() const { return _length; } - - size_t chars() const { return _length; } - - void initFromBuffer() { - for (_length = 0; _data[_length] != 0; ++_length) { - } - } - - const char32_t& operator[](size_t pos) const { return _data[pos]; } - - char32_t& operator[](size_t pos) { return _data[pos]; } - - private: - size_t _length; - char32_t* _data; -}; - -class Utf8String { - Utf8String(const Utf8String&) = delete; - Utf8String& operator=(const Utf8String&) = delete; - - public: - explicit Utf8String(const Utf32String& src) { - size_t len = src.length() * 4 + 1; - _data = new char[len]; - copyString32to8(_data, len, src.get()); - } - - ~Utf8String() { delete[] _data; } - - public: - char* get() const { return _data; } - - private: - char* _data; -}; - -struct linenoiseCompletions { - vector completionStrings; -}; - -#define LINENOISE_DEFAULT_HISTORY_MAX_LEN 100 -#define LINENOISE_MAX_LINE 4096 - -// make control-characters more readable -#define ctrlChar(upperCaseASCII) (upperCaseASCII - 0x40) - -/** - * Recompute widths of all characters in a char32_t buffer - * @param text input buffer of Unicode characters - * @param widths output buffer of character widths - * @param charCount number of characters in buffer - */ -namespace linenoise_ng { -int mk_wcwidth(char32_t ucs); -} - -static void recomputeCharacterWidths(const char32_t* text, char* widths, - int charCount) { - for (int i = 0; i < charCount; ++i) { - widths[i] = mk_wcwidth(text[i]); - } -} - -/** - * Calculate a new screen position given a starting position, screen width and - * character count - * @param x initial x position (zero-based) - * @param y initial y position (zero-based) - * @param screenColumns screen column count - * @param charCount character positions to advance - * @param xOut returned x position (zero-based) - * @param yOut returned y position (zero-based) - */ -static void calculateScreenPosition(int x, int y, int screenColumns, - int charCount, int& xOut, int& yOut) { - xOut = x; - yOut = y; - int charsRemaining = charCount; - while (charsRemaining > 0) { - int charsThisRow = (x + charsRemaining < screenColumns) ? 
charsRemaining - : screenColumns - x; - xOut = x + charsThisRow; - yOut = y; - charsRemaining -= charsThisRow; - x = 0; - ++y; - } - if (xOut == screenColumns) { // we have to special-case line wrap - xOut = 0; - ++yOut; - } -} - -/** - * Calculate a column width using mk_wcswidth() - * @param buf32 text to calculate - * @param len length of text to calculate - */ -namespace linenoise_ng { -int mk_wcswidth(const char32_t* pwcs, size_t n); -} - -static int calculateColumnPosition(char32_t* buf32, int len) { - int width = mk_wcswidth(reinterpret_cast(buf32), len); - if (width == -1) - return len; - else - return width; -} - -static bool isControlChar(char32_t testChar) { - return (testChar < ' ') || // C0 controls - (testChar >= 0x7F && testChar <= 0x9F); // DEL and C1 controls -} - -struct PromptBase { // a convenience struct for grouping prompt info - Utf32String promptText; // our copy of the prompt text, edited - char* promptCharWidths; // character widths from mk_wcwidth() - int promptChars; // chars in promptText - int promptBytes; // bytes in promptText - int promptExtraLines; // extra lines (beyond 1) occupied by prompt - int promptIndentation; // column offset to end of prompt - int promptLastLinePosition; // index into promptText where last line begins - int promptPreviousInputLen; // promptChars of previous input line, for - // clearing - int promptCursorRowOffset; // where the cursor is relative to the start of - // the prompt - int promptScreenColumns; // width of screen in columns - int promptPreviousLen; // help erasing - int promptErrorCode; // error code (invalid UTF-8) or zero - - PromptBase() : promptPreviousInputLen(0) {} - - bool write() { - if (write32(1, promptText.get(), promptBytes) == -1) return false; - - return true; - } -}; - -struct PromptInfo : public PromptBase { - PromptInfo(const char* textPtr, int columns) { - promptExtraLines = 0; - promptLastLinePosition = 0; - promptPreviousLen = 0; - promptScreenColumns = columns; - Utf32String tempUnicode(textPtr); - - // strip control characters from the prompt -- we do allow newline - char32_t* pIn = tempUnicode.get(); - char32_t* pOut = pIn; - - int len = 0; - int x = 0; - - bool const strip = (isatty(1) == 0); - - while (*pIn) { - char32_t c = *pIn; - if ('\n' == c || !isControlChar(c)) { - *pOut = c; - ++pOut; - ++pIn; - ++len; - if ('\n' == c || ++x >= promptScreenColumns) { - x = 0; - ++promptExtraLines; - promptLastLinePosition = len; - } - } else if (c == '\x1b') { - if (strip) { - // jump over control chars - ++pIn; - if (*pIn == '[') { - ++pIn; - while (*pIn && ((*pIn == ';') || ((*pIn >= '0' && *pIn <= '9')))) { - ++pIn; - } - if (*pIn == 'm') { - ++pIn; - } - } - } else { - // copy control chars - *pOut = *pIn; - ++pOut; - ++pIn; - if (*pIn == '[') { - *pOut = *pIn; - ++pOut; - ++pIn; - while (*pIn && ((*pIn == ';') || ((*pIn >= '0' && *pIn <= '9')))) { - *pOut = *pIn; - ++pOut; - ++pIn; - } - if (*pIn == 'm') { - *pOut = *pIn; - ++pOut; - ++pIn; - } - } - } - } else { - ++pIn; - } - } - *pOut = 0; - promptChars = len; - promptBytes = static_cast(pOut - tempUnicode.get()); - promptText = tempUnicode; - - promptIndentation = len - promptLastLinePosition; - promptCursorRowOffset = promptExtraLines; - } -}; - -// Used with DynamicPrompt (history search) -// -static const Utf32String forwardSearchBasePrompt("(i-search)`"); -static const Utf32String reverseSearchBasePrompt("(reverse-i-search)`"); -static const Utf32String endSearchBasePrompt("': "); -static Utf32String - previousSearchText; // remembered across 
invocations of linenoise() - -// changing prompt for "(reverse-i-search)`text':" etc. -// -struct DynamicPrompt : public PromptBase { - Utf32String searchText; // text we are searching for - char* searchCharWidths; // character widths from mk_wcwidth() - int searchTextLen; // chars in searchText - int direction; // current search direction, 1=forward, -1=reverse - - DynamicPrompt(PromptBase& pi, int initialDirection) - : searchTextLen(0), direction(initialDirection) { - promptScreenColumns = pi.promptScreenColumns; - promptCursorRowOffset = 0; - Utf32String emptyString(1); - searchText = emptyString; - const Utf32String* basePrompt = - (direction > 0) ? &forwardSearchBasePrompt : &reverseSearchBasePrompt; - size_t promptStartLength = basePrompt->length(); - promptChars = - static_cast(promptStartLength + endSearchBasePrompt.length()); - promptBytes = promptChars; - promptLastLinePosition = promptChars; // TODO fix this, we are asssuming - // that the history prompt won't wrap - // (!) - promptPreviousLen = promptChars; - Utf32String tempUnicode(promptChars + 1); - memcpy(tempUnicode.get(), basePrompt->get(), - sizeof(char32_t) * promptStartLength); - memcpy(&tempUnicode[promptStartLength], endSearchBasePrompt.get(), - sizeof(char32_t) * (endSearchBasePrompt.length() + 1)); - tempUnicode.initFromBuffer(); - promptText = tempUnicode; - calculateScreenPosition(0, 0, pi.promptScreenColumns, promptChars, - promptIndentation, promptExtraLines); - } - - void updateSearchPrompt(void) { - const Utf32String* basePrompt = - (direction > 0) ? &forwardSearchBasePrompt : &reverseSearchBasePrompt; - size_t promptStartLength = basePrompt->length(); - promptChars = static_cast(promptStartLength + searchTextLen + - endSearchBasePrompt.length()); - promptBytes = promptChars; - Utf32String tempUnicode(promptChars + 1); - memcpy(tempUnicode.get(), basePrompt->get(), - sizeof(char32_t) * promptStartLength); - memcpy(&tempUnicode[promptStartLength], searchText.get(), - sizeof(char32_t) * searchTextLen); - size_t endIndex = promptStartLength + searchTextLen; - memcpy(&tempUnicode[endIndex], endSearchBasePrompt.get(), - sizeof(char32_t) * (endSearchBasePrompt.length() + 1)); - tempUnicode.initFromBuffer(); - promptText = tempUnicode; - } - - void updateSearchText(const char32_t* textPtr) { - Utf32String tempUnicode(textPtr); - searchTextLen = static_cast(tempUnicode.chars()); - searchText = tempUnicode; - updateSearchPrompt(); - } -}; - -class KillRing { - static const int capacity = 10; - int size; - int index; - char indexToSlot[10]; - vector theRing; - - public: - enum action { actionOther, actionKill, actionYank }; - action lastAction; - size_t lastYankSize; - - KillRing() : size(0), index(0), lastAction(actionOther) { - theRing.reserve(capacity); - } - - void kill(const char32_t* text, int textLen, bool forward) { - if (textLen == 0) { - return; - } - Utf32String killedText(text, textLen); - if (lastAction == actionKill && size > 0) { - int slot = indexToSlot[0]; - int currentLen = static_cast(theRing[slot].length()); - int resultLen = currentLen + textLen; - Utf32String temp(resultLen + 1); - if (forward) { - memcpy(temp.get(), theRing[slot].get(), currentLen * sizeof(char32_t)); - memcpy(&temp[currentLen], killedText.get(), textLen * sizeof(char32_t)); - } else { - memcpy(temp.get(), killedText.get(), textLen * sizeof(char32_t)); - memcpy(&temp[textLen], theRing[slot].get(), - currentLen * sizeof(char32_t)); - } - temp[resultLen] = 0; - temp.initFromBuffer(); - theRing[slot] = temp; - } else { - if (size < 
capacity) { - if (size > 0) { - memmove(&indexToSlot[1], &indexToSlot[0], size); - } - indexToSlot[0] = size; - size++; - theRing.push_back(killedText); - } else { - int slot = indexToSlot[capacity - 1]; - theRing[slot] = killedText; - memmove(&indexToSlot[1], &indexToSlot[0], capacity - 1); - indexToSlot[0] = slot; - } - index = 0; - } - } - - Utf32String* yank() { return (size > 0) ? &theRing[indexToSlot[index]] : 0; } - - Utf32String* yankPop() { - if (size == 0) { - return 0; - } - ++index; - if (index == size) { - index = 0; - } - return &theRing[indexToSlot[index]]; - } -}; - -class InputBuffer { - char32_t* buf32; // input buffer - char* charWidths; // character widths from mk_wcwidth() - int buflen; // buffer size in characters - int len; // length of text in input buffer - int pos; // character position in buffer ( 0 <= pos <= len ) - - void clearScreen(PromptBase& pi); - int incrementalHistorySearch(PromptBase& pi, int startChar); - int completeLine(PromptBase& pi); - void refreshLine(PromptBase& pi); - - public: - InputBuffer(char32_t* buffer, char* widthArray, int bufferLen) - : buf32(buffer), - charWidths(widthArray), - buflen(bufferLen - 1), - len(0), - pos(0) { - buf32[0] = 0; - } - void preloadBuffer(const char* preloadText) { - size_t ucharCount = 0; - copyString8to32(buf32, buflen + 1, ucharCount, preloadText); - recomputeCharacterWidths(buf32, charWidths, static_cast(ucharCount)); - len = static_cast(ucharCount); - pos = static_cast(ucharCount); - } - int getInputLine(PromptBase& pi); - int length(void) const { return len; } -}; - -// Special codes for keyboard input: -// -// Between Windows and the various Linux "terminal" programs, there is some -// pretty diverse behavior in the "scan codes" and escape sequences we are -// presented with. So ... we'll translate them all into our own pidgin -// pseudocode, trying to stay out of the way of UTF-8 and international -// characters. Here's the general plan. -// -// "User input keystrokes" (key chords, whatever) will be encoded as a single -// value. -// The low 21 bits are reserved for Unicode characters. Popular function-type -// keys -// get their own codes in the range 0x10200000 to (if needed) 0x1FE00000, -// currently -// just arrow keys, Home, End and Delete. Keypresses with Ctrl get ORed with -// 0x20000000, with Alt get ORed with 0x40000000. So, Ctrl+Alt+Home is encoded -// as 0x20000000 + 0x40000000 + 0x10A00000 == 0x70A00000. To keep things -// complicated, -// the Alt key is equivalent to prefixing the keystroke with ESC, so ESC -// followed by -// D is treated the same as Alt + D ... we'll just use Emacs terminology and -// call -// this "Meta". So, we will encode both ESC followed by D and Alt held down -// while D -// is pressed the same, as Meta-D, encoded as 0x40000064. 
-// -// Here are the definitions of our component constants: -// -// Maximum unsigned 32-bit value = 0xFFFFFFFF; // For reference, max 32-bit -// value -// Highest allocated Unicode char = 0x001FFFFF; // For reference, max -// Unicode value -static const int META = 0x40000000; // Meta key combination -static const int CTRL = 0x20000000; // Ctrl key combination -// static const int SPECIAL_KEY = 0x10000000; // Common bit for all special -// keys -static const int UP_ARROW_KEY = 0x10200000; // Special keys -static const int DOWN_ARROW_KEY = 0x10400000; -static const int RIGHT_ARROW_KEY = 0x10600000; -static const int LEFT_ARROW_KEY = 0x10800000; -static const int HOME_KEY = 0x10A00000; -static const int END_KEY = 0x10C00000; -static const int DELETE_KEY = 0x10E00000; -static const int PAGE_UP_KEY = 0x11000000; -static const int PAGE_DOWN_KEY = 0x11200000; - -static const char* unsupported_term[] = {"dumb", "cons25", "emacs", NULL}; -static linenoiseCompletionCallback* completionCallback = NULL; - -#ifdef _WIN32 -static HANDLE console_in, console_out; -static DWORD oldMode; -static WORD oldDisplayAttribute; -#else -static struct termios orig_termios; /* in order to restore at exit */ -#endif - -static KillRing killRing; - -static int rawmode = 0; /* for atexit() function to check if restore is needed*/ -static int atexit_registered = 0; /* register atexit just 1 time */ -static int historyMaxLen = LINENOISE_DEFAULT_HISTORY_MAX_LEN; -static int historyLen = 0; -static int historyIndex = 0; -static char8_t** history = NULL; - -// used to emulate Windows command prompt on down-arrow after a recall -// we use -2 as our "not set" value because we add 1 to the previous index on -// down-arrow, -// and zero is a valid index (so -1 is a valid "previous index") -static int historyPreviousIndex = -2; -static bool historyRecallMostRecent = false; - -static void linenoiseAtExit(void); - -static bool isUnsupportedTerm(void) { - char* term = getenv("TERM"); - if (term == NULL) return false; - for (int j = 0; unsupported_term[j]; ++j) - if (!strcasecmp(term, unsupported_term[j])) { - return true; - } - return false; -} - -static void beep() { - fprintf(stderr, "\x7"); // ctrl-G == bell/beep - fflush(stderr); -} - -void linenoiseHistoryFree(void) { - if (history) { - for (int j = 0; j < historyLen; ++j) free(history[j]); - historyLen = 0; - free(history); - history = 0; - } -} - -static int enableRawMode(void) { -#ifdef _WIN32 - if (!console_in) { - console_in = GetStdHandle(STD_INPUT_HANDLE); - console_out = GetStdHandle(STD_OUTPUT_HANDLE); - - GetConsoleMode(console_in, &oldMode); - SetConsoleMode(console_in, oldMode & - ~(ENABLE_LINE_INPUT | ENABLE_ECHO_INPUT | - ENABLE_PROCESSED_INPUT)); - } - return 0; -#else - struct termios raw; - - if (!isatty(STDIN_FILENO)) goto fatal; - if (!atexit_registered) { - atexit(linenoiseAtExit); - atexit_registered = 1; - } - if (tcgetattr(0, &orig_termios) == -1) goto fatal; - - raw = orig_termios; /* modify the original mode */ - /* input modes: no break, no CR to NL, no parity check, no strip char, - * no start/stop output control. 
*/ - raw.c_iflag &= ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON); - /* output modes - disable post processing */ - // this is wrong, we don't want raw output, it turns newlines into straight - // linefeeds - // raw.c_oflag &= ~(OPOST); - /* control modes - set 8 bit chars */ - raw.c_cflag |= (CS8); - /* local modes - echoing off, canonical off, no extended functions, - * no signal chars (^Z,^C) */ - raw.c_lflag &= ~(ECHO | ICANON | IEXTEN | ISIG); - /* control chars - set return condition: min number of bytes and timer. - * We want read to return every single byte, without timeout. */ - raw.c_cc[VMIN] = 1; - raw.c_cc[VTIME] = 0; /* 1 byte, no timer */ - - /* put terminal in raw mode after flushing */ - if (tcsetattr(0, TCSADRAIN, &raw) < 0) goto fatal; - rawmode = 1; - return 0; - -fatal: - errno = ENOTTY; - return -1; -#endif -} - -static void disableRawMode(void) { -#ifdef _WIN32 - SetConsoleMode(console_in, oldMode); - console_in = 0; - console_out = 0; -#else - if (rawmode && tcsetattr(0, TCSADRAIN, &orig_termios) != -1) rawmode = 0; -#endif -} - -// At exit we'll try to fix the terminal to the initial conditions -static void linenoiseAtExit(void) { disableRawMode(); } - -static int getScreenColumns(void) { - int cols; -#ifdef _WIN32 - CONSOLE_SCREEN_BUFFER_INFO inf; - GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE), &inf); - cols = inf.dwSize.X; -#else - struct winsize ws; - cols = (ioctl(1, TIOCGWINSZ, &ws) == -1) ? 80 : ws.ws_col; -#endif - // cols is 0 in certain circumstances like inside debugger, which creates - // further issues - return (cols > 0) ? cols : 80; -} - -static int getScreenRows(void) { - int rows; -#ifdef _WIN32 - CONSOLE_SCREEN_BUFFER_INFO inf; - GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE), &inf); - rows = 1 + inf.srWindow.Bottom - inf.srWindow.Top; -#else - struct winsize ws; - rows = (ioctl(1, TIOCGWINSZ, &ws) == -1) ? 24 : ws.ws_row; -#endif - return (rows > 0) ? rows : 24; -} - -static void setDisplayAttribute(bool enhancedDisplay, bool error) { -#ifdef _WIN32 - if (enhancedDisplay) { - CONSOLE_SCREEN_BUFFER_INFO inf; - GetConsoleScreenBufferInfo(console_out, &inf); - oldDisplayAttribute = inf.wAttributes; - BYTE oldLowByte = oldDisplayAttribute & 0xFF; - BYTE newLowByte; - switch (oldLowByte) { - case 0x07: - // newLowByte = FOREGROUND_BLUE | FOREGROUND_INTENSITY; // too dim - // newLowByte = FOREGROUND_BLUE; // even dimmer - newLowByte = FOREGROUND_BLUE | - FOREGROUND_GREEN; // most similar to xterm appearance - break; - case 0x70: - newLowByte = BACKGROUND_BLUE | BACKGROUND_INTENSITY; - break; - default: - newLowByte = oldLowByte ^ 0xFF; // default to inverse video - break; - } - inf.wAttributes = (inf.wAttributes & 0xFF00) | newLowByte; - SetConsoleTextAttribute(console_out, inf.wAttributes); - } else { - SetConsoleTextAttribute(console_out, oldDisplayAttribute); - } -#else - if (enhancedDisplay) { - char const* p = (error ? "\x1b[1;31m" : "\x1b[1;34m"); - if (write(1, p, 7) == -1) - return; /* bright blue (visible with both B&W bg) */ - } else { - if (write(1, "\x1b[0m", 4) == -1) return; /* reset */ - } -#endif -} - -/** - * Display the dynamic incremental search prompt and the current user input - * line. 
- * @param pi PromptBase struct holding information about the prompt and our - * screen position - * @param buf32 input buffer to be displayed - * @param len count of characters in the buffer - * @param pos current cursor position within the buffer (0 <= pos <= len) - */ -static void dynamicRefresh(PromptBase& pi, char32_t* buf32, int len, int pos) { - // calculate the position of the end of the prompt - int xEndOfPrompt, yEndOfPrompt; - calculateScreenPosition(0, 0, pi.promptScreenColumns, pi.promptChars, - xEndOfPrompt, yEndOfPrompt); - pi.promptIndentation = xEndOfPrompt; - - // calculate the position of the end of the input line - int xEndOfInput, yEndOfInput; - calculateScreenPosition(xEndOfPrompt, yEndOfPrompt, pi.promptScreenColumns, - calculateColumnPosition(buf32, len), xEndOfInput, - yEndOfInput); - - // calculate the desired position of the cursor - int xCursorPos, yCursorPos; - calculateScreenPosition(xEndOfPrompt, yEndOfPrompt, pi.promptScreenColumns, - calculateColumnPosition(buf32, pos), xCursorPos, - yCursorPos); - -#ifdef _WIN32 - // position at the start of the prompt, clear to end of previous input - CONSOLE_SCREEN_BUFFER_INFO inf; - GetConsoleScreenBufferInfo(console_out, &inf); - inf.dwCursorPosition.X = 0; - inf.dwCursorPosition.Y -= pi.promptCursorRowOffset /*- pi.promptExtraLines*/; - SetConsoleCursorPosition(console_out, inf.dwCursorPosition); - DWORD count; - FillConsoleOutputCharacterA(console_out, ' ', - pi.promptPreviousLen + pi.promptPreviousInputLen, - inf.dwCursorPosition, &count); - pi.promptPreviousLen = pi.promptIndentation; - pi.promptPreviousInputLen = len; - - // display the prompt - if (!pi.write()) return; - - // display the input line - if (write32(1, buf32, len) == -1) return; - - // position the cursor - GetConsoleScreenBufferInfo(console_out, &inf); - inf.dwCursorPosition.X = xCursorPos; // 0-based on Win32 - inf.dwCursorPosition.Y -= yEndOfInput - yCursorPos; - SetConsoleCursorPosition(console_out, inf.dwCursorPosition); -#else // _WIN32 - char seq[64]; - int cursorRowMovement = pi.promptCursorRowOffset - pi.promptExtraLines; - if (cursorRowMovement > 0) { // move the cursor up as required - snprintf(seq, sizeof seq, "\x1b[%dA", cursorRowMovement); - if (write(1, seq, strlen(seq)) == -1) return; - } - // position at the start of the prompt, clear to end of screen - snprintf(seq, sizeof seq, "\x1b[1G\x1b[J"); // 1-based on VT100 - if (write(1, seq, strlen(seq)) == -1) return; - - // display the prompt - if (!pi.write()) return; - - // display the input line - if (write32(1, buf32, len) == -1) return; - - // we have to generate our own newline on line wrap - if (xEndOfInput == 0 && yEndOfInput > 0) - if (write(1, "\n", 1) == -1) return; - - // position the cursor - cursorRowMovement = yEndOfInput - yCursorPos; - if (cursorRowMovement > 0) { // move the cursor up as required - snprintf(seq, sizeof seq, "\x1b[%dA", cursorRowMovement); - if (write(1, seq, strlen(seq)) == -1) return; - } - // position the cursor within the line - snprintf(seq, sizeof seq, "\x1b[%dG", xCursorPos + 1); // 1-based on VT100 - if (write(1, seq, strlen(seq)) == -1) return; -#endif - - pi.promptCursorRowOffset = - pi.promptExtraLines + yCursorPos; // remember row for next pass -} - -/** - * Refresh the user's input line: the prompt is already onscreen and is not - * redrawn here - * @param pi PromptBase struct holding information about the prompt and our - * screen position - */ -void InputBuffer::refreshLine(PromptBase& pi) { - // check for a matching brace/bracket/paren, 
remember its position if found - int highlight = -1; - bool indicateError = false; - if (pos < len) { - /* this scans for a brace matching buf32[pos] to highlight */ - unsigned char part1, part2; - int scanDirection = 0; - if (strchr("}])", buf32[pos])) { - scanDirection = -1; /* backwards */ - if (buf32[pos] == '}') { - part1 = '}'; part2 = '{'; - } else if (buf32[pos] == ']') { - part1 = ']'; part2 = '['; - } else { - part1 = ')'; part2 = '('; - } - } - else if (strchr("{[(", buf32[pos])) { - scanDirection = 1; /* forwards */ - if (buf32[pos] == '{') { - //part1 = '{'; part2 = '}'; - part1 = '}'; part2 = '{'; - } else if (buf32[pos] == '[') { - //part1 = '['; part2 = ']'; - part1 = ']'; part2 = '['; - } else { - //part1 = '('; part2 = ')'; - part1 = ')'; part2 = '('; - } - } - - if (scanDirection) { - int unmatched = scanDirection; - int unmatchedOther = 0; - for (int i = pos + scanDirection; i >= 0 && i < len; i += scanDirection) { - /* TODO: the right thing when inside a string */ - if (strchr("}])", buf32[i])) { - if (buf32[i] == part1) { - --unmatched; - } else { - --unmatchedOther; - } - } else if (strchr("{[(", buf32[i])) { - if (buf32[i] == part2) { - ++unmatched; - } else { - ++unmatchedOther; - } - } -/* - if (strchr("}])", buf32[i])) - --unmatched; - else if (strchr("{[(", buf32[i])) - ++unmatched; -*/ - if (unmatched == 0) { - highlight = i; - indicateError = (unmatchedOther != 0); - break; - } - } - } - } - - // calculate the position of the end of the input line - int xEndOfInput, yEndOfInput; - calculateScreenPosition(pi.promptIndentation, 0, pi.promptScreenColumns, - calculateColumnPosition(buf32, len), xEndOfInput, - yEndOfInput); - - // calculate the desired position of the cursor - int xCursorPos, yCursorPos; - calculateScreenPosition(pi.promptIndentation, 0, pi.promptScreenColumns, - calculateColumnPosition(buf32, pos), xCursorPos, - yCursorPos); - -#ifdef _WIN32 - // position at the end of the prompt, clear to end of previous input - CONSOLE_SCREEN_BUFFER_INFO inf; - GetConsoleScreenBufferInfo(console_out, &inf); - inf.dwCursorPosition.X = pi.promptIndentation; // 0-based on Win32 - inf.dwCursorPosition.Y -= pi.promptCursorRowOffset - pi.promptExtraLines; - SetConsoleCursorPosition(console_out, inf.dwCursorPosition); - DWORD count; - if (len < pi.promptPreviousInputLen) - FillConsoleOutputCharacterA(console_out, ' ', pi.promptPreviousInputLen, - inf.dwCursorPosition, &count); - pi.promptPreviousInputLen = len; - - // display the input line - if (highlight == -1) { - if (write32(1, buf32, len) == -1) return; - } else { - if (write32(1, buf32, highlight) == -1) return; - setDisplayAttribute(true, indicateError); /* bright blue (visible with both B&W bg) */ - if (write32(1, &buf32[highlight], 1) == -1) return; - setDisplayAttribute(false, indicateError); - if (write32(1, buf32 + highlight + 1, len - highlight - 1) == -1) return; - } - - // position the cursor - GetConsoleScreenBufferInfo(console_out, &inf); - inf.dwCursorPosition.X = xCursorPos; // 0-based on Win32 - inf.dwCursorPosition.Y -= yEndOfInput - yCursorPos; - SetConsoleCursorPosition(console_out, inf.dwCursorPosition); -#else // _WIN32 - char seq[64]; - int cursorRowMovement = pi.promptCursorRowOffset - pi.promptExtraLines; - if (cursorRowMovement > 0) { // move the cursor up as required - snprintf(seq, sizeof seq, "\x1b[%dA", cursorRowMovement); - if (write(1, seq, strlen(seq)) == -1) return; - } - // position at the end of the prompt, clear to end of screen - snprintf(seq, sizeof seq, "\x1b[%dG\x1b[J", - 
pi.promptIndentation + 1); // 1-based on VT100 - if (write(1, seq, strlen(seq)) == -1) return; - - if (highlight == -1) { // write unhighlighted text - if (write32(1, buf32, len) == -1) return; - } else { // highlight the matching brace/bracket/parenthesis - if (write32(1, buf32, highlight) == -1) return; - setDisplayAttribute(true, indicateError); - if (write32(1, &buf32[highlight], 1) == -1) return; - setDisplayAttribute(false, indicateError); - if (write32(1, buf32 + highlight + 1, len - highlight - 1) == -1) return; - } - - // we have to generate our own newline on line wrap - if (xEndOfInput == 0 && yEndOfInput > 0) - if (write(1, "\n", 1) == -1) return; - - // position the cursor - cursorRowMovement = yEndOfInput - yCursorPos; - if (cursorRowMovement > 0) { // move the cursor up as required - snprintf(seq, sizeof seq, "\x1b[%dA", cursorRowMovement); - if (write(1, seq, strlen(seq)) == -1) return; - } - // position the cursor within the line - snprintf(seq, sizeof seq, "\x1b[%dG", xCursorPos + 1); // 1-based on VT100 - if (write(1, seq, strlen(seq)) == -1) return; -#endif - - pi.promptCursorRowOffset = - pi.promptExtraLines + yCursorPos; // remember row for next pass -} - -#ifndef _WIN32 - -/** - * Read a UTF-8 sequence from the non-Windows keyboard and return the Unicode - * (char32_t) character it - * encodes - * - * @return char32_t Unicode character - */ -static char32_t readUnicodeCharacter(void) { - static char8_t utf8String[5]; - static size_t utf8Count = 0; - while (true) { - char8_t c; - - /* Continue reading if interrupted by signal. */ - ssize_t nread; - do { - nread = read(0, &c, 1); - } while ((nread == -1) && (errno == EINTR)); - - if (nread <= 0) return 0; - if (c <= 0x7F) { // short circuit ASCII - utf8Count = 0; - return c; - } else if (utf8Count < sizeof(utf8String) - 1) { - utf8String[utf8Count++] = c; - utf8String[utf8Count] = 0; - char32_t unicodeChar[2]; - size_t ucharCount; - ConversionResult res = - copyString8to32(unicodeChar, 2, ucharCount, utf8String); - if (res == conversionOK && ucharCount) { - utf8Count = 0; - return unicodeChar[0]; - } - } else { - utf8Count = - 0; // this shouldn't happen: got four bytes but no UTF-8 character - } - } -} - -namespace EscapeSequenceProcessing { // move these out of global namespace - -// This chunk of code does parsing of the escape sequences sent by various Linux -// terminals. -// -// It handles arrow keys, Home, End and Delete keys by interpreting the -// sequences sent by -// gnome terminal, xterm, rxvt, konsole, aterm and yakuake including the Alt and -// Ctrl key -// combinations that are understood by linenoise. -// -// The parsing uses tables, a bunch of intermediate dispatch routines and a -// doDispatch -// loop that reads the tables and sends control to "deeper" routines to continue -// the -// parsing. The starting call to doDispatch( c, initialDispatch ) will -// eventually return -// either a character (with optional CTRL and META bits set), or -1 if parsing -// fails, or -// zero if an attempt to read from the keyboard fails. -// -// This is rather sloppy escape sequence processing, since we're not paying -// attention to what the -// actual TERM is set to and are processing all key sequences for all terminals, -// but it works with -// the most common keystrokes on the most common terminals. It's intricate, but -// the nested 'if' -// statements required to do it directly would be worse. This way has the -// advantage of allowing -// changes and extensions without having to touch a lot of code. 
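// [Editorial sketch: the table-driven dispatch described above, reduced to a
// self-contained example. Everything below (Routine, Dispatch, onUpArrow,
// onFailure, decode, demoTable) is illustrative and is not part of the
// linenoise sources; it only shows the shape of the technique: a character
// list, a parallel routine list that is one entry longer, and a loop that
// falls through to the final "no match" routine.]

typedef int (*Routine)(int);

struct Dispatch {
  unsigned int len;     // number of characters in `chars`
  const char* chars;    // characters to test, in order
  Routine* routines;    // routines[len] handles "no match"
};

static int onUpArrow(int) { return 1; }   // stand-in for upArrowKeyRoutine
static int onFailure(int) { return -1; }  // stand-in for escFailureRoutine

static Routine demoRoutines[] = {onUpArrow, onFailure};
static Dispatch demoTable = {1, "A", demoRoutines};

static int decode(int c, Dispatch& table) {
  for (unsigned int i = 0; i < table.len; ++i)
    if (table.chars[i] == c) return table.routines[i](c);
  return table.routines[table.len](c);  // final entry: no character matched
}
// decode('A', demoTable) == 1; decode('x', demoTable) == -1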
- -// This is a typedef for the routine called by doDispatch(). It takes the -// current character -// as input, does any required processing including reading more characters and -// calling other -// dispatch routines, then eventually returns the final (possibly extended or -// special) character. -// -typedef char32_t (*CharacterDispatchRoutine)(char32_t); - -// This structure is used by doDispatch() to hold a list of characters to test -// for and -// a list of routines to call if the character matches. The dispatch routine -// list is one -// longer than the character list; the final entry is used if no character -// matches. -// -struct CharacterDispatch { - unsigned int len; // length of the chars list - const char* chars; // chars to test - CharacterDispatchRoutine* dispatch; // array of routines to call -}; - -// This dispatch routine is given a dispatch table and then farms work out to -// routines -// listed in the table based on the character it is called with. The dispatch -// routines can -// read more input characters to decide what should eventually be returned. -// Eventually, -// a called routine returns either a character or -1 to indicate parsing -// failure. -// -static char32_t doDispatch(char32_t c, CharacterDispatch& dispatchTable) { - for (unsigned int i = 0; i < dispatchTable.len; ++i) { - if (static_cast(dispatchTable.chars[i]) == c) { - return dispatchTable.dispatch[i](c); - } - } - return dispatchTable.dispatch[dispatchTable.len](c); -} - -static char32_t thisKeyMetaCtrl = - 0; // holds pre-set Meta and/or Ctrl modifiers - -// Final dispatch routines -- return something -// -static char32_t normalKeyRoutine(char32_t c) { return thisKeyMetaCtrl | c; } -static char32_t upArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | UP_ARROW_KEY; -} -static char32_t downArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | DOWN_ARROW_KEY; -} -static char32_t rightArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | RIGHT_ARROW_KEY; -} -static char32_t leftArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | LEFT_ARROW_KEY; -} -static char32_t homeKeyRoutine(char32_t) { return thisKeyMetaCtrl | HOME_KEY; } -static char32_t endKeyRoutine(char32_t) { return thisKeyMetaCtrl | END_KEY; } -static char32_t pageUpKeyRoutine(char32_t) { - return thisKeyMetaCtrl | PAGE_UP_KEY; -} -static char32_t pageDownKeyRoutine(char32_t) { - return thisKeyMetaCtrl | PAGE_DOWN_KEY; -} -static char32_t deleteCharRoutine(char32_t) { - return thisKeyMetaCtrl | ctrlChar('H'); -} // key labeled Backspace -static char32_t deleteKeyRoutine(char32_t) { - return thisKeyMetaCtrl | DELETE_KEY; -} // key labeled Delete -static char32_t ctrlUpArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | CTRL | UP_ARROW_KEY; -} -static char32_t ctrlDownArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | CTRL | DOWN_ARROW_KEY; -} -static char32_t ctrlRightArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | CTRL | RIGHT_ARROW_KEY; -} -static char32_t ctrlLeftArrowKeyRoutine(char32_t) { - return thisKeyMetaCtrl | CTRL | LEFT_ARROW_KEY; -} -static char32_t escFailureRoutine(char32_t) { - beep(); - return -1; -} - -// Handle ESC [ 1 ; 3 (or 5) escape sequences -// -static CharacterDispatchRoutine escLeftBracket1Semicolon3or5Routines[] = { - upArrowKeyRoutine, downArrowKeyRoutine, rightArrowKeyRoutine, - leftArrowKeyRoutine, escFailureRoutine}; -static CharacterDispatch escLeftBracket1Semicolon3or5Dispatch = { - 4, "ABCD", escLeftBracket1Semicolon3or5Routines}; - -// Handle ESC [ 1 ; escape sequences -// 
-static char32_t escLeftBracket1Semicolon3Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - thisKeyMetaCtrl |= META; - return doDispatch(c, escLeftBracket1Semicolon3or5Dispatch); -} -static char32_t escLeftBracket1Semicolon5Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - thisKeyMetaCtrl |= CTRL; - return doDispatch(c, escLeftBracket1Semicolon3or5Dispatch); -} -static CharacterDispatchRoutine escLeftBracket1SemicolonRoutines[] = { - escLeftBracket1Semicolon3Routine, escLeftBracket1Semicolon5Routine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket1SemicolonDispatch = { - 2, "35", escLeftBracket1SemicolonRoutines}; - -// Handle ESC [ 1 escape sequences -// -static char32_t escLeftBracket1SemicolonRoutine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket1SemicolonDispatch); -} -static CharacterDispatchRoutine escLeftBracket1Routines[] = { - homeKeyRoutine, escLeftBracket1SemicolonRoutine, escFailureRoutine}; -static CharacterDispatch escLeftBracket1Dispatch = {2, "~;", - escLeftBracket1Routines}; - -// Handle ESC [ 3 escape sequences -// -static CharacterDispatchRoutine escLeftBracket3Routines[] = {deleteKeyRoutine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket3Dispatch = {1, "~", - escLeftBracket3Routines}; - -// Handle ESC [ 4 escape sequences -// -static CharacterDispatchRoutine escLeftBracket4Routines[] = {endKeyRoutine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket4Dispatch = {1, "~", - escLeftBracket4Routines}; - -// Handle ESC [ 5 escape sequences -// -static CharacterDispatchRoutine escLeftBracket5Routines[] = {pageUpKeyRoutine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket5Dispatch = {1, "~", - escLeftBracket5Routines}; - -// Handle ESC [ 6 escape sequences -// -static CharacterDispatchRoutine escLeftBracket6Routines[] = {pageDownKeyRoutine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket6Dispatch = {1, "~", - escLeftBracket6Routines}; - -// Handle ESC [ 7 escape sequences -// -static CharacterDispatchRoutine escLeftBracket7Routines[] = {homeKeyRoutine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket7Dispatch = {1, "~", - escLeftBracket7Routines}; - -// Handle ESC [ 8 escape sequences -// -static CharacterDispatchRoutine escLeftBracket8Routines[] = {endKeyRoutine, - escFailureRoutine}; -static CharacterDispatch escLeftBracket8Dispatch = {1, "~", - escLeftBracket8Routines}; - -// Handle ESC [ escape sequences -// -static char32_t escLeftBracket0Routine(char32_t c) { - return escFailureRoutine(c); -} -static char32_t escLeftBracket1Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket1Dispatch); -} -static char32_t escLeftBracket2Routine(char32_t c) { - return escFailureRoutine(c); // Insert key, unused -} -static char32_t escLeftBracket3Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket3Dispatch); -} -static char32_t escLeftBracket4Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket4Dispatch); -} -static char32_t escLeftBracket5Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket5Dispatch); -} -static char32_t escLeftBracket6Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, 
escLeftBracket6Dispatch); -} -static char32_t escLeftBracket7Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket7Dispatch); -} -static char32_t escLeftBracket8Routine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracket8Dispatch); -} -static char32_t escLeftBracket9Routine(char32_t c) { - return escFailureRoutine(c); -} - -// Handle ESC [ escape sequences -// -static CharacterDispatchRoutine escLeftBracketRoutines[] = { - upArrowKeyRoutine, downArrowKeyRoutine, rightArrowKeyRoutine, - leftArrowKeyRoutine, homeKeyRoutine, endKeyRoutine, - escLeftBracket0Routine, escLeftBracket1Routine, escLeftBracket2Routine, - escLeftBracket3Routine, escLeftBracket4Routine, escLeftBracket5Routine, - escLeftBracket6Routine, escLeftBracket7Routine, escLeftBracket8Routine, - escLeftBracket9Routine, escFailureRoutine}; -static CharacterDispatch escLeftBracketDispatch = {16, "ABCDHF0123456789", - escLeftBracketRoutines}; - -// Handle ESC O escape sequences -// -static CharacterDispatchRoutine escORoutines[] = { - upArrowKeyRoutine, downArrowKeyRoutine, rightArrowKeyRoutine, - leftArrowKeyRoutine, homeKeyRoutine, endKeyRoutine, - ctrlUpArrowKeyRoutine, ctrlDownArrowKeyRoutine, ctrlRightArrowKeyRoutine, - ctrlLeftArrowKeyRoutine, escFailureRoutine}; -static CharacterDispatch escODispatch = {10, "ABCDHFabcd", escORoutines}; - -// Initial ESC dispatch -- could be a Meta prefix or the start of an escape -// sequence -// -static char32_t escLeftBracketRoutine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escLeftBracketDispatch); -} -static char32_t escORoutine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escODispatch); -} -static char32_t setMetaRoutine(char32_t c); // need forward reference -static CharacterDispatchRoutine escRoutines[] = {escLeftBracketRoutine, - escORoutine, setMetaRoutine}; -static CharacterDispatch escDispatch = {2, "[O", escRoutines}; - -// Initial dispatch -- we are not in the middle of anything yet -// -static char32_t escRoutine(char32_t c) { - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escDispatch); -} -static CharacterDispatchRoutine initialRoutines[] = { - escRoutine, deleteCharRoutine, normalKeyRoutine}; -static CharacterDispatch initialDispatch = {2, "\x1B\x7F", initialRoutines}; - -// Special handling for the ESC key because it does double duty -// -static char32_t setMetaRoutine(char32_t c) { - thisKeyMetaCtrl = META; - if (c == 0x1B) { // another ESC, stay in ESC processing mode - c = readUnicodeCharacter(); - if (c == 0) return 0; - return doDispatch(c, escDispatch); - } - return doDispatch(c, initialDispatch); -} - -} // namespace EscapeSequenceProcessing // move these out of global namespace - -#endif // #ifndef _WIN32 - -// linenoiseReadChar -- read a keystroke or keychord from the keyboard, and -// translate it -// into an encoded "keystroke". When convenient, extended keys are translated -// into their -// simpler Emacs keystrokes, so an unmodified "left arrow" becomes Ctrl-B. -// -// A return value of zero means "no input available", and a return value of -1 -// means "invalid key". 
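// [Editorial note: a small commented sketch of how the encoded value described
// above is consumed. The modifier bits META and CTRL are OR'ed onto either a
// plain Unicode character or one of the special key codes (UP_ARROW_KEY,
// DELETE_KEY, ...), and getInputLine() below switches on exactly those
// combinations. The helper name `describeKey` is hypothetical, for
// illustration only:
//
//   static const char* describeKey(char32_t k) {
//     if (k == 0) return "no input available";
//     if (k == static_cast<char32_t>(-1)) return "invalid key";
//     if ((k & META) && (k & CTRL)) return "meta+ctrl chord";
//     if (k & META) return "meta-modified key";
//     if (k & CTRL) return "ctrl-modified key";  // e.g. CTRL | LEFT_ARROW_KEY
//     return "plain character or special key";   // e.g. 'a' or HOME_KEY
//   }
// ]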
-// -static char32_t linenoiseReadChar(void) { -#ifdef _WIN32 - - INPUT_RECORD rec; - DWORD count; - int modifierKeys = 0; - bool escSeen = false; - while (true) { - ReadConsoleInputW(console_in, &rec, 1, &count); -#if 0 // helper for debugging keystrokes, display info in the debug "Output" - // window in the debugger - { - if ( rec.EventType == KEY_EVENT ) { - //if ( rec.Event.KeyEvent.uChar.UnicodeChar ) { - char buf[1024]; - sprintf( - buf, - "Unicode character 0x%04X, repeat count %d, virtual keycode 0x%04X, " - "virtual scancode 0x%04X, key %s%s%s%s%s\n", - rec.Event.KeyEvent.uChar.UnicodeChar, - rec.Event.KeyEvent.wRepeatCount, - rec.Event.KeyEvent.wVirtualKeyCode, - rec.Event.KeyEvent.wVirtualScanCode, - rec.Event.KeyEvent.bKeyDown ? "down" : "up", - (rec.Event.KeyEvent.dwControlKeyState & LEFT_CTRL_PRESSED) ? - " L-Ctrl" : "", - (rec.Event.KeyEvent.dwControlKeyState & RIGHT_CTRL_PRESSED) ? - " R-Ctrl" : "", - (rec.Event.KeyEvent.dwControlKeyState & LEFT_ALT_PRESSED) ? - " L-Alt" : "", - (rec.Event.KeyEvent.dwControlKeyState & RIGHT_ALT_PRESSED) ? - " R-Alt" : "" - ); - OutputDebugStringA( buf ); - //} - } - } -#endif - if (rec.EventType != KEY_EVENT) { - continue; - } - // Windows provides for entry of characters that are not on your keyboard by - // sending the - // Unicode characters as a "key up" with virtual keycode 0x12 (VK_MENU == - // Alt key) ... - // accept these characters, otherwise only process characters on "key down" - if (!rec.Event.KeyEvent.bKeyDown && - rec.Event.KeyEvent.wVirtualKeyCode != VK_MENU) { - continue; - } - modifierKeys = 0; - // AltGr is encoded as ( LEFT_CTRL_PRESSED | RIGHT_ALT_PRESSED ), so don't - // treat this - // combination as either CTRL or META we just turn off those two bits, so it - // is still - // possible to combine CTRL and/or META with an AltGr key by using - // right-Ctrl and/or - // left-Alt - if ((rec.Event.KeyEvent.dwControlKeyState & - (LEFT_CTRL_PRESSED | RIGHT_ALT_PRESSED)) == - (LEFT_CTRL_PRESSED | RIGHT_ALT_PRESSED)) { - rec.Event.KeyEvent.dwControlKeyState &= - ~(LEFT_CTRL_PRESSED | RIGHT_ALT_PRESSED); - } - if (rec.Event.KeyEvent.dwControlKeyState & - (RIGHT_CTRL_PRESSED | LEFT_CTRL_PRESSED)) { - modifierKeys |= CTRL; - } - if (rec.Event.KeyEvent.dwControlKeyState & - (RIGHT_ALT_PRESSED | LEFT_ALT_PRESSED)) { - modifierKeys |= META; - } - if (escSeen) { - modifierKeys |= META; - } - if (rec.Event.KeyEvent.uChar.UnicodeChar == 0) { - switch (rec.Event.KeyEvent.wVirtualKeyCode) { - case VK_LEFT: - return modifierKeys | LEFT_ARROW_KEY; - case VK_RIGHT: - return modifierKeys | RIGHT_ARROW_KEY; - case VK_UP: - return modifierKeys | UP_ARROW_KEY; - case VK_DOWN: - return modifierKeys | DOWN_ARROW_KEY; - case VK_DELETE: - return modifierKeys | DELETE_KEY; - case VK_HOME: - return modifierKeys | HOME_KEY; - case VK_END: - return modifierKeys | END_KEY; - case VK_PRIOR: - return modifierKeys | PAGE_UP_KEY; - case VK_NEXT: - return modifierKeys | PAGE_DOWN_KEY; - default: - continue; // in raw mode, ReadConsoleInput shows shift, ctrl ... - } // ... 
ignore them - } else if (rec.Event.KeyEvent.uChar.UnicodeChar == - ctrlChar('[')) { // ESC, set flag for later - escSeen = true; - continue; - } else { - // we got a real character, return it - return modifierKeys | rec.Event.KeyEvent.uChar.UnicodeChar; - } - } - -#else - char32_t c; - c = readUnicodeCharacter(); - if (c == 0) return 0; - -// If _DEBUG_LINUX_KEYBOARD is set, then ctrl-^ puts us into a keyboard -// debugging mode -// where we print out decimal and decoded values for whatever the "terminal" -// program -// gives us on different keystrokes. Hit ctrl-C to exit this mode. -// -#define _DEBUG_LINUX_KEYBOARD -#if defined(_DEBUG_LINUX_KEYBOARD) - if (c == ctrlChar('^')) { // ctrl-^, special debug mode, prints all keys hit, - // ctrl-C to get out - printf( - "\nEntering keyboard debugging mode (on ctrl-^), press ctrl-C to exit " - "this mode\n"); - while (true) { - unsigned char keys[10]; - int ret = read(0, keys, 10); - - if (ret <= 0) { - printf("\nret: %d\n", ret); - } - for (int i = 0; i < ret; ++i) { - char32_t key = static_cast(keys[i]); - char* friendlyTextPtr; - char friendlyTextBuf[10]; - const char* prefixText = (key < 0x80) ? "" : "0x80+"; - char32_t keyCopy = (key < 0x80) ? key : key - 0x80; - if (keyCopy >= '!' && keyCopy <= '~') { // printable - friendlyTextBuf[0] = '\''; - friendlyTextBuf[1] = keyCopy; - friendlyTextBuf[2] = '\''; - friendlyTextBuf[3] = 0; - friendlyTextPtr = friendlyTextBuf; - } else if (keyCopy == ' ') { - friendlyTextPtr = const_cast("space"); - } else if (keyCopy == 27) { - friendlyTextPtr = const_cast("ESC"); - } else if (keyCopy == 0) { - friendlyTextPtr = const_cast("NUL"); - } else if (keyCopy == 127) { - friendlyTextPtr = const_cast("DEL"); - } else { - friendlyTextBuf[0] = '^'; - friendlyTextBuf[1] = keyCopy + 0x40; - friendlyTextBuf[2] = 0; - friendlyTextPtr = friendlyTextBuf; - } - printf("%d x%02X (%s%s) ", key, key, prefixText, friendlyTextPtr); - } - printf("\x1b[1G\n"); // go to first column of new line - - // drop out of this loop on ctrl-C - if (keys[0] == ctrlChar('C')) { - printf("Leaving keyboard debugging mode (on ctrl-C)\n"); - fflush(stdout); - return -2; - } - } - } -#endif // _DEBUG_LINUX_KEYBOARD - - EscapeSequenceProcessing::thisKeyMetaCtrl = - 0; // no modifiers yet at initialDispatch - return EscapeSequenceProcessing::doDispatch( - c, EscapeSequenceProcessing::initialDispatch); -#endif // #_WIN32 -} - -/** - * Free memory used in a recent command completion session - * - * @param lc pointer to a linenoiseCompletions struct - */ -static void freeCompletions(linenoiseCompletions* lc) { - lc->completionStrings.clear(); -} - -/** - * convert {CTRL + 'A'}, {CTRL + 'a'} and {CTRL + ctrlChar( 'A' )} into - * ctrlChar( 'A' ) - * leave META alone - * - * @param c character to clean up - * @return cleaned-up character - */ -static int cleanupCtrl(int c) { - if (c & CTRL) { - int d = c & 0x1FF; - if (d >= 'a' && d <= 'z') { - c = (c + ('a' - ctrlChar('A'))) & ~CTRL; - } - if (d >= 'A' && d <= 'Z') { - c = (c + ('A' - ctrlChar('A'))) & ~CTRL; - } - if (d >= ctrlChar('A') && d <= ctrlChar('Z')) { - c = c & ~CTRL; - } - } - return c; -} - -// break characters that may precede items to be completed -static const char breakChars[] = " =+-/\\*?\"'`&<>;|@{([])}"; - -// maximum number of completions to display without asking -static const size_t completionCountCutoff = 100; - -/** - * Handle command completion, using a completionCallback() routine to provide - * possible substitutions - * This routine handles the mechanics of updating the 
user's input buffer with - * possible replacement - * of text as the user selects a proposed completion string, or cancels the - * completion attempt. - * @param pi PromptBase struct holding information about the prompt and our - * screen position - */ -int InputBuffer::completeLine(PromptBase& pi) { - linenoiseCompletions lc; - char32_t c = 0; - - // completionCallback() expects a parsable entity, so find the previous break - // character and - // extract a copy to parse. we also handle the case where tab is hit while - // not at end-of-line. - int startIndex = pos; - while (--startIndex >= 0) { - if (strchr(breakChars, buf32[startIndex])) { - break; - } - } - ++startIndex; - int itemLength = pos - startIndex; - Utf32String unicodeCopy(&buf32[startIndex], itemLength); - Utf8String parseItem(unicodeCopy); - - // get a list of completions - completionCallback(parseItem.get(), &lc); - - // if no completions, we are done - if (lc.completionStrings.size() == 0) { - beep(); - freeCompletions(&lc); - return 0; - } - - // at least one completion - int longestCommonPrefix = 0; - int displayLength = 0; - if (lc.completionStrings.size() == 1) { - longestCommonPrefix = static_cast(lc.completionStrings[0].length()); - } else { - bool keepGoing = true; - while (keepGoing) { - for (size_t j = 0; j < lc.completionStrings.size() - 1; ++j) { - char32_t c1 = lc.completionStrings[j][longestCommonPrefix]; - char32_t c2 = lc.completionStrings[j + 1][longestCommonPrefix]; - if ((0 == c1) || (0 == c2) || (c1 != c2)) { - keepGoing = false; - break; - } - } - if (keepGoing) { - ++longestCommonPrefix; - } - } - } - if (lc.completionStrings.size() != 1) { // beep if ambiguous - beep(); - } - - // if we can extend the item, extend it and return to main loop - if (longestCommonPrefix > itemLength) { - displayLength = len + longestCommonPrefix - itemLength; - if (displayLength > buflen) { - longestCommonPrefix -= displayLength - buflen; // don't overflow buffer - displayLength = buflen; // truncate the insertion - beep(); // and make a noise - } - Utf32String displayText(displayLength + 1); - memcpy(displayText.get(), buf32, sizeof(char32_t) * startIndex); - memcpy(&displayText[startIndex], &lc.completionStrings[0][0], - sizeof(char32_t) * longestCommonPrefix); - int tailIndex = startIndex + longestCommonPrefix; - memcpy(&displayText[tailIndex], &buf32[pos], - sizeof(char32_t) * (displayLength - tailIndex + 1)); - copyString32(buf32, displayText.get(), displayLength); - pos = startIndex + longestCommonPrefix; - len = displayLength; - refreshLine(pi); - return 0; - } - - // we can't complete any further, wait for second tab - do { - c = linenoiseReadChar(); - c = cleanupCtrl(c); - } while (c == static_cast(-1)); - - // if any character other than tab, pass it to the main loop - if (c != ctrlChar('I')) { - freeCompletions(&lc); - return c; - } - - // we got a second tab, maybe show list of possible completions - bool showCompletions = true; - bool onNewLine = false; - if (lc.completionStrings.size() > completionCountCutoff) { - int savePos = - pos; // move cursor to EOL to avoid overwriting the command line - pos = len; - refreshLine(pi); - pos = savePos; - printf("\nDisplay all %u possibilities? 
(y or n)", - static_cast(lc.completionStrings.size())); - fflush(stdout); - onNewLine = true; - while (c != 'y' && c != 'Y' && c != 'n' && c != 'N' && c != ctrlChar('C')) { - do { - c = linenoiseReadChar(); - c = cleanupCtrl(c); - } while (c == static_cast(-1)); - } - switch (c) { - case 'n': - case 'N': - showCompletions = false; - freeCompletions(&lc); - break; - case ctrlChar('C'): - showCompletions = false; - freeCompletions(&lc); - if (write(1, "^C", 2) == -1) return -1; // Display the ^C we got - c = 0; - break; - } - } - - // if showing the list, do it the way readline does it - bool stopList = false; - if (showCompletions) { - int longestCompletion = 0; - for (size_t j = 0; j < lc.completionStrings.size(); ++j) { - itemLength = static_cast(lc.completionStrings[j].length()); - if (itemLength > longestCompletion) { - longestCompletion = itemLength; - } - } - longestCompletion += 2; - int columnCount = pi.promptScreenColumns / longestCompletion; - if (columnCount < 1) { - columnCount = 1; - } - if (!onNewLine) { // skip this if we showed "Display all %d possibilities?" - int savePos = - pos; // move cursor to EOL to avoid overwriting the command line - pos = len; - refreshLine(pi); - pos = savePos; - } - size_t pauseRow = getScreenRows() - 1; - size_t rowCount = - (lc.completionStrings.size() + columnCount - 1) / columnCount; - for (size_t row = 0; row < rowCount; ++row) { - if (row == pauseRow) { - printf("\n--More--"); - fflush(stdout); - c = 0; - bool doBeep = false; - while (c != ' ' && c != '\r' && c != '\n' && c != 'y' && c != 'Y' && - c != 'n' && c != 'N' && c != 'q' && c != 'Q' && - c != ctrlChar('C')) { - if (doBeep) { - beep(); - } - doBeep = true; - do { - c = linenoiseReadChar(); - c = cleanupCtrl(c); - } while (c == static_cast(-1)); - } - switch (c) { - case ' ': - case 'y': - case 'Y': - printf("\r \r"); - pauseRow += getScreenRows() - 1; - break; - case '\r': - case '\n': - printf("\r \r"); - ++pauseRow; - break; - case 'n': - case 'N': - case 'q': - case 'Q': - printf("\r \r"); - stopList = true; - break; - case ctrlChar('C'): - if (write(1, "^C", 2) == -1) return -1; // Display the ^C we got - stopList = true; - break; - } - } else { - printf("\n"); - } - if (stopList) { - break; - } - for (int column = 0; column < columnCount; ++column) { - size_t index = (column * rowCount) + row; - if (index < lc.completionStrings.size()) { - itemLength = static_cast(lc.completionStrings[index].length()); - fflush(stdout); - if (write32(1, lc.completionStrings[index].get(), itemLength) == -1) - return -1; - if (((column + 1) * rowCount) + row < lc.completionStrings.size()) { - for (int k = itemLength; k < longestCompletion; ++k) { - printf(" "); - } - } - } - } - } - fflush(stdout); - freeCompletions(&lc); - } - - // display the prompt on a new line, then redisplay the input buffer - if (!stopList || c == ctrlChar('C')) { - if (write(1, "\n", 1) == -1) return 0; - } - if (!pi.write()) return 0; -#ifndef _WIN32 - // we have to generate our own newline on line wrap on Linux - if (pi.promptIndentation == 0 && pi.promptExtraLines > 0) - if (write(1, "\n", 1) == -1) return 0; -#endif - pi.promptCursorRowOffset = pi.promptExtraLines; - refreshLine(pi); - return 0; -} - -/** - * Clear the screen ONLY (no redisplay of anything) - */ -void linenoiseClearScreen(void) { -#ifdef _WIN32 - COORD coord = {0, 0}; - CONSOLE_SCREEN_BUFFER_INFO inf; - HANDLE screenHandle = GetStdHandle(STD_OUTPUT_HANDLE); - GetConsoleScreenBufferInfo(screenHandle, &inf); - SetConsoleCursorPosition(screenHandle, 
coord); - DWORD count; - FillConsoleOutputCharacterA(screenHandle, ' ', inf.dwSize.X * inf.dwSize.Y, - coord, &count); -#else - if (write(1, "\x1b[H\x1b[2J", 7) <= 0) return; -#endif -} - -void InputBuffer::clearScreen(PromptBase& pi) { - linenoiseClearScreen(); - if (!pi.write()) return; -#ifndef _WIN32 - // we have to generate our own newline on line wrap on Linux - if (pi.promptIndentation == 0 && pi.promptExtraLines > 0) - if (write(1, "\n", 1) == -1) return; -#endif - pi.promptCursorRowOffset = pi.promptExtraLines; - refreshLine(pi); -} - -/** - * Incremental history search -- take over the prompt and keyboard as the user - * types a search - * string, deletes characters from it, changes direction, and either accepts the - * found line (for - * execution or editing) or cancels. - * @param pi PromptBase struct holding information about the (old, - * static) prompt and our - * screen position - * @param startChar the character that began the search, used to set the initial - * direction - */ -int InputBuffer::incrementalHistorySearch(PromptBase& pi, int startChar) { - size_t bufferSize; - size_t ucharCount = 0; - - // if not already recalling, add the current line to the history list so we - // don't have to - // special case it - if (historyIndex == historyLen - 1) { - free(history[historyLen - 1]); - bufferSize = sizeof(char32_t) * len + 1; - unique_ptr<char[]> tempBuffer(new char[bufferSize]); - copyString32to8(tempBuffer.get(), bufferSize, buf32); - history[historyLen - 1] = strdup8(tempBuffer.get()); - } - int historyLineLength = len; - int historyLinePosition = pos; - char32_t emptyBuffer[1]; - char emptyWidths[1]; - InputBuffer empty(emptyBuffer, emptyWidths, 1); - empty.refreshLine(pi); // erase the old input first - DynamicPrompt dp(pi, (startChar == ctrlChar('R')) ?
-1 : 1); - - dp.promptPreviousLen = pi.promptPreviousLen; - dp.promptPreviousInputLen = pi.promptPreviousInputLen; - dynamicRefresh(dp, buf32, historyLineLength, - historyLinePosition); // draw user's text with our prompt - - // loop until we get an exit character - int c = 0; - bool keepLooping = true; - bool useSearchedLine = true; - bool searchAgain = false; - char32_t* activeHistoryLine = 0; - while (keepLooping) { - c = linenoiseReadChar(); - c = cleanupCtrl(c); // convert CTRL + into normal ctrl - - switch (c) { - // these characters keep the selected text but do not execute it - case ctrlChar('A'): // ctrl-A, move cursor to start of line - case HOME_KEY: - case ctrlChar('B'): // ctrl-B, move cursor left by one character - case LEFT_ARROW_KEY: - case META + 'b': // meta-B, move cursor left by one word - case META + 'B': - case CTRL + LEFT_ARROW_KEY: - case META + LEFT_ARROW_KEY: // Emacs allows Meta, bash & readline don't - case ctrlChar('D'): - case META + 'd': // meta-D, kill word to right of cursor - case META + 'D': - case ctrlChar('E'): // ctrl-E, move cursor to end of line - case END_KEY: - case ctrlChar('F'): // ctrl-F, move cursor right by one character - case RIGHT_ARROW_KEY: - case META + 'f': // meta-F, move cursor right by one word - case META + 'F': - case CTRL + RIGHT_ARROW_KEY: - case META + RIGHT_ARROW_KEY: // Emacs allows Meta, bash & readline don't - case META + ctrlChar('H'): - case ctrlChar('J'): - case ctrlChar('K'): // ctrl-K, kill from cursor to end of line - case ctrlChar('M'): - case ctrlChar('N'): // ctrl-N, recall next line in history - case ctrlChar('P'): // ctrl-P, recall previous line in history - case DOWN_ARROW_KEY: - case UP_ARROW_KEY: - case ctrlChar('T'): // ctrl-T, transpose characters - case ctrlChar( - 'U'): // ctrl-U, kill all characters to the left of the cursor - case ctrlChar('W'): - case META + 'y': // meta-Y, "yank-pop", rotate popped text - case META + 'Y': - case 127: - case DELETE_KEY: - case META + '<': // start of history - case PAGE_UP_KEY: - case META + '>': // end of history - case PAGE_DOWN_KEY: - keepLooping = false; - break; - - // these characters revert the input line to its previous state - case ctrlChar('C'): // ctrl-C, abort this line - case ctrlChar('G'): - case ctrlChar('L'): // ctrl-L, clear screen and redisplay line - keepLooping = false; - useSearchedLine = false; - if (c != ctrlChar('L')) { - c = -1; // ctrl-C and ctrl-G just abort the search and do nothing - // else - } - break; - - // these characters stay in search mode and update the display - case ctrlChar('S'): - case ctrlChar('R'): - if (dp.searchTextLen == - 0) { // if no current search text, recall previous text - if (previousSearchText.length()) { - dp.updateSearchText(previousSearchText.get()); - } - } - if ((dp.direction == 1 && c == ctrlChar('R')) || - (dp.direction == -1 && c == ctrlChar('S'))) { - dp.direction = 0 - dp.direction; // reverse direction - dp.updateSearchPrompt(); // change the prompt - } else { - searchAgain = true; // same direction, search again - } - break; - -// job control is its own thing -#ifndef _WIN32 - case ctrlChar('Z'): // ctrl-Z, job control - disableRawMode(); // Returning to Linux (whatever) shell, leave raw - // mode - raise(SIGSTOP); // Break out in mid-line - enableRawMode(); // Back from Linux shell, re-enter raw mode - { - bufferSize = historyLineLength + 1; - unique_ptr tempUnicode(new char32_t[bufferSize]); - copyString8to32(tempUnicode.get(), bufferSize, ucharCount, - history[historyIndex]); - dynamicRefresh(dp, 
tempUnicode.get(), historyLineLength, - historyLinePosition); - } - continue; - break; -#endif - - // these characters update the search string, and hence the selected input - // line - case ctrlChar('H'): // backspace/ctrl-H, delete char to left of cursor - if (dp.searchTextLen > 0) { - unique_ptr tempUnicode(new char32_t[dp.searchTextLen]); - --dp.searchTextLen; - dp.searchText[dp.searchTextLen] = 0; - copyString32(tempUnicode.get(), dp.searchText.get(), - dp.searchTextLen); - dp.updateSearchText(tempUnicode.get()); - } else { - beep(); - } - break; - - case ctrlChar('Y'): // ctrl-Y, yank killed text - break; - - default: - if (!isControlChar(c) && c <= 0x0010FFFF) { // not an action character - unique_ptr tempUnicode( - new char32_t[dp.searchTextLen + 2]); - copyString32(tempUnicode.get(), dp.searchText.get(), - dp.searchTextLen); - tempUnicode[dp.searchTextLen] = c; - tempUnicode[dp.searchTextLen + 1] = 0; - dp.updateSearchText(tempUnicode.get()); - } else { - beep(); - } - } // switch - - // if we are staying in search mode, search now - if (keepLooping) { - bufferSize = historyLineLength + 1; - if (activeHistoryLine) { - delete[] activeHistoryLine; - activeHistoryLine = nullptr; - } - activeHistoryLine = new char32_t[bufferSize]; - copyString8to32(activeHistoryLine, bufferSize, ucharCount, - history[historyIndex]); - if (dp.searchTextLen > 0) { - bool found = false; - int historySearchIndex = historyIndex; - int lineLength = static_cast(ucharCount); - int lineSearchPos = historyLinePosition; - if (searchAgain) { - lineSearchPos += dp.direction; - } - searchAgain = false; - while (true) { - while ((dp.direction > 0) ? (lineSearchPos < lineLength) - : (lineSearchPos >= 0)) { - if (strncmp32(dp.searchText.get(), - &activeHistoryLine[lineSearchPos], - dp.searchTextLen) == 0) { - found = true; - break; - } - lineSearchPos += dp.direction; - } - if (found) { - historyIndex = historySearchIndex; - historyLineLength = lineLength; - historyLinePosition = lineSearchPos; - break; - } else if ((dp.direction > 0) ? (historySearchIndex < historyLen - 1) - : (historySearchIndex > 0)) { - historySearchIndex += dp.direction; - bufferSize = strlen8(history[historySearchIndex]) + 1; - delete[] activeHistoryLine; - activeHistoryLine = nullptr; - activeHistoryLine = new char32_t[bufferSize]; - copyString8to32(activeHistoryLine, bufferSize, ucharCount, - history[historySearchIndex]); - lineLength = static_cast(ucharCount); - lineSearchPos = - (dp.direction > 0) ? 
0 : (lineLength - dp.searchTextLen); - } else { - beep(); - break; - } - }; // while - } - if (activeHistoryLine) { - delete[] activeHistoryLine; - activeHistoryLine = nullptr; - } - bufferSize = historyLineLength + 1; - activeHistoryLine = new char32_t[bufferSize]; - copyString8to32(activeHistoryLine, bufferSize, ucharCount, - history[historyIndex]); - dynamicRefresh(dp, activeHistoryLine, historyLineLength, - historyLinePosition); // draw user's text with our prompt - } - } // while - - // leaving history search, restore previous prompt, maybe make searched line - // current - PromptBase pb; - pb.promptChars = pi.promptIndentation; - pb.promptBytes = pi.promptBytes; - Utf32String tempUnicode(pb.promptBytes + 1); - - copyString32(tempUnicode.get(), &pi.promptText[pi.promptLastLinePosition], - pb.promptBytes - pi.promptLastLinePosition); - tempUnicode.initFromBuffer(); - pb.promptText = tempUnicode; - pb.promptExtraLines = 0; - pb.promptIndentation = pi.promptIndentation; - pb.promptLastLinePosition = 0; - pb.promptPreviousInputLen = historyLineLength; - pb.promptCursorRowOffset = dp.promptCursorRowOffset; - pb.promptScreenColumns = pi.promptScreenColumns; - pb.promptPreviousLen = dp.promptChars; - if (useSearchedLine && activeHistoryLine) { - historyRecallMostRecent = true; - copyString32(buf32, activeHistoryLine, buflen + 1); - len = historyLineLength; - pos = historyLinePosition; - } - if (activeHistoryLine) { - delete[] activeHistoryLine; - activeHistoryLine = nullptr; - } - dynamicRefresh(pb, buf32, len, - pos); // redraw the original prompt with current input - pi.promptPreviousInputLen = len; - pi.promptCursorRowOffset = pi.promptExtraLines + pb.promptCursorRowOffset; - previousSearchText = - dp.searchText; // save search text for possible reuse on ctrl-R ctrl-R - return c; // pass a character or -1 back to main loop -} - -static bool isCharacterAlphanumeric(char32_t testChar) { -#ifdef _WIN32 - return (iswalnum((wint_t)testChar) != 0 ? true : false); -#else - return (iswalnum(testChar) != 0 ? 
true : false); -#endif -} - -#ifndef _WIN32 -static bool gotResize = false; -#endif -static int keyType = 0; - -int InputBuffer::getInputLine(PromptBase& pi) { - keyType = 0; - - // The latest history entry is always our current buffer - if (len > 0) { - size_t bufferSize = sizeof(char32_t) * len + 1; - unique_ptr tempBuffer(new char[bufferSize]); - copyString32to8(tempBuffer.get(), bufferSize, buf32); - linenoiseHistoryAdd(tempBuffer.get()); - } else { - linenoiseHistoryAdd(""); - } - historyIndex = historyLen - 1; - historyRecallMostRecent = false; - - // display the prompt - if (!pi.write()) return -1; - -#ifndef _WIN32 - // we have to generate our own newline on line wrap on Linux - if (pi.promptIndentation == 0 && pi.promptExtraLines > 0) - if (write(1, "\n", 1) == -1) return -1; -#endif - - // the cursor starts out at the end of the prompt - pi.promptCursorRowOffset = pi.promptExtraLines; - - // kill and yank start in "other" mode - killRing.lastAction = KillRing::actionOther; - - // when history search returns control to us, we execute its terminating - // keystroke - int terminatingKeystroke = -1; - - // if there is already text in the buffer, display it first - if (len > 0) { - refreshLine(pi); - } - - // loop collecting characters, respond to line editing characters - while (true) { - int c; - if (terminatingKeystroke == -1) { - c = linenoiseReadChar(); // get a new keystroke - - keyType = 0; - if (c != 0) { - // set flag that we got some input - if (c == ctrlChar('C')) { - keyType = 1; - } else if (c == ctrlChar('D')) { - keyType = 2; - } - } - -#ifndef _WIN32 - if (c == 0 && gotResize) { - // caught a window resize event - // now redraw the prompt and line - gotResize = false; - pi.promptScreenColumns = getScreenColumns(); - dynamicRefresh(pi, buf32, len, - pos); // redraw the original prompt with current input - continue; - } -#endif - } else { - c = terminatingKeystroke; // use the terminating keystroke from search - terminatingKeystroke = -1; // clear it once we've used it - } - - c = cleanupCtrl(c); // convert CTRL + into normal ctrl - - if (c == 0) { - return len; - } - - if (c == -1) { - refreshLine(pi); - continue; - } - - if (c == -2) { - if (!pi.write()) return -1; - refreshLine(pi); - continue; - } - - // ctrl-I/tab, command completion, needs to be before switch statement - if (c == ctrlChar('I') && completionCallback) { - killRing.lastAction = KillRing::actionOther; - historyRecallMostRecent = false; - - // completeLine does the actual completion and replacement - c = completeLine(pi); - - if (c < 0) // return on error - return len; - - if (c == 0) // read next character when 0 - continue; - - // deliberate fall-through here, so we use the terminating character - } - - switch (c) { - case ctrlChar('A'): // ctrl-A, move cursor to start of line - case HOME_KEY: - killRing.lastAction = KillRing::actionOther; - pos = 0; - refreshLine(pi); - break; - - case ctrlChar('B'): // ctrl-B, move cursor left by one character - case LEFT_ARROW_KEY: - killRing.lastAction = KillRing::actionOther; - if (pos > 0) { - --pos; - refreshLine(pi); - } - break; - - case META + 'b': // meta-B, move cursor left by one word - case META + 'B': - case CTRL + LEFT_ARROW_KEY: - case META + LEFT_ARROW_KEY: // Emacs allows Meta, bash & readline don't - killRing.lastAction = KillRing::actionOther; - if (pos > 0) { - while (pos > 0 && !isCharacterAlphanumeric(buf32[pos - 1])) { - --pos; - } - while (pos > 0 && isCharacterAlphanumeric(buf32[pos - 1])) { - --pos; - } - refreshLine(pi); - } - break; - - 
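      // [Editorial note: the two-loop pattern above defines "word" movement for
      // the whole editor: first skip any non-alphanumeric characters, then skip
      // the alphanumeric run, using isCharacterAlphanumeric(). The same pattern
      // recurs below for meta-F (forward word), meta-D (kill word right),
      // meta-Backspace (kill word left) and the case-changing meta commands.]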
case ctrlChar('C'): // ctrl-C, abort this line - killRing.lastAction = KillRing::actionOther; - historyRecallMostRecent = false; - errno = EAGAIN; - --historyLen; - free(history[historyLen]); - // we need one last refresh with the cursor at the end of the line - // so we don't display the next prompt over the previous input line - pos = len; // pass len as pos for EOL - refreshLine(pi); - if (write(1, "^C", 2) == -1) return -1; // Display the ^C we got - return -1; - - case META + 'c': // meta-C, give word initial Cap - case META + 'C': - killRing.lastAction = KillRing::actionOther; - historyRecallMostRecent = false; - if (pos < len) { - while (pos < len && !isCharacterAlphanumeric(buf32[pos])) { - ++pos; - } - if (pos < len && isCharacterAlphanumeric(buf32[pos])) { - if (buf32[pos] >= 'a' && buf32[pos] <= 'z') { - buf32[pos] += 'A' - 'a'; - } - ++pos; - } - while (pos < len && isCharacterAlphanumeric(buf32[pos])) { - if (buf32[pos] >= 'A' && buf32[pos] <= 'Z') { - buf32[pos] += 'a' - 'A'; - } - ++pos; - } - refreshLine(pi); - } - break; - - // ctrl-D, delete the character under the cursor - // on an empty line, exit the shell - case ctrlChar('D'): - killRing.lastAction = KillRing::actionOther; - if (len > 0 && pos < len) { - historyRecallMostRecent = false; - memmove(buf32 + pos, buf32 + pos + 1, sizeof(char32_t) * (len - pos)); - --len; - refreshLine(pi); - } else if (len == 0) { - --historyLen; - free(history[historyLen]); - return -1; - } - break; - - case META + 'd': // meta-D, kill word to right of cursor - case META + 'D': - if (pos < len) { - historyRecallMostRecent = false; - int endingPos = pos; - while (endingPos < len && - !isCharacterAlphanumeric(buf32[endingPos])) { - ++endingPos; - } - while (endingPos < len && isCharacterAlphanumeric(buf32[endingPos])) { - ++endingPos; - } - killRing.kill(&buf32[pos], endingPos - pos, true); - memmove(buf32 + pos, buf32 + endingPos, - sizeof(char32_t) * (len - endingPos + 1)); - len -= endingPos - pos; - refreshLine(pi); - } - killRing.lastAction = KillRing::actionKill; - break; - - case ctrlChar('E'): // ctrl-E, move cursor to end of line - case END_KEY: - killRing.lastAction = KillRing::actionOther; - pos = len; - refreshLine(pi); - break; - - case ctrlChar('F'): // ctrl-F, move cursor right by one character - case RIGHT_ARROW_KEY: - killRing.lastAction = KillRing::actionOther; - if (pos < len) { - ++pos; - refreshLine(pi); - } - break; - - case META + 'f': // meta-F, move cursor right by one word - case META + 'F': - case CTRL + RIGHT_ARROW_KEY: - case META + RIGHT_ARROW_KEY: // Emacs allows Meta, bash & readline don't - killRing.lastAction = KillRing::actionOther; - if (pos < len) { - while (pos < len && !isCharacterAlphanumeric(buf32[pos])) { - ++pos; - } - while (pos < len && isCharacterAlphanumeric(buf32[pos])) { - ++pos; - } - refreshLine(pi); - } - break; - - case ctrlChar('H'): // backspace/ctrl-H, delete char to left of cursor - killRing.lastAction = KillRing::actionOther; - if (pos > 0) { - historyRecallMostRecent = false; - memmove(buf32 + pos - 1, buf32 + pos, - sizeof(char32_t) * (1 + len - pos)); - --pos; - --len; - refreshLine(pi); - } - break; - - // meta-Backspace, kill word to left of cursor - case META + ctrlChar('H'): - if (pos > 0) { - historyRecallMostRecent = false; - int startingPos = pos; - while (pos > 0 && !isCharacterAlphanumeric(buf32[pos - 1])) { - --pos; - } - while (pos > 0 && isCharacterAlphanumeric(buf32[pos - 1])) { - --pos; - } - killRing.kill(&buf32[pos], startingPos - pos, false); - memmove(buf32 + 
pos, buf32 + startingPos, - sizeof(char32_t) * (len - startingPos + 1)); - len -= startingPos - pos; - refreshLine(pi); - } - killRing.lastAction = KillRing::actionKill; - break; - - case ctrlChar('J'): // ctrl-J/linefeed/newline, accept line - case ctrlChar('M'): // ctrl-M/return/enter - killRing.lastAction = KillRing::actionOther; - // we need one last refresh with the cursor at the end of the line - // so we don't display the next prompt over the previous input line - pos = len; // pass len as pos for EOL - refreshLine(pi); - historyPreviousIndex = historyRecallMostRecent ? historyIndex : -2; - --historyLen; - free(history[historyLen]); - return len; - - case ctrlChar('K'): // ctrl-K, kill from cursor to end of line - killRing.kill(&buf32[pos], len - pos, true); - buf32[pos] = '\0'; - len = pos; - refreshLine(pi); - killRing.lastAction = KillRing::actionKill; - historyRecallMostRecent = false; - break; - - case ctrlChar('L'): // ctrl-L, clear screen and redisplay line - clearScreen(pi); - break; - - case META + 'l': // meta-L, lowercase word - case META + 'L': - killRing.lastAction = KillRing::actionOther; - if (pos < len) { - historyRecallMostRecent = false; - while (pos < len && !isCharacterAlphanumeric(buf32[pos])) { - ++pos; - } - while (pos < len && isCharacterAlphanumeric(buf32[pos])) { - if (buf32[pos] >= 'A' && buf32[pos] <= 'Z') { - buf32[pos] += 'a' - 'A'; - } - ++pos; - } - refreshLine(pi); - } - break; - - case ctrlChar('N'): // ctrl-N, recall next line in history - case ctrlChar('P'): // ctrl-P, recall previous line in history - case DOWN_ARROW_KEY: - case UP_ARROW_KEY: - killRing.lastAction = KillRing::actionOther; - // if not already recalling, add the current line to the history list so - // we don't - // have to special case it - if (historyIndex == historyLen - 1) { - free(history[historyLen - 1]); - size_t tempBufferSize = sizeof(char32_t) * len + 1; - unique_ptr tempBuffer(new char[tempBufferSize]); - copyString32to8(tempBuffer.get(), tempBufferSize, buf32); - history[historyLen - 1] = strdup8(tempBuffer.get()); - } - if (historyLen > 1) { - if (c == UP_ARROW_KEY) { - c = ctrlChar('P'); - } - if (historyPreviousIndex != -2 && c != ctrlChar('P')) { - historyIndex = - 1 + historyPreviousIndex; // emulate Windows down-arrow - } else { - historyIndex += (c == ctrlChar('P')) ? -1 : 1; - } - historyPreviousIndex = -2; - if (historyIndex < 0) { - historyIndex = 0; - break; - } else if (historyIndex >= historyLen) { - historyIndex = historyLen - 1; - break; - } - historyRecallMostRecent = true; - size_t ucharCount = 0; - copyString8to32(buf32, buflen, ucharCount, history[historyIndex]); - len = pos = static_cast(ucharCount); - refreshLine(pi); - } - break; - - case ctrlChar('R'): // ctrl-R, reverse history search - case ctrlChar('S'): // ctrl-S, forward history search - terminatingKeystroke = incrementalHistorySearch(pi, c); - break; - - case ctrlChar('T'): // ctrl-T, transpose characters - killRing.lastAction = KillRing::actionOther; - if (pos > 0 && len > 1) { - historyRecallMostRecent = false; - size_t leftCharPos = (pos == len) ? 
pos - 2 : pos - 1; - char32_t aux = buf32[leftCharPos]; - buf32[leftCharPos] = buf32[leftCharPos + 1]; - buf32[leftCharPos + 1] = aux; - if (pos != len) ++pos; - refreshLine(pi); - } - break; - - case ctrlChar( - 'U'): // ctrl-U, kill all characters to the left of the cursor - if (pos > 0) { - historyRecallMostRecent = false; - killRing.kill(&buf32[0], pos, false); - len -= pos; - memmove(buf32, buf32 + pos, sizeof(char32_t) * (len + 1)); - pos = 0; - refreshLine(pi); - } - killRing.lastAction = KillRing::actionKill; - break; - - case META + 'u': // meta-U, uppercase word - case META + 'U': - killRing.lastAction = KillRing::actionOther; - if (pos < len) { - historyRecallMostRecent = false; - while (pos < len && !isCharacterAlphanumeric(buf32[pos])) { - ++pos; - } - while (pos < len && isCharacterAlphanumeric(buf32[pos])) { - if (buf32[pos] >= 'a' && buf32[pos] <= 'z') { - buf32[pos] += 'A' - 'a'; - } - ++pos; - } - refreshLine(pi); - } - break; - - // ctrl-W, kill to whitespace (not word) to left of cursor - case ctrlChar('W'): - if (pos > 0) { - historyRecallMostRecent = false; - int startingPos = pos; - while (pos > 0 && buf32[pos - 1] == ' ') { - --pos; - } - while (pos > 0 && buf32[pos - 1] != ' ') { - --pos; - } - killRing.kill(&buf32[pos], startingPos - pos, false); - memmove(buf32 + pos, buf32 + startingPos, - sizeof(char32_t) * (len - startingPos + 1)); - len -= startingPos - pos; - refreshLine(pi); - } - killRing.lastAction = KillRing::actionKill; - break; - - case ctrlChar('Y'): // ctrl-Y, yank killed text - historyRecallMostRecent = false; - { - Utf32String* restoredText = killRing.yank(); - if (restoredText) { - bool truncated = false; - size_t ucharCount = restoredText->length(); - if (ucharCount > static_cast(buflen - len)) { - ucharCount = buflen - len; - truncated = true; - } - memmove(buf32 + pos + ucharCount, buf32 + pos, - sizeof(char32_t) * (len - pos + 1)); - memmove(buf32 + pos, restoredText->get(), - sizeof(char32_t) * ucharCount); - pos += static_cast(ucharCount); - len += static_cast(ucharCount); - refreshLine(pi); - killRing.lastAction = KillRing::actionYank; - killRing.lastYankSize = ucharCount; - if (truncated) { - beep(); - } - } else { - beep(); - } - } - break; - - case META + 'y': // meta-Y, "yank-pop", rotate popped text - case META + 'Y': - if (killRing.lastAction == KillRing::actionYank) { - historyRecallMostRecent = false; - Utf32String* restoredText = killRing.yankPop(); - if (restoredText) { - bool truncated = false; - size_t ucharCount = restoredText->length(); - if (ucharCount > - static_cast(killRing.lastYankSize + buflen - len)) { - ucharCount = killRing.lastYankSize + buflen - len; - truncated = true; - } - if (ucharCount > killRing.lastYankSize) { - memmove(buf32 + pos + ucharCount - killRing.lastYankSize, - buf32 + pos, sizeof(char32_t) * (len - pos + 1)); - memmove(buf32 + pos - killRing.lastYankSize, restoredText->get(), - sizeof(char32_t) * ucharCount); - } else { - memmove(buf32 + pos - killRing.lastYankSize, restoredText->get(), - sizeof(char32_t) * ucharCount); - memmove(buf32 + pos + ucharCount - killRing.lastYankSize, - buf32 + pos, sizeof(char32_t) * (len - pos + 1)); - } - pos += static_cast(ucharCount - killRing.lastYankSize); - len += static_cast(ucharCount - killRing.lastYankSize); - killRing.lastYankSize = ucharCount; - refreshLine(pi); - if (truncated) { - beep(); - } - break; - } - } - beep(); - break; - -#ifndef _WIN32 - case ctrlChar('Z'): // ctrl-Z, job control - disableRawMode(); // Returning to Linux (whatever) shell, leave 
raw - // mode - raise(SIGSTOP); // Break out in mid-line - enableRawMode(); // Back from Linux shell, re-enter raw mode - if (!pi.write()) break; // Redraw prompt - refreshLine(pi); // Refresh the line - break; -#endif - - // DEL, delete the character under the cursor - case 127: - case DELETE_KEY: - killRing.lastAction = KillRing::actionOther; - if (len > 0 && pos < len) { - historyRecallMostRecent = false; - memmove(buf32 + pos, buf32 + pos + 1, sizeof(char32_t) * (len - pos)); - --len; - refreshLine(pi); - } - break; - - case META + '<': // meta-<, beginning of history - case PAGE_UP_KEY: // Page Up, beginning of history - case META + '>': // meta->, end of history - case PAGE_DOWN_KEY: // Page Down, end of history - killRing.lastAction = KillRing::actionOther; - // if not already recalling, add the current line to the history list so - // we don't - // have to special case it - if (historyIndex == historyLen - 1) { - free(history[historyLen - 1]); - size_t tempBufferSize = sizeof(char32_t) * len + 1; - unique_ptr tempBuffer(new char[tempBufferSize]); - copyString32to8(tempBuffer.get(), tempBufferSize, buf32); - history[historyLen - 1] = strdup8(tempBuffer.get()); - } - if (historyLen > 1) { - historyIndex = - (c == META + '<' || c == PAGE_UP_KEY) ? 0 : historyLen - 1; - historyPreviousIndex = -2; - historyRecallMostRecent = true; - size_t ucharCount = 0; - copyString8to32(buf32, buflen, ucharCount, history[historyIndex]); - len = pos = static_cast(ucharCount); - refreshLine(pi); - } - break; - - // not one of our special characters, maybe insert it in the buffer - default: - killRing.lastAction = KillRing::actionOther; - historyRecallMostRecent = false; - if (c & (META | CTRL)) { // beep on unknown Ctrl and/or Meta keys - beep(); - break; - } - if (len < buflen) { - if (isControlChar(c)) { // don't insert control characters - beep(); - break; - } - if (len == pos) { // at end of buffer - buf32[pos] = c; - ++pos; - ++len; - buf32[len] = '\0'; - int inputLen = calculateColumnPosition(buf32, len); - if (pi.promptIndentation + inputLen < pi.promptScreenColumns) { - if (inputLen > pi.promptPreviousInputLen) - pi.promptPreviousInputLen = inputLen; - /* Avoid a full update of the line in the - * trivial case. 
*/ - if (write32(1, reinterpret_cast(&c), 1) == -1) - return -1; - } else { - refreshLine(pi); - } - } else { // not at end of buffer, have to move characters to our - // right - memmove(buf32 + pos + 1, buf32 + pos, - sizeof(char32_t) * (len - pos)); - buf32[pos] = c; - ++len; - ++pos; - buf32[len] = '\0'; - refreshLine(pi); - } - } else { - beep(); // buffer is full, beep on new characters - } - break; - } - } - return len; -} - -static string preloadedBufferContents; // used with linenoisePreloadBuffer -static string preloadErrorMessage; - -/** - * linenoisePreloadBuffer provides text to be inserted into the command buffer - * - * the provided text will be processed to be usable and will be used to preload - * the input buffer on the next call to linenoise() - * - * @param preloadText text to begin with on the next call to linenoise() - */ -void linenoisePreloadBuffer(const char* preloadText) { - if (!preloadText) { - return; - } - int bufferSize = static_cast(strlen(preloadText) + 1); - unique_ptr tempBuffer(new char[bufferSize]); - strncpy(&tempBuffer[0], preloadText, bufferSize); - - // remove characters that won't display correctly - char* pIn = &tempBuffer[0]; - char* pOut = pIn; - bool controlsStripped = false; - bool whitespaceSeen = false; - while (*pIn) { - unsigned char c = - *pIn++; // we need unsigned so chars 0x80 and above are allowed - if ('\r' == c) { // silently skip CR - continue; - } - if ('\n' == c || '\t' == c) { // note newline or tab - whitespaceSeen = true; - continue; - } - if (isControlChar( - c)) { // remove other control characters, flag for message - controlsStripped = true; - *pOut++ = ' '; - continue; - } - if (whitespaceSeen) { // convert whitespace to a single space - *pOut++ = ' '; - whitespaceSeen = false; - } - *pOut++ = c; - } - *pOut = 0; - int processedLength = static_cast(pOut - tempBuffer.get()); - bool lineTruncated = false; - if (processedLength > (LINENOISE_MAX_LINE - 1)) { - lineTruncated = true; - tempBuffer[LINENOISE_MAX_LINE - 1] = 0; - } - preloadedBufferContents = tempBuffer.get(); - if (controlsStripped) { - preloadErrorMessage += - " [Edited line: control characters were converted to spaces]\n"; - } - if (lineTruncated) { - preloadErrorMessage += " [Edited line: the line length was reduced from "; - char buf[128]; - snprintf(buf, sizeof(buf), "%d to %d]\n", processedLength, - (LINENOISE_MAX_LINE - 1)); - preloadErrorMessage += buf; - } -} - -/** - * linenoise is a readline replacement. 
- * - * call it with a prompt to display and it will return a line of input from the - * user - * - * @param prompt text of prompt to display to the user - * @return the returned string belongs to the caller on return and must be - * freed to prevent - * memory leaks - */ -char* linenoise(const char* prompt) { -#ifndef _WIN32 - gotResize = false; -#endif - if (isatty(STDIN_FILENO)) { // input is from a terminal - char32_t buf32[LINENOISE_MAX_LINE]; - char charWidths[LINENOISE_MAX_LINE]; - if (!preloadErrorMessage.empty()) { - printf("%s", preloadErrorMessage.c_str()); - fflush(stdout); - preloadErrorMessage.clear(); - } - PromptInfo pi(prompt, getScreenColumns()); - if (isUnsupportedTerm()) { - if (!pi.write()) return 0; - fflush(stdout); - if (preloadedBufferContents.empty()) { - unique_ptr buf8(new char[LINENOISE_MAX_LINE]); - if (fgets(buf8.get(), LINENOISE_MAX_LINE, stdin) == NULL) { - return NULL; - } - size_t len = strlen(buf8.get()); - while (len && (buf8[len - 1] == '\n' || buf8[len - 1] == '\r')) { - --len; - buf8[len] = '\0'; - } - return strdup(buf8.get()); // caller must free buffer - } else { - char* buf8 = strdup(preloadedBufferContents.c_str()); - preloadedBufferContents.clear(); - return buf8; // caller must free buffer - } - } else { - if (enableRawMode() == -1) { - return NULL; - } - InputBuffer ib(buf32, charWidths, LINENOISE_MAX_LINE); - if (!preloadedBufferContents.empty()) { - ib.preloadBuffer(preloadedBufferContents.c_str()); - preloadedBufferContents.clear(); - } - int count = ib.getInputLine(pi); - disableRawMode(); - printf("\n"); - if (count == -1) { - return NULL; - } - size_t bufferSize = sizeof(char32_t) * ib.length() + 1; - unique_ptr buf8(new char[bufferSize]); - copyString32to8(buf8.get(), bufferSize, buf32); - return strdup(buf8.get()); // caller must free buffer - } - } else { // input not from a terminal, we should work with piped input, i.e. - // redirected stdin - unique_ptr buf8(new char[LINENOISE_MAX_LINE]); - if (fgets(buf8.get(), LINENOISE_MAX_LINE, stdin) == NULL) { - return NULL; - } - - // if fgets() gave us the newline, remove it - int count = static_cast(strlen(buf8.get())); - if (count > 0 && buf8[count - 1] == '\n') { - --count; - buf8[count] = '\0'; - } - return strdup(buf8.get()); // caller must free buffer - } -} - -/* Register a callback function to be called for tab-completion. 
*/ -void linenoiseSetCompletionCallback(linenoiseCompletionCallback* fn) { - completionCallback = fn; -} - -void linenoiseAddCompletion(linenoiseCompletions* lc, const char* str) { - lc->completionStrings.push_back(Utf32String(str)); -} - -int linenoiseHistoryAdd(const char* line) { - if (historyMaxLen == 0) { - return 0; - } - if (history == NULL) { - history = - reinterpret_cast(malloc(sizeof(char8_t*) * historyMaxLen)); - if (history == NULL) { - return 0; - } - memset(history, 0, (sizeof(char*) * historyMaxLen)); - } - char8_t* linecopy = strdup8(line); - if (!linecopy) { - return 0; - } - - // convert newlines in multi-line code to spaces before storing - char8_t* p = linecopy; - while (*p) { - if (*p == '\n') { - *p = ' '; - } - ++p; - } - - // prevent duplicate history entries - if (historyLen > 0 && history[historyLen - 1] != nullptr && - strcmp(reinterpret_cast(history[historyLen - 1]), - reinterpret_cast(linecopy)) == 0) { - free(linecopy); - return 0; - } - - if (historyLen == historyMaxLen) { - free(history[0]); - memmove(history, history + 1, sizeof(char*) * (historyMaxLen - 1)); - --historyLen; - if (--historyPreviousIndex < -1) { - historyPreviousIndex = -2; - } - } - - history[historyLen] = linecopy; - ++historyLen; - return 1; -} - -int linenoiseHistorySetMaxLen(int len) { - if (len < 1) { - return 0; - } - if (history) { - int tocopy = historyLen; - char8_t** newHistory = - reinterpret_cast(malloc(sizeof(char8_t*) * len)); - if (newHistory == NULL) { - return 0; - } - if (len < tocopy) { - tocopy = len; - } - memcpy(newHistory, history + historyMaxLen - tocopy, - sizeof(char8_t*) * tocopy); - free(history); - history = newHistory; - } - historyMaxLen = len; - if (historyLen > historyMaxLen) { - historyLen = historyMaxLen; - } - return 1; -} - -/* Fetch a line of the history by (zero-based) index. If the requested - * line does not exist, NULL is returned. The return value is a heap-allocated - * copy of the line, and the caller is responsible for de-allocating it. */ -char* linenoiseHistoryLine(int index) { - if (index < 0 || index >= historyLen) return NULL; - - return strdup(reinterpret_cast(history[index])); -} - -/* Save the history in the specified file. On success 0 is returned - * otherwise -1 is returned. */ -int linenoiseHistorySave(const char* filename) { -#if _WIN32 - FILE* fp = fopen(filename, "wt"); -#else - int fd = open(filename, O_CREAT | O_TRUNC | O_WRONLY, S_IRUSR | S_IWUSR); - - if (fd < 0) { - return -1; - } - - FILE* fp = fdopen(fd, "wt"); -#endif - - if (fp == NULL) { - return -1; - } - - for (int j = 0; j < historyLen; ++j) { - if (history[j][0] != '\0') { - fprintf(fp, "%s\n", history[j]); - } - } - - fclose(fp); - - return 0; -} - -/* Load the history from the specified file. If the file does not exist - * zero is returned and no operation is performed. - * - * If the file exists and the operation succeeded 0 is returned, otherwise - * on error -1 is returned. */ -int linenoiseHistoryLoad(const char* filename) { - FILE* fp = fopen(filename, "rt"); - if (fp == NULL) { - return -1; - } - - char buf[LINENOISE_MAX_LINE]; - while (fgets(buf, LINENOISE_MAX_LINE, fp) != NULL) { - char* p = strchr(buf, '\r'); - if (!p) { - p = strchr(buf, '\n'); - } - if (p) { - *p = '\0'; - } - if (p != buf) { - linenoiseHistoryAdd(buf); - } - } - fclose(fp); - return 0; -} - -/* Set if to use or not the multi line mode. 
*/ -/* note that this is a stub only, as linenoise-ng always multi-line */ -void linenoiseSetMultiLine(int) {} - -/* This special mode is used by linenoise in order to print scan codes - * on screen for debugging / development purposes. It is implemented - * by the linenoise_example program using the --keycodes option. */ -void linenoisePrintKeyCodes(void) { - char quit[4]; - - printf( - "Linenoise key codes debugging mode.\n" - "Press keys to see scan codes. Type 'quit' at any time to exit.\n"); - if (enableRawMode() == -1) return; - memset(quit, ' ', 4); - while (1) { - char c; - int nread; - -#if _WIN32 - nread = _read(STDIN_FILENO, &c, 1); -#else - nread = read(STDIN_FILENO, &c, 1); -#endif - if (nread <= 0) continue; - memmove(quit, quit + 1, sizeof(quit) - 1); /* shift string to left. */ - quit[sizeof(quit) - 1] = c; /* Insert current char on the right. */ - if (memcmp(quit, "quit", sizeof(quit)) == 0) break; - - printf("'%c' %02x (%d) (type quit to exit)\n", isprint(c) ? c : '?', (int)c, - (int)c); - printf("\r"); /* Go left edge manually, we are in raw mode. */ - fflush(stdout); - } - disableRawMode(); -} - -#ifndef _WIN32 -static void WindowSizeChanged(int) { - // do nothing here but setting this flag - gotResize = true; -} -#endif - -int linenoiseInstallWindowChangeHandler(void) { -#ifndef _WIN32 - struct sigaction sa; - sigemptyset(&sa.sa_mask); - sa.sa_flags = 0; - sa.sa_handler = &WindowSizeChanged; - - if (sigaction(SIGWINCH, &sa, nullptr) == -1) { - return errno; - } -#endif - return 0; -} - -int linenoiseKeyType(void) { - return keyType; -} diff --git a/src/linenoise/linenoise.h b/src/linenoise/linenoise.h deleted file mode 100644 index 3a8eb9f7ee6..00000000000 --- a/src/linenoise/linenoise.h +++ /dev/null @@ -1,73 +0,0 @@ -/* linenoise.h -- guerrilla line editing library against the idea that a - * line editing lib needs to be 20,000 lines of C code. - * - * See linenoise.c for more information. - * - * Copyright (c) 2010, Salvatore Sanfilippo - * Copyright (c) 2010, Pieter Noordhuis - * - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * * Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * * Neither the name of Redis nor the names of its contributors may be used - * to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -#ifndef __LINENOISE_H -#define __LINENOISE_H - -#define LINENOISE_VERSION "1.0.0" -#define LINENOISE_VERSION_MAJOR 1 -#define LINENOISE_VERSION_MINOR 1 - -#ifdef __cplusplus -extern "C" { -#endif - -typedef struct linenoiseCompletions linenoiseCompletions; - -typedef void(linenoiseCompletionCallback)(const char*, linenoiseCompletions*); -void linenoiseSetCompletionCallback(linenoiseCompletionCallback* fn); -void linenoiseAddCompletion(linenoiseCompletions* lc, const char* str); - -char* linenoise(const char* prompt); -void linenoisePreloadBuffer(const char* preloadText); -int linenoiseHistoryAdd(const char* line); -int linenoiseHistorySetMaxLen(int len); -char* linenoiseHistoryLine(int index); -int linenoiseHistorySave(const char* filename); -int linenoiseHistoryLoad(const char* filename); -void linenoiseHistoryFree(void); -void linenoiseClearScreen(void); -void linenoiseSetMultiLine(int ml); -void linenoisePrintKeyCodes(void); -/* the following are extensions to the original linenoise API */ -int linenoiseInstallWindowChangeHandler(void); -/* returns type of key pressed: 1 = CTRL-C, 2 = CTRL-D, 0 = other */ -int linenoiseKeyType(void); - -#ifdef __cplusplus -} -#endif - -#endif /* __LINENOISE_H */ diff --git a/src/linenoise/wcwidth.cpp b/src/linenoise/wcwidth.cpp deleted file mode 100644 index deec0ba6b57..00000000000 --- a/src/linenoise/wcwidth.cpp +++ /dev/null @@ -1,315 +0,0 @@ -/* - * This is an implementation of wcwidth() and wcswidth() (defined in - * IEEE Std 1002.1-2001) for Unicode. - * - * http://www.opengroup.org/onlinepubs/007904975/functions/wcwidth.html - * http://www.opengroup.org/onlinepubs/007904975/functions/wcswidth.html - * - * In fixed-width output devices, Latin characters all occupy a single - * "cell" position of equal width, whereas ideographic CJK characters - * occupy two such cells. Interoperability between terminal-line - * applications and (teletype-style) character terminals using the - * UTF-8 encoding requires agreement on which character should advance - * the cursor by how many cell positions. No established formal - * standards exist at present on which Unicode character shall occupy - * how many cell positions on character terminals. These routines are - * a first attempt of defining such behavior based on simple rules - * applied to data provided by the Unicode Consortium. - * - * For some graphical characters, the Unicode standard explicitly - * defines a character-cell width via the definition of the East Asian - * FullWidth (F), Wide (W), Half-width (H), and Narrow (Na) classes. - * In all these cases, there is no ambiguity about which width a - * terminal shall use. For characters in the East Asian Ambiguous (A) - * class, the width choice depends purely on a preference of backward - * compatibility with either historic CJK or Western practice. 
- * Choosing single-width for these characters is easy to justify as - * the appropriate long-term solution, as the CJK practice of - * displaying these characters as double-width comes from historic - * implementation simplicity (8-bit encoded characters were displayed - * single-width and 16-bit ones double-width, even for Greek, - * Cyrillic, etc.) and not any typographic considerations. - * - * Much less clear is the choice of width for the Not East Asian - * (Neutral) class. Existing practice does not dictate a width for any - * of these characters. It would nevertheless make sense - * typographically to allocate two character cells to characters such - * as for instance EM SPACE or VOLUME INTEGRAL, which cannot be - * represented adequately with a single-width glyph. The following - * routines at present merely assign a single-cell width to all - * neutral characters, in the interest of simplicity. This is not - * entirely satisfactory and should be reconsidered before - * establishing a formal standard in this area. At the moment, the - * decision which Not East Asian (Neutral) characters should be - * represented by double-width glyphs cannot yet be answered by - * applying a simple rule from the Unicode database content. Setting - * up a proper standard for the behavior of UTF-8 character terminals - * will require a careful analysis not only of each Unicode character, - * but also of each presentation form, something the author of these - * routines has avoided to do so far. - * - * http://www.unicode.org/unicode/reports/tr11/ - * - * Markus Kuhn -- 2007-05-26 (Unicode 5.0) - * - * Permission to use, copy, modify, and distribute this software - * for any purpose and without fee is hereby granted. The author - * disclaims all warranties with regard to this software. - * - * Latest version: http://www.cl.cam.ac.uk/~mgk25/ucs/wcwidth.c - */ - -#include -#include -#include - -namespace linenoise_ng { - -struct interval { - char32_t first; - char32_t last; -}; - -/* auxiliary function for binary search in interval table */ -static int bisearch(char32_t ucs, const struct interval *table, int max) { - int min = 0; - int mid; - - if (ucs < table[0].first || ucs > table[max].last) - return 0; - while (max >= min) { - mid = (min + max) / 2; - if (ucs > table[mid].last) - min = mid + 1; - else if (ucs < table[mid].first) - max = mid - 1; - else - return 1; - } - - return 0; -} - - -/* The following two functions define the column width of an ISO 10646 - * character as follows: - * - * - The null character (U+0000) has a column width of 0. - * - * - Other C0/C1 control characters and DEL will lead to a return - * value of -1. - * - * - Non-spacing and enclosing combining characters (general - * category code Mn or Me in the Unicode database) have a - * column width of 0. - * - * - SOFT HYPHEN (U+00AD) has a column width of 1. - * - * - Other format characters (general category code Cf in the Unicode - * database) and ZERO WIDTH SPACE (U+200B) have a column width of 0. - * - * - Hangul Jamo medial vowels and final consonants (U+1160-U+11FF) - * have a column width of 0. - * - * - Spacing characters in the East Asian Wide (W) or East Asian - * Full-width (F) category as defined in Unicode Technical - * Report #11 have a column width of 2. - * - * - All remaining characters (including all printable - * ISO 8859-1 and WGL4 characters, Unicode control characters, - * etc.) have a column width of 1. - * - * This implementation assumes that wchar_t characters are encoded - * in ISO 10646. 
- */ - -int mk_wcwidth(char32_t ucs) -{ - /* sorted list of non-overlapping intervals of non-spacing characters */ - /* generated by "uniset +cat=Me +cat=Mn +cat=Cf -00AD +1160-11FF +200B c" */ - static const struct interval combining[] = { - { 0x0300, 0x036F }, { 0x0483, 0x0486 }, { 0x0488, 0x0489 }, - { 0x0591, 0x05BD }, { 0x05BF, 0x05BF }, { 0x05C1, 0x05C2 }, - { 0x05C4, 0x05C5 }, { 0x05C7, 0x05C7 }, { 0x0600, 0x0603 }, - { 0x0610, 0x0615 }, { 0x064B, 0x065E }, { 0x0670, 0x0670 }, - { 0x06D6, 0x06E4 }, { 0x06E7, 0x06E8 }, { 0x06EA, 0x06ED }, - { 0x070F, 0x070F }, { 0x0711, 0x0711 }, { 0x0730, 0x074A }, - { 0x07A6, 0x07B0 }, { 0x07EB, 0x07F3 }, { 0x0901, 0x0902 }, - { 0x093C, 0x093C }, { 0x0941, 0x0948 }, { 0x094D, 0x094D }, - { 0x0951, 0x0954 }, { 0x0962, 0x0963 }, { 0x0981, 0x0981 }, - { 0x09BC, 0x09BC }, { 0x09C1, 0x09C4 }, { 0x09CD, 0x09CD }, - { 0x09E2, 0x09E3 }, { 0x0A01, 0x0A02 }, { 0x0A3C, 0x0A3C }, - { 0x0A41, 0x0A42 }, { 0x0A47, 0x0A48 }, { 0x0A4B, 0x0A4D }, - { 0x0A70, 0x0A71 }, { 0x0A81, 0x0A82 }, { 0x0ABC, 0x0ABC }, - { 0x0AC1, 0x0AC5 }, { 0x0AC7, 0x0AC8 }, { 0x0ACD, 0x0ACD }, - { 0x0AE2, 0x0AE3 }, { 0x0B01, 0x0B01 }, { 0x0B3C, 0x0B3C }, - { 0x0B3F, 0x0B3F }, { 0x0B41, 0x0B43 }, { 0x0B4D, 0x0B4D }, - { 0x0B56, 0x0B56 }, { 0x0B82, 0x0B82 }, { 0x0BC0, 0x0BC0 }, - { 0x0BCD, 0x0BCD }, { 0x0C3E, 0x0C40 }, { 0x0C46, 0x0C48 }, - { 0x0C4A, 0x0C4D }, { 0x0C55, 0x0C56 }, { 0x0CBC, 0x0CBC }, - { 0x0CBF, 0x0CBF }, { 0x0CC6, 0x0CC6 }, { 0x0CCC, 0x0CCD }, - { 0x0CE2, 0x0CE3 }, { 0x0D41, 0x0D43 }, { 0x0D4D, 0x0D4D }, - { 0x0DCA, 0x0DCA }, { 0x0DD2, 0x0DD4 }, { 0x0DD6, 0x0DD6 }, - { 0x0E31, 0x0E31 }, { 0x0E34, 0x0E3A }, { 0x0E47, 0x0E4E }, - { 0x0EB1, 0x0EB1 }, { 0x0EB4, 0x0EB9 }, { 0x0EBB, 0x0EBC }, - { 0x0EC8, 0x0ECD }, { 0x0F18, 0x0F19 }, { 0x0F35, 0x0F35 }, - { 0x0F37, 0x0F37 }, { 0x0F39, 0x0F39 }, { 0x0F71, 0x0F7E }, - { 0x0F80, 0x0F84 }, { 0x0F86, 0x0F87 }, { 0x0F90, 0x0F97 }, - { 0x0F99, 0x0FBC }, { 0x0FC6, 0x0FC6 }, { 0x102D, 0x1030 }, - { 0x1032, 0x1032 }, { 0x1036, 0x1037 }, { 0x1039, 0x1039 }, - { 0x1058, 0x1059 }, { 0x1160, 0x11FF }, { 0x135F, 0x135F }, - { 0x1712, 0x1714 }, { 0x1732, 0x1734 }, { 0x1752, 0x1753 }, - { 0x1772, 0x1773 }, { 0x17B4, 0x17B5 }, { 0x17B7, 0x17BD }, - { 0x17C6, 0x17C6 }, { 0x17C9, 0x17D3 }, { 0x17DD, 0x17DD }, - { 0x180B, 0x180D }, { 0x18A9, 0x18A9 }, { 0x1920, 0x1922 }, - { 0x1927, 0x1928 }, { 0x1932, 0x1932 }, { 0x1939, 0x193B }, - { 0x1A17, 0x1A18 }, { 0x1B00, 0x1B03 }, { 0x1B34, 0x1B34 }, - { 0x1B36, 0x1B3A }, { 0x1B3C, 0x1B3C }, { 0x1B42, 0x1B42 }, - { 0x1B6B, 0x1B73 }, { 0x1DC0, 0x1DCA }, { 0x1DFE, 0x1DFF }, - { 0x200B, 0x200F }, { 0x202A, 0x202E }, { 0x2060, 0x2063 }, - { 0x206A, 0x206F }, { 0x20D0, 0x20EF }, { 0x302A, 0x302F }, - { 0x3099, 0x309A }, { 0xA806, 0xA806 }, { 0xA80B, 0xA80B }, - { 0xA825, 0xA826 }, { 0xFB1E, 0xFB1E }, { 0xFE00, 0xFE0F }, - { 0xFE20, 0xFE23 }, { 0xFEFF, 0xFEFF }, { 0xFFF9, 0xFFFB }, - { 0x10A01, 0x10A03 }, { 0x10A05, 0x10A06 }, { 0x10A0C, 0x10A0F }, - { 0x10A38, 0x10A3A }, { 0x10A3F, 0x10A3F }, { 0x1D167, 0x1D169 }, - { 0x1D173, 0x1D182 }, { 0x1D185, 0x1D18B }, { 0x1D1AA, 0x1D1AD }, - { 0x1D242, 0x1D244 }, { 0xE0001, 0xE0001 }, { 0xE0020, 0xE007F }, - { 0xE0100, 0xE01EF } - }; - - /* test for 8-bit control characters */ - if (ucs == 0) - return 0; - if (ucs < 32 || (ucs >= 0x7f && ucs < 0xa0)) - return -1; - - /* binary search in table of non-spacing characters */ - if (bisearch(ucs, combining, - sizeof(combining) / sizeof(struct interval) - 1)) - return 0; - - /* if we arrive here, ucs is not a combining or C0/C1 
control character */ - - return 1 + - (ucs >= 0x1100 && - (ucs <= 0x115f || /* Hangul Jamo init. consonants */ - ucs == 0x2329 || ucs == 0x232a || - (ucs >= 0x2e80 && ucs <= 0xa4cf && - ucs != 0x303f) || /* CJK ... Yi */ - (ucs >= 0xac00 && ucs <= 0xd7a3) || /* Hangul Syllables */ - (ucs >= 0xf900 && ucs <= 0xfaff) || /* CJK Compatibility Ideographs */ - (ucs >= 0xfe10 && ucs <= 0xfe19) || /* Vertical forms */ - (ucs >= 0xfe30 && ucs <= 0xfe6f) || /* CJK Compatibility Forms */ - (ucs >= 0xff00 && ucs <= 0xff60) || /* Fullwidth Forms */ - (ucs >= 0xffe0 && ucs <= 0xffe6) || - (ucs >= 0x20000 && ucs <= 0x2fffd) || - (ucs >= 0x30000 && ucs <= 0x3fffd))); -} - - -int mk_wcswidth(const char32_t* pwcs, size_t n) -{ - int w, width = 0; - - for (;*pwcs && n-- > 0; pwcs++) - if ((w = mk_wcwidth(*pwcs)) < 0) - return -1; - else - width += w; - - return width; -} - - -/* - * The following functions are the same as mk_wcwidth() and - * mk_wcswidth(), except that spacing characters in the East Asian - * Ambiguous (A) category as defined in Unicode Technical Report #11 - * have a column width of 2. This variant might be useful for users of - * CJK legacy encodings who want to migrate to UCS without changing - * the traditional terminal character-width behaviour. It is not - * otherwise recommended for general use. - */ -int mk_wcwidth_cjk(wchar_t ucs) -{ - /* sorted list of non-overlapping intervals of East Asian Ambiguous - * characters, generated by "uniset +WIDTH-A -cat=Me -cat=Mn -cat=Cf c" */ - static const struct interval ambiguous[] = { - { 0x00A1, 0x00A1 }, { 0x00A4, 0x00A4 }, { 0x00A7, 0x00A8 }, - { 0x00AA, 0x00AA }, { 0x00AE, 0x00AE }, { 0x00B0, 0x00B4 }, - { 0x00B6, 0x00BA }, { 0x00BC, 0x00BF }, { 0x00C6, 0x00C6 }, - { 0x00D0, 0x00D0 }, { 0x00D7, 0x00D8 }, { 0x00DE, 0x00E1 }, - { 0x00E6, 0x00E6 }, { 0x00E8, 0x00EA }, { 0x00EC, 0x00ED }, - { 0x00F0, 0x00F0 }, { 0x00F2, 0x00F3 }, { 0x00F7, 0x00FA }, - { 0x00FC, 0x00FC }, { 0x00FE, 0x00FE }, { 0x0101, 0x0101 }, - { 0x0111, 0x0111 }, { 0x0113, 0x0113 }, { 0x011B, 0x011B }, - { 0x0126, 0x0127 }, { 0x012B, 0x012B }, { 0x0131, 0x0133 }, - { 0x0138, 0x0138 }, { 0x013F, 0x0142 }, { 0x0144, 0x0144 }, - { 0x0148, 0x014B }, { 0x014D, 0x014D }, { 0x0152, 0x0153 }, - { 0x0166, 0x0167 }, { 0x016B, 0x016B }, { 0x01CE, 0x01CE }, - { 0x01D0, 0x01D0 }, { 0x01D2, 0x01D2 }, { 0x01D4, 0x01D4 }, - { 0x01D6, 0x01D6 }, { 0x01D8, 0x01D8 }, { 0x01DA, 0x01DA }, - { 0x01DC, 0x01DC }, { 0x0251, 0x0251 }, { 0x0261, 0x0261 }, - { 0x02C4, 0x02C4 }, { 0x02C7, 0x02C7 }, { 0x02C9, 0x02CB }, - { 0x02CD, 0x02CD }, { 0x02D0, 0x02D0 }, { 0x02D8, 0x02DB }, - { 0x02DD, 0x02DD }, { 0x02DF, 0x02DF }, { 0x0391, 0x03A1 }, - { 0x03A3, 0x03A9 }, { 0x03B1, 0x03C1 }, { 0x03C3, 0x03C9 }, - { 0x0401, 0x0401 }, { 0x0410, 0x044F }, { 0x0451, 0x0451 }, - { 0x2010, 0x2010 }, { 0x2013, 0x2016 }, { 0x2018, 0x2019 }, - { 0x201C, 0x201D }, { 0x2020, 0x2022 }, { 0x2024, 0x2027 }, - { 0x2030, 0x2030 }, { 0x2032, 0x2033 }, { 0x2035, 0x2035 }, - { 0x203B, 0x203B }, { 0x203E, 0x203E }, { 0x2074, 0x2074 }, - { 0x207F, 0x207F }, { 0x2081, 0x2084 }, { 0x20AC, 0x20AC }, - { 0x2103, 0x2103 }, { 0x2105, 0x2105 }, { 0x2109, 0x2109 }, - { 0x2113, 0x2113 }, { 0x2116, 0x2116 }, { 0x2121, 0x2122 }, - { 0x2126, 0x2126 }, { 0x212B, 0x212B }, { 0x2153, 0x2154 }, - { 0x215B, 0x215E }, { 0x2160, 0x216B }, { 0x2170, 0x2179 }, - { 0x2190, 0x2199 }, { 0x21B8, 0x21B9 }, { 0x21D2, 0x21D2 }, - { 0x21D4, 0x21D4 }, { 0x21E7, 0x21E7 }, { 0x2200, 0x2200 }, - { 0x2202, 0x2203 }, { 0x2207, 0x2208 }, { 0x220B, 0x220B }, - { 0x220F, 
0x220F }, { 0x2211, 0x2211 }, { 0x2215, 0x2215 }, - { 0x221A, 0x221A }, { 0x221D, 0x2220 }, { 0x2223, 0x2223 }, - { 0x2225, 0x2225 }, { 0x2227, 0x222C }, { 0x222E, 0x222E }, - { 0x2234, 0x2237 }, { 0x223C, 0x223D }, { 0x2248, 0x2248 }, - { 0x224C, 0x224C }, { 0x2252, 0x2252 }, { 0x2260, 0x2261 }, - { 0x2264, 0x2267 }, { 0x226A, 0x226B }, { 0x226E, 0x226F }, - { 0x2282, 0x2283 }, { 0x2286, 0x2287 }, { 0x2295, 0x2295 }, - { 0x2299, 0x2299 }, { 0x22A5, 0x22A5 }, { 0x22BF, 0x22BF }, - { 0x2312, 0x2312 }, { 0x2460, 0x24E9 }, { 0x24EB, 0x254B }, - { 0x2550, 0x2573 }, { 0x2580, 0x258F }, { 0x2592, 0x2595 }, - { 0x25A0, 0x25A1 }, { 0x25A3, 0x25A9 }, { 0x25B2, 0x25B3 }, - { 0x25B6, 0x25B7 }, { 0x25BC, 0x25BD }, { 0x25C0, 0x25C1 }, - { 0x25C6, 0x25C8 }, { 0x25CB, 0x25CB }, { 0x25CE, 0x25D1 }, - { 0x25E2, 0x25E5 }, { 0x25EF, 0x25EF }, { 0x2605, 0x2606 }, - { 0x2609, 0x2609 }, { 0x260E, 0x260F }, { 0x2614, 0x2615 }, - { 0x261C, 0x261C }, { 0x261E, 0x261E }, { 0x2640, 0x2640 }, - { 0x2642, 0x2642 }, { 0x2660, 0x2661 }, { 0x2663, 0x2665 }, - { 0x2667, 0x266A }, { 0x266C, 0x266D }, { 0x266F, 0x266F }, - { 0x273D, 0x273D }, { 0x2776, 0x277F }, { 0xE000, 0xF8FF }, - { 0xFFFD, 0xFFFD }, { 0xF0000, 0xFFFFD }, { 0x100000, 0x10FFFD } - }; - - /* binary search in table of non-spacing characters */ - if (bisearch(ucs, ambiguous, - sizeof(ambiguous) / sizeof(struct interval) - 1)) - return 2; - - return mk_wcwidth(ucs); -} - - -int mk_wcswidth_cjk(const wchar_t *pwcs, size_t n) -{ - int w, width = 0; - - for (;*pwcs && n-- > 0; pwcs++) - if ((w = mk_wcwidth_cjk(*pwcs)) < 0) - return -1; - else - width += w; - - return width; -} - -} diff --git a/src/nix/local.mk b/src/nix/local.mk index bdcca33d2a6..40a0e8d6bde 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -4,7 +4,6 @@ nix_DIR := $(d) nix_SOURCES := \ $(wildcard $(d)/*.cc) \ - $(wildcard src/linenoise/*.cpp) \ $(wildcard src/build-remote/*.cc) \ $(wildcard src/nix-build/*.cc) \ $(wildcard src/nix-channel/*.cc) \ @@ -18,7 +17,7 @@ nix_SOURCES := \ nix_LIBS = libexpr libmain libstore libutil libformat -nix_LDFLAGS = -pthread $(SODIUM_LIBS) +nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(foreach name, \ nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \ diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 1bbe256b2d8..7ce37077021 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -1,8 +1,12 @@ #include #include +#include +#include #include +#include + #include "shared.hh" #include "eval.hh" #include "eval-inline.hh" @@ -15,8 +19,6 @@ #include "command.hh" #include "finally.hh" -#include "src/linenoise/linenoise.h" - namespace nix { #define ESC_RED "\033[31m" @@ -118,17 +120,53 @@ NixRepl::NixRepl(const Strings & searchPath, nix::ref store) NixRepl::~NixRepl() { - linenoiseHistorySave(historyFile.c_str()); + write_history(historyFile.c_str()); } - static NixRepl * curRepl; // ugly -static void completionCallback(const char * s, linenoiseCompletions *lc) -{ - /* Otherwise, return all symbols that start with the prefix. 
*/ - for (auto & c : curRepl->completePrefix(s)) - linenoiseAddCompletion(lc, c.c_str()); +static char * completionCallback(char * s, int *match) { + auto possible = curRepl->completePrefix(s); + if (possible.size() == 1) { + *match = 1; + auto *res = strdup(possible.begin()->c_str() + strlen(s)); + if (!res) throw Error("allocation failure"); + return res; + } + + *match = 0; + return nullptr; +} + +static int listPossibleCallback(char *s, char ***avp) { + auto possible = curRepl->completePrefix(s); + + if (possible.size() > (INT_MAX / sizeof(char*))) + throw Error("too many completions"); + + int ac = 0; + char **vp = nullptr; + + auto check = [&](auto *p) { + if (!p) { + if (vp) { + while (--ac >= 0) + free(vp[ac]); + free(vp); + } + throw Error("allocation failure"); + } + return p; + }; + + vp = check((char **)malloc(possible.size() * sizeof(char*))); + + for (auto & p : possible) + vp[ac++] = check(strdup(p.c_str())); + + *avp = vp; + + return ac; } @@ -143,12 +181,16 @@ void NixRepl::mainLoop(const std::vector & files) reloadFiles(); if (!loadedFiles.empty()) std::cout << std::endl; + // Allow nix-repl specific settings in .inputrc + rl_readline_name = "nix-repl"; createDirs(dirOf(historyFile)); - linenoiseHistorySetMaxLen(1000); - linenoiseHistoryLoad(historyFile.c_str()); - + el_hist_size = 1000; + read_history(historyFile.c_str()); + // rl_initialize(); + // linenoiseSetCompletionCallback(completionCallback); curRepl = this; - linenoiseSetCompletionCallback(completionCallback); + rl_set_complete_func(completionCallback); + rl_set_list_possib_func(listPossibleCallback); std::string input; @@ -174,12 +216,6 @@ void NixRepl::mainLoop(const std::vector & files) printMsg(lvlError, format(error + "%1%%2%") % (settings.showTrace ? e.prefix() : "") % e.msg()); } - if (input.size() > 0) { - // Remove trailing newline before adding to history - input.erase(input.size() - 1); - linenoiseHistoryAdd(input.c_str()); - } - // We handled the current input fully, so we should clear it // and read brand new input. 
input.clear(); @@ -190,19 +226,10 @@ void NixRepl::mainLoop(const std::vector & files) bool NixRepl::getLine(string & input, const std::string &prompt) { - char * s = linenoise(prompt.c_str()); + char * s = readline(prompt.c_str()); Finally doFree([&]() { free(s); }); - if (!s) { - switch (auto type = linenoiseKeyType()) { - case 1: // ctrl-C - input = ""; - return true; - case 2: // ctrl-D - return false; - default: - throw Error(format("Unexpected linenoise keytype: %1%") % type); - } - } + if (!s) + return false; input += s; input += '\n'; return true; From 9f998096d23eab433f47d8dc1ad5eb1e2ff4b667 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Tue, 26 Jun 2018 13:06:46 -0500 Subject: [PATCH 1267/2196] repl: complete if all matches share prefix --- src/nix/repl.cc | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 7ce37077021..838aa64ae29 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -132,6 +132,24 @@ static char * completionCallback(char * s, int *match) { auto *res = strdup(possible.begin()->c_str() + strlen(s)); if (!res) throw Error("allocation failure"); return res; + } else if (possible.size() > 1) { + auto checkAllHaveSameAt = [&](size_t pos) { + auto &first = *possible.begin(); + for (auto &p : possible) { + if (p.size() <= pos || p[pos] != first[pos]) + return false; + } + return true; + }; + size_t start = strlen(s); + size_t len = 0; + while (checkAllHaveSameAt(start + len)) ++len; + if (len > 0) { + *match = 1; + auto *res = strdup(std::string(*possible.begin(), start, len).c_str()); + if (!res) throw Error("allocation failure"); + return res; + } } *match = 0; From 2a8bdfd31a1711b19bbf19ee3b1bed15b724c007 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 29 Oct 2018 08:47:46 -0500 Subject: [PATCH 1268/2196] editline: 1.15.3 -> 1.16.0 Bump fallback editline expression to latest in nixpkgs. --- editline.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/editline.nix b/editline.nix index d9b5a2b64d6..dc44c7d8705 100644 --- a/editline.nix +++ b/editline.nix @@ -2,12 +2,12 @@ stdenv.mkDerivation rec { name = "editline-${version}"; - version = "1.15.3"; + version = "1.16.0"; src = fetchFromGitHub { owner = "troglobit"; repo = "editline"; rev = version; - sha256 = "0dm5fgq0acpprr938idwml64nclg9l6c6avirsd8r6f40qicbgma"; + sha256 = "0a751dp34mk9hwv59ss447csknpm5i5cgd607m3fqf24rszyhbf2"; }; nativeBuildInputs = [ autoreconfHook ]; From 49e272f6472360ba952aa4fa62cc8e61a1b4d6b7 Mon Sep 17 00:00:00 2001 From: Falco Peijnenburg Date: Sat, 27 Oct 2018 18:12:02 +0200 Subject: [PATCH 1269/2196] copyStorePath: Fix hash errors when copying from older store This commit partially reverts 48662d151bdf4a38670897beacea9d1bd750376a. When copying from an older store (in my case a store running Nix 1.11.7), nix would throw errors about there being no hash. This is fixed by recalculating the hash. 
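As a rough usage sketch of the scenario described above (the host name and store path below are hypothetical, and the exact command-line form may differ between Nix versions), copying from a machine that still runs an older daemon would look something like:

    # With this fix, a path whose path info lacks a NAR hash is re-hashed
    # from the NAR dump during the copy instead of the copy failing with a
    # "no hash" error.
    nix copy --from ssh://legacy-host /nix/store/...-hello-2.10
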
--- src/libstore/store-api.cc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 92e2685f7f6..dc54c735fdb 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -588,15 +588,19 @@ void copyStorePath(ref srcStore, ref dstStore, uint64_t total = 0; - // FIXME -#if 0 if (!info->narHash) { + StringSink sink; + srcStore->narFromPath({storePath}, sink); auto info2 = make_ref(*info); info2->narHash = hashString(htSHA256, *sink.s); if (!info->narSize) info2->narSize = sink.s->size(); + if (info->ultimate) info2->ultimate = false; info = info2; + + StringSource source(*sink.s); + dstStore->addToStore(*info, source, repair, checkSigs); + return; } -#endif if (info->ultimate) { auto info2 = make_ref(*info); From 0163e8928c624251456adacb669a94a4adf230ff Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 Oct 2018 11:45:31 +0100 Subject: [PATCH 1270/2196] Fix broken uploadProgressCallback closure Since the callback is global we can't refer to 'path' in it. This could cause a segfault or printing of arbitrary data. --- src/libstore/s3-binary-cache-store.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index ba11ce6bb6d..13ee257ba25 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -269,14 +269,14 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore transferConfig.bufferSize = bufferSize; transferConfig.uploadProgressCallback = - [&](const TransferManager *transferManager, - const std::shared_ptr - &transferHandle) + [](const TransferManager *transferManager, + const std::shared_ptr + &transferHandle) { //FIXME: find a way to properly abort the multipart upload. //checkInterrupt(); debug("upload progress ('%s'): '%d' of '%d' bytes", - path, + transferHandle->GetKey(), transferHandle->GetBytesTransferred(), transferHandle->GetBytesTotalSize()); }; From 9f99d62480cf7c58c0a110b180f2096b7d25adab Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 Oct 2018 14:25:00 +0100 Subject: [PATCH 1271/2196] S3BinaryCacheStore: Allow disabling multipart uploads The use of TransferManager has several issues, including that it doesn't allow setting a Content-Encoding without a patch, and it doesn't handle exceptions in worker threads (causing termination on memory allocation failure). Fixes #2493. 
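As an illustration of the new setting (the bucket name is hypothetical, and passing these options as store-URI query parameters is an assumption based on how the other S3 binary cache settings are configured):

    # 'multipart-upload' (default: false) and 'buffer-size' are the settings
    # declared in this patch; here multi-part uploads are re-enabled with an
    # explicit 5 MiB part size.
    nix copy --to 's3://example-nix-cache?multipart-upload=true&buffer-size=5242880' \
        /nix/store/...-hello-2.10
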
--- src/libstore/s3-binary-cache-store.cc | 88 +++++++++++++++++---------- 1 file changed, 57 insertions(+), 31 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 13ee257ba25..c5c6b89b197 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -173,6 +173,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore const Setting narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"}; const Setting lsCompression{this, "", "ls-compression", "compression method for .ls files"}; const Setting logCompression{this, "", "log-compression", "compression method for log/* files"}; + const Setting multipartUpload{ + this, false, "multipart-upload", "whether to use multi-part uploads"}; const Setting bufferSize{ this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"}; @@ -261,46 +263,70 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore static std::shared_ptr executor = std::make_shared(maxThreads); - std::call_once(transferManagerCreated, [&]() { + std::call_once(transferManagerCreated, [&]() + { + if (multipartUpload) { + TransferManagerConfiguration transferConfig(executor.get()); + + transferConfig.s3Client = s3Helper.client; + transferConfig.bufferSize = bufferSize; + + transferConfig.uploadProgressCallback = + [](const TransferManager *transferManager, + const std::shared_ptr + &transferHandle) + { + //FIXME: find a way to properly abort the multipart upload. + //checkInterrupt(); + debug("upload progress ('%s'): '%d' of '%d' bytes", + transferHandle->GetKey(), + transferHandle->GetBytesTransferred(), + transferHandle->GetBytesTotalSize()); + }; + + transferManager = TransferManager::Create(transferConfig); + } + }); - TransferManagerConfiguration transferConfig(executor.get()); + auto now1 = std::chrono::steady_clock::now(); - transferConfig.s3Client = s3Helper.client; - transferConfig.bufferSize = bufferSize; + if (transferManager) { - transferConfig.uploadProgressCallback = - [](const TransferManager *transferManager, - const std::shared_ptr - &transferHandle) - { - //FIXME: find a way to properly abort the multipart upload. 
- //checkInterrupt(); - debug("upload progress ('%s'): '%d' of '%d' bytes", - transferHandle->GetKey(), - transferHandle->GetBytesTransferred(), - transferHandle->GetBytesTotalSize()); - }; + std::shared_ptr transferHandle = + transferManager->UploadFile( + stream, bucketName, path, mimeType, + Aws::Map(), + nullptr, contentEncoding); - transferManager = TransferManager::Create(transferConfig); - }); + transferHandle->WaitUntilFinished(); - auto now1 = std::chrono::steady_clock::now(); + if (transferHandle->GetStatus() == TransferStatus::FAILED) + throw Error("AWS error: failed to upload 's3://%s/%s': %s", + bucketName, path, transferHandle->GetLastError().GetMessage()); - std::shared_ptr transferHandle = - transferManager->UploadFile( - stream, bucketName, path, mimeType, - Aws::Map(), - nullptr, contentEncoding); + if (transferHandle->GetStatus() != TransferStatus::COMPLETED) + throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", + bucketName, path); - transferHandle->WaitUntilFinished(); + } else { - if (transferHandle->GetStatus() == TransferStatus::FAILED) - throw Error("AWS error: failed to upload 's3://%s/%s': %s", - bucketName, path, transferHandle->GetLastError().GetMessage()); + auto request = + Aws::S3::Model::PutObjectRequest() + .WithBucket(bucketName) + .WithKey(path); - if (transferHandle->GetStatus() != TransferStatus::COMPLETED) - throw Error("AWS error: transfer status of 's3://%s/%s' in unexpected state", - bucketName, path); + request.SetContentType(mimeType); + + if (contentEncoding != "") + request.SetContentEncoding(contentEncoding); + + auto stream = std::make_shared(data); + + request.SetBody(stream); + + auto result = checkAws(fmt("AWS error uploading '%s'", path), + s3Helper.client->PutObject(request)); + } printTalkative("upload of '%s' completed", path); From 38d76d6d78e691c5ede8b929cf41ffe849349a8e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 Oct 2018 19:28:42 +0100 Subject: [PATCH 1272/2196] Remove redundant message --- src/libstore/s3-binary-cache-store.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index c5c6b89b197..1f755ba9eee 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -328,8 +328,6 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore s3Helper.client->PutObject(request)); } - printTalkative("upload of '%s' completed", path); - auto now2 = std::chrono::steady_clock::now(); auto duration = From f3b8173a93a3571046ef6765afe1a15efc1e0de6 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Thu, 25 Oct 2018 13:00:21 +0200 Subject: [PATCH 1273/2196] config: use all of XDG_CONFIG_DIRS Previously, config would only be read from XDG_CONFIG_HOME. This change allows reading config from additional directories, which enables e.g. per-project binary caches or chroot stores with the help of direnv. --- src/libstore/globals.cc | 6 +++++- src/libutil/util.cc | 9 +++++++++ src/libutil/util.hh | 3 +++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index a9c07b23a6f..1c2c08715a1 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -78,7 +78,11 @@ void loadConfFile() ~/.nix/nix.conf or the command line. 
*/ globalConfig.resetOverriden(); - globalConfig.applyConfigFile(getConfigDir() + "/nix/nix.conf"); + auto dirs = getConfigDirs(); + // Iterate over them in reverse so that the ones appearing first in the path take priority + for (auto dir = dirs.rbegin(); dir != dirs.rend(); dir++) { + globalConfig.applyConfigFile(*dir + "/nix/nix.conf"); + } } unsigned int Settings::getDefaultCores() diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 03f0be705c1..259eaf0a0dd 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -496,6 +496,15 @@ Path getConfigDir() return configDir; } +std::vector getConfigDirs() +{ + Path configHome = getConfigDir(); + string configDirs = getEnv("XDG_CONFIG_DIRS"); + std::vector result = tokenizeString>(configDirs, ":"); + result.insert(result.begin(), configHome); + return result; +} + Path getDataDir() { diff --git a/src/libutil/util.hh b/src/libutil/util.hh index fc25d27758c..bda87bee433 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -131,6 +131,9 @@ Path getCacheDir(); /* Return $XDG_CONFIG_HOME or $HOME/.config. */ Path getConfigDir(); +/* Return the directories to search for user configuration files */ +std::vector getConfigDirs(); + /* Return $XDG_DATA_HOME or $HOME/.local/share. */ Path getDataDir(); From d1b049c4ea795ba7c1d1f1640ccd087e538b9c79 Mon Sep 17 00:00:00 2001 From: Jan Path Date: Wed, 31 Oct 2018 20:50:18 +0100 Subject: [PATCH 1274/2196] Fix typo in comments --- src/libexpr/get-drvs.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index 4d9128e3f44..daaa635fe1b 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -44,7 +44,7 @@ public: string queryDrvPath() const; string queryOutPath() const; string queryOutputName() const; - /** Return the list of outputs. The "outputs to install" are determined by `mesa.outputsToInstall`. */ + /** Return the list of outputs. The "outputs to install" are determined by `meta.outputsToInstall`. */ Outputs queryOutputs(bool onlyOutputsToInstall = false); StringSet queryMetaNames(); From 6323b0729a27f312ba8cce6279de68181644823e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Nov 2018 15:17:35 +0100 Subject: [PATCH 1275/2196] Disable the S3 content-encoding patch Since we're not using multi-part uploads at the moment, we can drop this patch. --- release-common.nix | 2 ++ shell.nix | 2 +- src/libstore/s3-binary-cache-store.cc | 5 ++++- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/release-common.nix b/release-common.nix index ace2a4f9b91..9f584993daa 100644 --- a/release-common.nix +++ b/release-common.nix @@ -64,10 +64,12 @@ rec { apis = ["s3" "transfer"]; customMemoryManagement = false; }).overrideDerivation (args: { + /* patches = args.patches or [] ++ [ (fetchpatch { url = https://github.com/edolstra/aws-sdk-cpp/commit/3e07e1f1aae41b4c8b340735ff9e8c735f0c063f.patch; sha256 = "1pij0v449p166f9l29x7ppzk8j7g9k9mp15ilh5qxp29c7fnvxy2"; }) ]; + */ })); perlDeps = diff --git a/shell.nix b/shell.nix index c04bcd15130..817684b7646 100644 --- a/shell.nix +++ b/shell.nix @@ -1,6 +1,6 @@ { useClang ? 
false }: -with import (builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-18.03"; }) {}; +with import (builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-18.09"; }) {}; with import ./release-common.nix { inherit pkgs; }; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 1f755ba9eee..4f1e23198ff 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -292,11 +292,14 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore if (transferManager) { + if (contentEncoding != "") + throw Error("setting a content encoding is not supported with S3 multi-part uploads"); + std::shared_ptr transferHandle = transferManager->UploadFile( stream, bucketName, path, mimeType, Aws::Map(), - nullptr, contentEncoding); + nullptr /*, contentEncoding */); transferHandle->WaitUntilFinished(); From 5a1a870849d47e198d3690e4befae9b7cd79f098 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Nov 2018 23:28:32 +0100 Subject: [PATCH 1276/2196] Bump version --- version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version b/version index 616187889b6..c0943d3e98d 100644 --- a/version +++ b/version @@ -1 +1 @@ -2.2 \ No newline at end of file +2.3 \ No newline at end of file From 1109193ea3a25208d438c65b2d79e207ae1af039 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Nov 2018 11:37:43 +0100 Subject: [PATCH 1277/2196] Fix preferLocalBuild description preferLocalBuild does not in fact prevent substitution. --- doc/manual/expressions/advanced-attributes.xml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/doc/manual/expressions/advanced-attributes.xml b/doc/manual/expressions/advanced-attributes.xml index 2af7a51acfb..db981b60713 100644 --- a/doc/manual/expressions/advanced-attributes.xml +++ b/doc/manual/expressions/advanced-attributes.xml @@ -312,9 +312,7 @@ big = "a very long string"; preferLocalBuild If this attribute is set to - true, it has two effects. First, the - derivation will always be built, not substituted, even if a - substitute is available. Second, if true and distributed building is enabled, then, if possible, the derivaton will be built locally instead of forwarded to a remote machine. This is From 5a3f140856185ae0c6ee9270ad5d5fbc0505e3f2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Nov 2018 11:42:22 +0100 Subject: [PATCH 1278/2196] Document allowSubstitutes --- doc/manual/expressions/advanced-attributes.xml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/manual/expressions/advanced-attributes.xml b/doc/manual/expressions/advanced-attributes.xml index db981b60713..a9b97b91a0a 100644 --- a/doc/manual/expressions/advanced-attributes.xml +++ b/doc/manual/expressions/advanced-attributes.xml @@ -322,6 +322,20 @@ big = "a very long string"; + + allowSubstitutes + + If this attribute is set to + false, then Nix will always build this + derivation; it will not try to substitute its outputs. This is + useful for very trivial derivations (such as + writeText in Nixpkgs) that are cheaper to + build locally than to substitute from a binary + cache. + + + + From 812e39313c2bcf8909b83e1e8bc548a85dcd626c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 7 Nov 2018 17:08:28 +0100 Subject: [PATCH 1279/2196] Enable sandboxing by default Closes #179. 
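Since this changes the default, a brief sketch of opting back out, using the values the 'sandbox' setting accepts according to the hunk below ("true", "false" or "relaxed"); the derivation attribute is only an example:

    # Relax (or disable) sandboxing for a single build.
    nix-build --option sandbox relaxed '<nixpkgs>' -A hello

The same value can be set permanently with a line such as 'sandbox = relaxed' in nix.conf.
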
--- doc/manual/release-notes/release-notes.xml | 1 + doc/manual/release-notes/rl-2.3.xml | 19 +++++++++++++++++++ src/libstore/globals.hh | 8 +++++++- tests/common.sh.in | 15 ++++++--------- tests/init.sh | 1 + 5 files changed, 34 insertions(+), 10 deletions(-) create mode 100644 doc/manual/release-notes/rl-2.3.xml diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml index e8ff586fa43..2655d68e354 100644 --- a/doc/manual/release-notes/release-notes.xml +++ b/doc/manual/release-notes/release-notes.xml @@ -12,6 +12,7 @@ --> + diff --git a/doc/manual/release-notes/rl-2.3.xml b/doc/manual/release-notes/rl-2.3.xml new file mode 100644 index 00000000000..6b68fbfd7e7 --- /dev/null +++ b/doc/manual/release-notes/rl-2.3.xml @@ -0,0 +1,19 @@ +
+ +Release 2.3 (2019-??-??) + +This release has the following changes: + + + + + Sandbox builds are now enabled by default on Linux. + + + + +
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 6b3e204536f..53efc6a90fb 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -195,7 +195,13 @@ public: Setting showTrace{this, false, "show-trace", "Whether to show a stack trace on evaluation errors."}; - Setting sandboxMode{this, smDisabled, "sandbox", + Setting sandboxMode{this, + #if __linux__ + smEnabled + #else + smDisabled + #endif + , "sandbox", "Whether to enable sandboxed builds. Can be \"true\", \"false\" or \"relaxed\".", {"build-use-chroot", "build-use-sandbox"}}; diff --git a/tests/common.sh.in b/tests/common.sh.in index 2ee2f589dae..6a523ca9d83 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -85,16 +85,13 @@ killDaemon() { trap "" EXIT } -canUseSandbox() { - if [[ $(uname) != Linux ]]; then return 1; fi - - if [ ! -L /proc/self/ns/user ]; then - echo "Kernel doesn't support user namespaces, skipping this test..." - return 1 - fi +if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true; then + _canUseSandbox=1 +fi - if ! unshare --user true ; then - echo "Unprivileged user namespaces disabled by sysctl, skipping this test..." +canUseSandbox() { + if [[ ! $_canUseSandbox ]]; then + echo "Sandboxing not supported, skipping this test..." return 1 fi diff --git a/tests/init.sh b/tests/init.sh index e5353598bcc..19a12c1e2d9 100644 --- a/tests/init.sh +++ b/tests/init.sh @@ -16,6 +16,7 @@ mkdir "$NIX_CONF_DIR" cat > "$NIX_CONF_DIR"/nix.conf < Date: Thu, 8 Nov 2018 12:03:50 +0000 Subject: [PATCH 1280/2196] Deprecate builtins.toPath --- doc/manual/expressions/builtins.xml | 29 ++++++----------------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 873f30b062e..3f396ac1d99 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -1027,22 +1027,8 @@ Evaluates to [ "foo" ]. path Return true if the path - path exists, and - false otherwise. One application of this - function is to conditionally include a Nix expression containing - user configuration: - - -let - fileName = builtins.getEnv "CONFIG_FILE"; - config = - if fileName != "" && builtins.pathExists (builtins.toPath fileName) - then import (builtins.toPath fileName) - else { someSetting = false; }; # default configuration -in config.someSetting - - (Note that CONFIG_FILE must be an absolute path for - this to work.) + path exists at evaluation time, and + false otherwise. @@ -1344,13 +1330,10 @@ in foo builtins.toPath s - Convert the string value - s into a path value. The string - s must represent an absolute path - (i.e., must start with /). The path need not - exist. The resulting path is canonicalised, e.g., - builtins.toPath "//foo/xyzzy/../bar/" returns - /foo/bar. + DEPRECATED. Use /. + "/path" + to convert a string into an absolute path. For relative paths, + use ./. + "/path". + From 34d2948f219afdecb41ffd080f02f6ccc0a7bb7c Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Thu, 8 Nov 2018 14:07:19 +0100 Subject: [PATCH 1281/2196] Fix manual build This was broken by some missing closing tags in 0bea4a50e03 --- doc/manual/expressions/builtins.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 4a20ffd81be..03a60a3c4c7 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -1078,7 +1078,7 @@ Evaluates to [ "foo" ]. 
Return true if the path path exists at evaluation time, and - false otherwise. + false otherwise. From fdd19fa2d736bb8ff0e75b963071dfb9dffe1702 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Nov 2018 10:26:39 +0100 Subject: [PATCH 1282/2196] Revert "Bump version" This reverts commit 5a1a870849d47e198d3690e4befae9b7cd79f098. Counting is hard. --- version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version b/version index c0943d3e98d..616187889b6 100644 --- a/version +++ b/version @@ -1 +1 @@ -2.3 \ No newline at end of file +2.2 \ No newline at end of file From 4ea4d0b1a39deb0f12a66e10e4809f2d0ddb324e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Nov 2018 10:29:16 +0100 Subject: [PATCH 1283/2196] Urgh --- doc/manual/release-notes/release-notes.xml | 1 - doc/manual/release-notes/rl-2.2.xml | 4 ++++ doc/manual/release-notes/rl-2.3.xml | 19 ------------------- 3 files changed, 4 insertions(+), 20 deletions(-) delete mode 100644 doc/manual/release-notes/rl-2.3.xml diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml index 2655d68e354..e8ff586fa43 100644 --- a/doc/manual/release-notes/release-notes.xml +++ b/doc/manual/release-notes/release-notes.xml @@ -12,7 +12,6 @@ --> - diff --git a/doc/manual/release-notes/rl-2.2.xml b/doc/manual/release-notes/rl-2.2.xml index bc28a56c940..abe9b49adad 100644 --- a/doc/manual/release-notes/rl-2.2.xml +++ b/doc/manual/release-notes/rl-2.2.xml @@ -19,6 +19,10 @@ + + Sandbox builds are now enabled by default on Linux. + + diff --git a/doc/manual/release-notes/rl-2.3.xml b/doc/manual/release-notes/rl-2.3.xml deleted file mode 100644 index 6b68fbfd7e7..00000000000 --- a/doc/manual/release-notes/rl-2.3.xml +++ /dev/null @@ -1,19 +0,0 @@ -
- -Release 2.3 (2019-??-??) - -This release has the following changes: - - - - - Sandbox builds are now enabled by default on Linux. - - - - -
From 77516166450008de8c64bb04c67f9b890ddfe38d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Nov 2018 10:34:12 +0100 Subject: [PATCH 1284/2196] nix-prefetch-url: Stop progress bar before printing results --- src/nix-prefetch-url/nix-prefetch-url.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc index 54a402241e3..f54706a8a01 100644 --- a/src/nix-prefetch-url/nix-prefetch-url.cc +++ b/src/nix-prefetch-url/nix-prefetch-url.cc @@ -220,6 +220,8 @@ static int _main(int argc, char * * argv) assert(storePath == store->makeFixedOutputPath(unpack, hash, name)); } + stopProgressBar(); + if (!printPath) printInfo(format("path is '%1%'") % storePath); From b05d6dac7ba662ccf7af8b5a112a20f460e0d50e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Nov 2018 16:08:36 +0100 Subject: [PATCH 1285/2196] Manual: build locally -> build --- doc/manual/expressions/advanced-attributes.xml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/manual/expressions/advanced-attributes.xml b/doc/manual/expressions/advanced-attributes.xml index a9b97b91a0a..07b0d97d3f7 100644 --- a/doc/manual/expressions/advanced-attributes.xml +++ b/doc/manual/expressions/advanced-attributes.xml @@ -330,8 +330,7 @@ big = "a very long string"; derivation; it will not try to substitute its outputs. This is useful for very trivial derivations (such as writeText in Nixpkgs) that are cheaper to - build locally than to substitute from a binary - cache. + build than to substitute from a binary cache. From 18215be59d39741dae96a4a3be6d364b52c10e9b Mon Sep 17 00:00:00 2001 From: "Felix C. Stegerman" Date: Mon, 12 Nov 2018 17:50:39 +0100 Subject: [PATCH 1286/2196] fix typo (s/gift/git/) --- doc/manual/expressions/builtins.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index 03a60a3c4c7..cdadfef707b 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -436,7 +436,7 @@ stdenv.mkDerivation { … } Fetching a repository's specific commit on an arbitrary branch If the revision you're looking for is in the default branch - of the gift repository you don't strictly need to specify + of the git repository you don't strictly need to specify the branch name in the ref attribute. @@ -465,7 +465,7 @@ stdenv.mkDerivation { … } Fetching a repository's specific commit on the default branch If the revision you're looking for is in the default branch - of the gift repository you may omit the + of the git repository you may omit the ref attribute. builtins.fetchGit { From fb2c21f71c8a31229740cdbff5b2d08a1752133a Mon Sep 17 00:00:00 2001 From: "Felix C. 
Stegerman" Date: Tue, 13 Nov 2018 01:18:26 +0100 Subject: [PATCH 1287/2196] manual: quote $servlets --- doc/manual/expressions/builtins.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml index cdadfef707b..412622714d4 100644 --- a/doc/manual/expressions/builtins.xml +++ b/doc/manual/expressions/builtins.xml @@ -1480,7 +1480,7 @@ stdenv.mkDerivation (rec { builder = builtins.toFile "builder.sh" " source $stdenv/setup mkdir $out - echo $servlets | xsltproc ${stylesheet} - > $out/server-conf.xml]]> $out/server-conf.xml]]> Date: Tue, 13 Nov 2018 16:15:30 +0100 Subject: [PATCH 1288/2196] Restore parent mount namespace before executing a child process This ensures that they can't write to /nix/store. Fixes #2535. --- src/libstore/local-store.cc | 2 ++ src/libstore/ssh.cc | 3 +++ src/libutil/util.cc | 23 +++++++++++++++++++++++ src/libutil/util.hh | 9 +++++++++ src/nix-build/nix-build.cc | 4 ++-- src/nix/edit.cc | 5 +++++ src/nix/repl.cc | 2 ++ src/nix/run.cc | 4 ++-- 8 files changed, 48 insertions(+), 4 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 216f3417c4a..e1cb423d151 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -366,6 +366,8 @@ void LocalStore::makeStoreWritable() throw SysError("getting info about the Nix store mount point"); if (stat.f_flag & ST_RDONLY) { + saveMountNamespace(); + if (unshare(CLONE_NEWNS) == -1) throw SysError("setting up a private mount namespace"); diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 5e0e44935cc..cf133b57cb2 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -1,4 +1,5 @@ #include "ssh.hh" +#include "affinity.hh" namespace nix { @@ -34,7 +35,9 @@ std::unique_ptr SSHMaster::startCommand(const std::string auto conn = std::make_unique(); conn->sshPid = startProcess([&]() { + restoreAffinity(); restoreSignals(); + restoreMountNamespace(); close(in.writeSide.get()); close(out.readSide.get()); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 259eaf0a0dd..6e4536e6e4e 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -936,6 +936,7 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) throw SysError("setting death signal"); #endif restoreAffinity(); + restoreMountNamespace(); fun(); } catch (std::exception & e) { try { @@ -1504,4 +1505,26 @@ std::unique_ptr createInterruptCallback(std::function return std::unique_ptr(res.release()); } +static AutoCloseFD fdSavedMountNamespace; + +void saveMountNamespace() +{ +#if __linux__ + std::once_flag done; + std::call_once(done, []() { + fdSavedMountNamespace = open("/proc/self/ns/mnt", O_RDONLY); + if (!fdSavedMountNamespace) + throw SysError("saving parent mount namespace"); + }); +#endif +} + +void restoreMountNamespace() +{ +#if __linux__ + if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1) + throw SysError("restoring parent mount namespace"); +#endif +} + } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index bda87bee433..2689cbd8b41 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -514,4 +514,13 @@ typedef std::function PathFilter; extern PathFilter defaultPathFilter; +/* Save the current mount namespace. Ignored if called more than + once. */ +void saveMountNamespace(); + +/* Restore the mount namespace saved by saveMountNamespace(). Ignored + if saveMountNamespace() was never called. 
*/ +void restoreMountNamespace(); + + } diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 618895d387d..11ea3b1f7ae 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -401,8 +401,6 @@ static void _main(int argc, char * * argv) } else env[var.first] = var.second; - restoreAffinity(); - /* Run a shell using the derivation's environment. For convenience, source $stdenv/setup to setup additional environment variables and shell functions. Also don't @@ -446,7 +444,9 @@ static void _main(int argc, char * * argv) auto argPtrs = stringsToCharPtrs(args); + restoreAffinity(); restoreSignals(); + restoreMountNamespace(); execvp(shell.c_str(), argPtrs.data()); diff --git a/src/nix/edit.cc b/src/nix/edit.cc index c9671f76d0f..d8d5895bd86 100644 --- a/src/nix/edit.cc +++ b/src/nix/edit.cc @@ -3,6 +3,7 @@ #include "eval.hh" #include "attr-path.hh" #include "progress-bar.hh" +#include "affinity.hh" #include @@ -72,6 +73,10 @@ struct CmdEdit : InstallableCommand stopProgressBar(); + restoreAffinity(); + restoreSignals(); + restoreMountNamespace(); + execvp(args.front().c_str(), stringsToCharPtrs(args).data()); throw SysError("cannot run editor '%s'", editor); diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 1bbe256b2d8..77898c63236 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -285,6 +285,8 @@ static int runProgram(const string & program, const Strings & args) if (pid == -1) throw SysError("forking"); if (pid == 0) { restoreAffinity(); + restoreSignals(); + restoreMountNamespace(); execvp(program.c_str(), stringsToCharPtrs(args2).data()); _exit(1); } diff --git a/src/nix/run.cc b/src/nix/run.cc index 35b76334587..1297072989b 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -153,9 +153,9 @@ struct CmdRun : InstallablesCommand stopProgressBar(); - restoreSignals(); - restoreAffinity(); + restoreSignals(); + restoreMountNamespace(); /* If this is a diverted store (i.e. 
its "logical" location (typically /nix/store) differs from its "physical" location From 9dc9b64aadadd9f3b8277fb942b00dad1a753fd3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 13 Nov 2018 20:55:08 +0100 Subject: [PATCH 1289/2196] Remove editline expression, not needed anymore --- editline.nix | 24 ------------------------ release-common.nix | 2 -- 2 files changed, 26 deletions(-) delete mode 100644 editline.nix diff --git a/editline.nix b/editline.nix deleted file mode 100644 index dc44c7d8705..00000000000 --- a/editline.nix +++ /dev/null @@ -1,24 +0,0 @@ -{ stdenv, fetchFromGitHub, autoreconfHook }: - -stdenv.mkDerivation rec { - name = "editline-${version}"; - version = "1.16.0"; - src = fetchFromGitHub { - owner = "troglobit"; - repo = "editline"; - rev = version; - sha256 = "0a751dp34mk9hwv59ss447csknpm5i5cgd607m3fqf24rszyhbf2"; - }; - - nativeBuildInputs = [ autoreconfHook ]; - - dontDisableStatic = true; - - meta = with stdenv.lib; { - homepage = http://troglobit.com/editline.html; - description = "A readline() replacement for UNIX without termcap (ncurses)"; - license = licenses.bsdOriginal; - maintainers = with maintainers; [ dtzWill ]; - platforms = platforms.all; - }; -} diff --git a/release-common.nix b/release-common.nix index f495df1440d..4c556598526 100644 --- a/release-common.nix +++ b/release-common.nix @@ -29,8 +29,6 @@ rec { ''; }); - editline = pkgs.editline or (pkgs.callPackage ./editline.nix {}); - configureFlags = [ "--enable-gc" From a32ff2573ba4d0df4e3360c5a96398738da953f6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 15 Nov 2018 12:59:54 +0100 Subject: [PATCH 1290/2196] Fix 'Read-only file system' when building a derivation --- src/libstore/build.cc | 2 ++ src/libutil/util.cc | 3 ++- src/libutil/util.hh | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 676ad5856b1..9c408e29c06 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2193,6 +2193,7 @@ void DerivationGoal::startBuilder() userNamespaceSync.create(); options.allowVfork = false; + options.restoreMountNamespace = false; Pid helper = startProcess([&]() { @@ -2259,6 +2260,7 @@ void DerivationGoal::startBuilder() #endif { options.allowVfork = !buildUser && !drv->isBuiltin(); + options.restoreMountNamespace = false; pid = startProcess([&]() { runChild(); }, options); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6e4536e6e4e..e12c4b258c2 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -936,7 +936,8 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) throw SysError("setting death signal"); #endif restoreAffinity(); - restoreMountNamespace(); + if (options.restoreMountNamespace) + restoreMountNamespace(); fun(); } catch (std::exception & e) { try { diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 2689cbd8b41..d67bddc138c 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -250,6 +250,7 @@ struct ProcessOptions bool dieWithParent = true; bool runExitHandlers = false; bool allowVfork = true; + bool restoreMountNamespace = true; }; pid_t startProcess(std::function fun, const ProcessOptions & options = ProcessOptions()); From 40e0c9e925e266c1b0944d9810f3a7f3dd6ec544 Mon Sep 17 00:00:00 2001 From: Linus Heckemann Date: Fri, 16 Nov 2018 16:13:28 +0100 Subject: [PATCH 1291/2196] nix ls-nar: allow reading from FIFOs fixes #2528 --- src/nix/ls.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 
e99622faf47..d089be42fb2 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -148,7 +148,7 @@ struct CmdLsNar : Command, MixLs void run() override { - list(makeNarAccessor(make_ref(readFile(narPath)))); + list(makeNarAccessor(make_ref(readFile(narPath, true)))); } }; From b289d86cd1ef5a8df0ce177dfc04b4e5fec3fa14 Mon Sep 17 00:00:00 2001 From: Kai Harries Date: Sun, 18 Nov 2018 19:06:35 +0100 Subject: [PATCH 1292/2196] repl: Remove code that was commented out --- src/nix/repl.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index ff8477865b5..39c6520999c 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -204,8 +204,6 @@ void NixRepl::mainLoop(const std::vector & files) createDirs(dirOf(historyFile)); el_hist_size = 1000; read_history(historyFile.c_str()); - // rl_initialize(); - // linenoiseSetCompletionCallback(completionCallback); curRepl = this; rl_set_complete_func(completionCallback); rl_set_list_possib_func(listPossibleCallback); From de5997332d0f9b1347978ea0d4e7ead2b59d2dd6 Mon Sep 17 00:00:00 2001 From: Kai Harries Date: Thu, 15 Nov 2018 21:15:11 +0100 Subject: [PATCH 1293/2196] repl: give user the choice between libeditline and libreadline The goal is to support libeditline AND libreadline and let the user decide at compile time which one to use. Add a compile-time option to use libreadline instead of libeditline. If compiled against libreadline, completion functionality is lost because of an incompatibility between libeditline's and libreadline's completion functions. Completion with libreadline is possible and can be added later. To use libreadline instead of libeditline, the environment variables 'EDITLINE_LIBS' and 'EDITLINE_CFLAGS' have to be set during the ./configure step. Example: EDITLINE_LIBS="/usr/lib/x86_64-linux-gnu/libhistory.so /usr/lib/x86_64-linux-gnu/libreadline.so" EDITLINE_CFLAGS="-DREADLINE" The reason for this change is that, for example, on Debian three different editline libraries already exist, but none of those is compatible with the flavor used by nix. My hope is that with this change it will be easier to port nix to systems that already have libreadline available.
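The switch described above comes down to a small preprocessor pattern plus guarding the editline-only calls. The sketch below condenses that idea; it assumes READLINE is injected via EDITLINE_CFLAGS="-DREADLINE" as in the example, and the initLineEditor helper name is purely illustrative (the actual change is the repl.cc diff that follows).

```
// Minimal sketch of the compile-time line-editor switch, assuming the
// READLINE macro comes from EDITLINE_CFLAGS="-DREADLINE" at configure time.
#ifdef READLINE
#include <readline/readline.h>
#include <readline/history.h>
#else
#include <editline.h>
#endif

// Hypothetical helper; it mirrors the calls touched in the diff below.
static void initLineEditor(const char * historyFile)
{
    rl_readline_name = "nix-repl";   // both libraries honour per-app .inputrc settings
#ifndef READLINE
    el_hist_size = 1000;             // editline-only history size knob
#endif
    read_history(historyFile);       // present in both APIs
#ifndef READLINE
    // The completion hooks exist only in the editline flavour used here,
    // which is why completion is lost when building against GNU readline.
    // rl_set_complete_func(completionCallback);
    // rl_set_list_possib_func(listPossibleCallback);
#endif
}
```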
--- src/nix/repl.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 39c6520999c..d93fd770e80 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -5,7 +5,12 @@ #include +#ifdef READLINE +#include +#include +#else #include +#endif #include "shared.hh" #include "eval.hh" @@ -202,11 +207,15 @@ void NixRepl::mainLoop(const std::vector & files) // Allow nix-repl specific settings in .inputrc rl_readline_name = "nix-repl"; createDirs(dirOf(historyFile)); +#ifndef READLINE el_hist_size = 1000; +#endif read_history(historyFile.c_str()); curRepl = this; +#ifndef READLINE rl_set_complete_func(completionCallback); rl_set_list_possib_func(listPossibleCallback); +#endif std::string input; From 5e64470b192271ce60bea2f5c5fcc556227f86a1 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 24 Sep 2018 15:18:59 +0200 Subject: [PATCH 1294/2196] Fix typo --- src/libmain/stack.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libmain/stack.cc b/src/libmain/stack.cc index 13896aeecb6..e6224de7d28 100644 --- a/src/libmain/stack.cc +++ b/src/libmain/stack.cc @@ -63,7 +63,7 @@ void detectStackOverflow() act.sa_sigaction = sigsegvHandler; act.sa_flags = SA_SIGINFO | SA_ONSTACK; if (sigaction(SIGSEGV, &act, 0)) - throw SysError("resetting SIGCHLD"); + throw SysError("resetting SIGSEGV"); #endif } From 4aee93d5ce6cf77e314e93074b9da1dcff8979e9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 20 Nov 2018 20:59:44 +0100 Subject: [PATCH 1295/2196] fetchGit: Drop unnecessary localRef --- src/libexpr/primops/fetchGit.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index 5d6249e565d..b46d2f25826 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -85,6 +85,8 @@ GitInfo exportGit(ref store, const std::string & uri, if (rev != "" && !std::regex_match(rev, revRegex)) throw Error("invalid Git revision '%s'", rev); + deletePath(getCacheDir() + "/nix/git"); + Path cacheDir = getCacheDir() + "/nix/gitv2/" + hashString(htSHA256, uri).to_string(Base32, false); if (!pathExists(cacheDir)) { @@ -92,9 +94,7 @@ GitInfo exportGit(ref store, const std::string & uri, runProgram("git", true, { "init", "--bare", cacheDir }); } - std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, *ref)).to_string(Base32, false); - - Path localRefFile = cacheDir + "/refs/heads/" + localRef; + Path localRefFile = cacheDir + "/refs/heads/" + *ref; bool doFetch; time_t now = time(0); @@ -124,7 +124,7 @@ GitInfo exportGit(ref store, const std::string & uri, // FIXME: git stderr messes up our progress indicator, so // we're using --quiet for now. Should process its stderr. 
- runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, *ref + ":" + localRef }); + runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, fmt("%s:%s", *ref, *ref) }); struct timeval times[2]; times[0].tv_sec = now; From fa5143c722dab749de9305dbaa85d618b5758440 Mon Sep 17 00:00:00 2001 From: CHEIKH Chawki Date: Tue, 4 Dec 2018 16:14:23 +0100 Subject: [PATCH 1296/2196] Solve hg "abandoned transaction" issue --- src/libexpr/primops/fetchMercurial.cc | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 97cda2458c9..66f49f37432 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -93,7 +93,22 @@ HgInfo exportMercurial(ref store, const std::string & uri, Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", uri)); if (pathExists(cacheDir)) { - runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri }); + try { + runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri }); + } + catch (ExecError & e){ + string transJournal = cacheDir + "/.hg/store/journal"; + /* hg throws "abandoned transaction" error only if this file exists */ + if (pathExists(transJournal)) + { + runProgram("hg", true, { "recover", "-R", cacheDir }); + runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri }); + } + else + { + throw ExecError(e.status, fmt("program hg '%1%' ", statusToString(e.status))); + } + } } else { createDirs(dirOf(cacheDir)); runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir }); From 898823b67d4d9ceeaebf166957141706eb03ad72 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Fri, 7 Dec 2018 23:38:24 +0100 Subject: [PATCH 1297/2196] s3: make scheme configurable This enables using for http for S3 request for debugging or implementations that don't have https configured. This is not a problem for binary caches since they should not contain sensitive information. Both package signatures and AWS auth already protect against tampering. --- src/libstore/download.cc | 2 +- src/libstore/s3-binary-cache-store.cc | 12 ++++++++---- src/libstore/s3.hh | 4 ++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 7773d903265..fef2cf7a388 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -622,7 +622,7 @@ struct CurlDownloader : public Downloader // FIXME: do this on a worker thread try { #ifdef ENABLE_S3 - S3Helper s3Helper("", Aws::Region::US_EAST_1, ""); // FIXME: make configurable + S3Helper s3Helper("", Aws::Region::US_EAST_1, "", ""); // FIXME: make configurable auto slash = request.uri.find('/', 5); if (slash == std::string::npos) throw nix::Error("bad S3 URI '%s'", request.uri); diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 4f1e23198ff..51de89e0d92 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -82,8 +82,8 @@ static void initAWS() }); } -S3Helper::S3Helper(const std::string & profile, const std::string & region, const std::string & endpoint) - : config(makeConfig(region, endpoint)) +S3Helper::S3Helper(const string & profile, const string & region, const string & scheme, const string & endpoint) + : config(makeConfig(region, scheme, endpoint)) , client(make_ref( profile == "" ? 
std::dynamic_pointer_cast( @@ -114,11 +114,14 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy } }; -ref S3Helper::makeConfig(const string & region, const string & endpoint) +ref S3Helper::makeConfig(const string & region, const string & scheme, const string & endpoint) { initAWS(); auto res = make_ref(); res->region = region; + if (!scheme.empty()) { + res->scheme = Aws::Http::SchemeMapper::FromString(scheme.c_str()); + } if (!endpoint.empty()) { res->endpointOverride = endpoint; } @@ -169,6 +172,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { const Setting profile{this, "", "profile", "The name of the AWS configuration profile to use."}; const Setting region{this, Aws::Region::US_EAST_1, "region", {"aws-region"}}; + const Setting scheme{this, "", "scheme", "The scheme to use for S3 requests, https by default."}; const Setting endpoint{this, "", "endpoint", "An optional override of the endpoint to use when talking to S3."}; const Setting narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"}; const Setting lsCompression{this, "", "ls-compression", "compression method for .ls files"}; @@ -188,7 +192,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) - , s3Helper(profile, region, endpoint) + , s3Helper(profile, region, scheme, endpoint) { diskCache = getNarInfoDiskCache(); } diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh index 95d612b6633..ef5f23d0f25 100644 --- a/src/libstore/s3.hh +++ b/src/libstore/s3.hh @@ -14,9 +14,9 @@ struct S3Helper ref config; ref client; - S3Helper(const std::string & profile, const std::string & region, const std::string & endpoint); + S3Helper(const std::string & profile, const std::string & region, const std::string & scheme, const std::string & endpoint); - ref makeConfig(const std::string & region, const std::string & endpoint); + ref makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint); struct DownloadResult { From 77cc632186d6a0bb63c1e16238f628857bc6e7bd Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Fri, 7 Dec 2018 23:48:35 +0100 Subject: [PATCH 1298/2196] s3: document scheme query parameter --- doc/manual/packages/s3-substituter.xml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/doc/manual/packages/s3-substituter.xml b/doc/manual/packages/s3-substituter.xml index ea654392c6b..2ec9687a0c6 100644 --- a/doc/manual/packages/s3-substituter.xml +++ b/doc/manual/packages/s3-substituter.xml @@ -51,6 +51,18 @@ the S3 URL: addressing. + + scheme + + + The scheme used for S3 requests, https + (default) or http. This option allows you to + disable HTTPS for binary caches which don't support it. + + HTTPS should be used if the cache might contain + sensitive information. 
+ + In this example we will use the bucket named @@ -165,7 +177,7 @@ the S3 URL: Uploading to an S3-Compatible Binary Cache - nix copy --to 's3://example-nix-cache?profile=cache-upload&endpoint=minio.example.com' nixpkgs.hello + nix copy --to 's3://example-nix-cache?profile=cache-upload&scheme=https&endpoint=minio.example.com' nixpkgs.hello From 419949bf617a73e92f6ebafbaf5288e3cf4c9f27 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Sat, 8 Dec 2018 17:28:38 -0500 Subject: [PATCH 1299/2196] docs: add missing prerequisites: brotli, boost, libseccomp --- .../installation/prerequisites-source.xml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/doc/manual/installation/prerequisites-source.xml b/doc/manual/installation/prerequisites-source.xml index ef14a1d753d..f1705221663 100644 --- a/doc/manual/installation/prerequisites-source.xml +++ b/doc/manual/installation/prerequisites-source.xml @@ -25,6 +25,12 @@ If your distribution does not provide it, you can get it from . + The libbrotlienc and + libbrotlidec libraries to provide implementation + of the Brotli compression algorithm. They are available for download + from the official repository . + The bzip2 compressor program and the libbz2 library. Thus you must have bzip2 installed, including development headers and libraries. If your @@ -52,6 +58,10 @@ pass the flag to configure. + The boost library of version + 1.61.0 or higher. It can be obtained from the official web site + . + The xmllint and xsltproc programs to build this manual and the man-pages. These are part of the libxml2 and @@ -77,6 +87,15 @@ modify the parser or when you are building from the Git repository. + The libseccomp is used to provide + syscall filtering on Linux. This is an optional dependency and can + be disabled passing a + option to the configure script (Not recommended + unless your system doesn't support + libseccomp). To get the library, visit . + From 6f890531084ab6596027ce0bf6ad302864affa5b Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Wed, 12 Dec 2018 01:04:34 +0100 Subject: [PATCH 1300/2196] nar-info-disk-cache: include ca in the cache entries Without this information the content-addressable state and hashes are lost after the first request, which causes signatures to be required for everything even though the path could be verified without signing.
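The fix is to persist the content address alongside the other narinfo fields: in the diff below the NARs table gains a ca column, the prepared insert statement gains a matching placeholder, and the cache file name is bumped from binary-cache-v5.sqlite to binary-cache-v6.sqlite so an old schema is never reused. A rough sketch of that versioning-by-filename convention is shown here using the plain sqlite3 C API and a heavily trimmed schema; the column set, helper name and path are illustrative only, not the actual wrapper code used by Nix.

```
// Illustrative sketch: open a schema-versioned narinfo cache that stores 'ca'.
#include <sqlite3.h>
#include <stdexcept>
#include <string>

static const int cacheSchemaVersion = 6;   // bumped when the 'ca' column was added

sqlite3 * openNarInfoCache(const std::string & cacheDir)
{
    // A new file name per schema version sidesteps ALTER TABLE migrations:
    // older cache files are simply left behind and ignored.
    std::string dbPath = cacheDir + "/binary-cache-v"
        + std::to_string(cacheSchemaVersion) + ".sqlite";

    sqlite3 * db = nullptr;
    if (sqlite3_open(dbPath.c_str(), &db) != SQLITE_OK)
        throw std::runtime_error("cannot open " + dbPath);

    // Trimmed-down table: the point is that 'ca' is cached next to the
    // signatures, so a content-addressed path stays verifiable without them.
    const char * schema =
        "create table if not exists NARs ("
        "  hashPart  text not null,"
        "  narHash   text,"
        "  sigs      text,"
        "  ca        text,"
        "  timestamp integer not null,"
        "  present   integer not null);";

    char * errMsg = nullptr;
    if (sqlite3_exec(db, schema, nullptr, nullptr, &errMsg) != SQLITE_OK) {
        std::string msg = errMsg ? errMsg : "error creating schema";
        sqlite3_free(errMsg);
        throw std::runtime_error(msg);
    }
    return db;
}
```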
--- src/libstore/nar-info-disk-cache.cc | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 35403e5df56..5fdd7ce8916 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -31,6 +31,7 @@ create table if not exists NARs ( refs text, deriver text, sigs text, + ca text, timestamp integer not null, present integer not null, primary key (cache, hashPart), @@ -72,7 +73,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache { auto state(_state.lock()); - Path dbPath = getCacheDir() + "/nix/binary-cache-v5.sqlite"; + Path dbPath = getCacheDir() + "/nix/binary-cache-v6.sqlite"; createDirs(dirOf(dbPath)); state->db = SQLite(dbPath); @@ -94,7 +95,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache state->insertNAR.create(state->db, "insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, " - "narSize, refs, deriver, sigs, timestamp, present) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1)"); + "narSize, refs, deriver, sigs, ca, timestamp, present) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1)"); state->insertMissingNAR.create(state->db, "insert or replace into NARs(cache, hashPart, timestamp, present) values (?, ?, ?, 0)"); @@ -210,6 +211,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache narInfo->deriver = cache.storeDir + "/" + queryNAR.getStr(10); for (auto & sig : tokenizeString(queryNAR.getStr(11), " ")) narInfo->sigs.insert(sig); + narInfo->ca = queryNAR.getStr(12); return {oValid, narInfo}; }); @@ -243,6 +245,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache (concatStringsSep(" ", info->shortRefs())) (info->deriver != "" ? baseNameOf(info->deriver) : "", info->deriver != "") (concatStringsSep(" ", info->sigs)) + (info->ca) (time(0)).exec(); } else { From f7425d55df5f5c39e778d437eba94540fee43f4a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 12 Dec 2018 17:12:41 +0100 Subject: [PATCH 1301/2196] Fix assertion failure in NarInfoDiskCache https://hydra.nixos.org/build/85827920 --- src/libstore/nar-info-disk-cache.cc | 30 ++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 5fdd7ce8916..32ad7f2b27f 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -101,7 +101,7 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache "insert or replace into NARs(cache, hashPart, timestamp, present) values (?, ?, ?, 0)"); state->queryNAR.create(state->db, - "select * from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))"); + "select present, namePart, url, compression, fileHash, fileSize, narHash, narSize, refs, deriver, sigs, ca from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))"); /* Periodically purge expired entries from the database. */ retrySQLite([&]() { @@ -190,28 +190,28 @@ class NarInfoDiskCacheImpl : public NarInfoDiskCache if (!queryNAR.next()) return {oUnknown, 0}; - if (!queryNAR.getInt(13)) + if (!queryNAR.getInt(0)) return {oInvalid, 0}; auto narInfo = make_ref(); - auto namePart = queryNAR.getStr(2); + auto namePart = queryNAR.getStr(1); narInfo->path = cache.storeDir + "/" + hashPart + (namePart.empty() ? 
"" : "-" + namePart); - narInfo->url = queryNAR.getStr(3); - narInfo->compression = queryNAR.getStr(4); - if (!queryNAR.isNull(5)) - narInfo->fileHash = Hash(queryNAR.getStr(5)); - narInfo->fileSize = queryNAR.getInt(6); - narInfo->narHash = Hash(queryNAR.getStr(7)); - narInfo->narSize = queryNAR.getInt(8); - for (auto & r : tokenizeString(queryNAR.getStr(9), " ")) + narInfo->url = queryNAR.getStr(2); + narInfo->compression = queryNAR.getStr(3); + if (!queryNAR.isNull(4)) + narInfo->fileHash = Hash(queryNAR.getStr(4)); + narInfo->fileSize = queryNAR.getInt(5); + narInfo->narHash = Hash(queryNAR.getStr(6)); + narInfo->narSize = queryNAR.getInt(7); + for (auto & r : tokenizeString(queryNAR.getStr(8), " ")) narInfo->references.insert(cache.storeDir + "/" + r); - if (!queryNAR.isNull(10)) - narInfo->deriver = cache.storeDir + "/" + queryNAR.getStr(10); - for (auto & sig : tokenizeString(queryNAR.getStr(11), " ")) + if (!queryNAR.isNull(9)) + narInfo->deriver = cache.storeDir + "/" + queryNAR.getStr(9); + for (auto & sig : tokenizeString(queryNAR.getStr(10), " ")) narInfo->sigs.insert(sig); - narInfo->ca = queryNAR.getStr(12); + narInfo->ca = queryNAR.getStr(11); return {oValid, narInfo}; }); From 5e6fa9092fb5be722f3568c687524416bc746423 Mon Sep 17 00:00:00 2001 From: zimbatm Date: Wed, 12 Dec 2018 14:53:00 +0100 Subject: [PATCH 1302/2196] libstore: improve hash mismatch error messages Use the same output ordering and format everywhere. This is such a common issue that we trade the single-line error message for more readability. Old message: ``` fixed-output derivation produced path '/nix/store/d4nw9x2sy9q3r32f3g5l5h1k833c01vq-example.com' with sha256 hash '08y4734bm2zahw75b16bcmcg587vvyvh0n11gwiyir70divwp1rm' instead of the expected hash '1xzwnipjd54wl8g93vpw6hxnpmdabq0wqywriiwmh7x8k0lvpq5m' ``` New message: ``` hash mismatch in fixed-output derivation '/nix/store/d4nw9x2sy9q3r32f3g5l5h1k833c01vq-example.com': wanted: sha256:1xzwnipjd54wl8g93vpw6hxnpmdabq0wqywriiwmh7x8k0lvpq5m got: sha256:08y4734bm2zahw75b16bcmcg587vvyvh0n11gwiyir70divwp1rm ``` --- src/libstore/build.cc | 4 ++-- src/libstore/download.cc | 4 ++-- src/libstore/local-store.cc | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 9c408e29c06..59abae9b90d 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3129,8 +3129,8 @@ void DerivationGoal::registerOutputs() /* Throw an error after registering the path as valid. */ delayedException = std::make_exception_ptr( - BuildError("fixed-output derivation produced path '%s' with %s hash '%s' instead of the expected hash '%s'", - dest, printHashType(h.type), printHash16or32(h2), printHash16or32(h))); + BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s", + dest, h.to_string(), h2.to_string())); Path actualDest = worker.store.toRealPath(dest); diff --git a/src/libstore/download.cc b/src/libstore/download.cc index fef2cf7a388..467f570bbf0 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -881,8 +881,8 @@ Path Downloader::downloadCached(ref store, const string & url_, bool unpa Hash gotHash = unpack ? 
hashPath(expectedHash.type, store->toRealPath(storePath)).first : hashFile(expectedHash.type, store->toRealPath(storePath)); - throw nix::Error("hash mismatch in file downloaded from '%s': got hash '%s' instead of the expected hash '%s'", - url, gotHash.to_string(), expectedHash.to_string()); + throw nix::Error("hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s", + url, expectedHash.to_string(), gotHash.to_string()); } return store->toRealPath(storePath); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index e1cb423d151..5b4e7ca4ca9 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1022,11 +1022,11 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, auto hashResult = hashSink.finish(); if (hashResult.first != info.narHash) - throw Error("hash mismatch importing path '%s'; expected hash '%s', got '%s'", + throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s", info.path, info.narHash.to_string(), hashResult.first.to_string()); if (hashResult.second != info.narSize) - throw Error("size mismatch importing path '%s'; expected %s, got %s", + throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s", info.path, info.narSize, hashResult.second); autoGC(); From 0e6c84a771a2b124a200bfbedc85e51884ea63a6 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Wed, 12 Dec 2018 22:59:03 +0100 Subject: [PATCH 1303/2196] nix repl: don't create result symlinks --- src/nix/repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/repl.cc b/src/nix/repl.cc index d93fd770e80..d4806d74adb 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -445,7 +445,7 @@ bool NixRepl::processLine(string line) /* We could do the build in this process using buildPaths(), but doing it in a child makes it easier to recover from problems / SIGINT. */ - if (runProgram(settings.nixBinDir + "/nix", Strings{"build", drvPath}) == 0) { + if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) { Derivation drv = readDerivation(drvPath); std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; for (auto & i : drv.outputs) From 21d494da8304aa496721c7695573b9ad796a30be Mon Sep 17 00:00:00 2001 From: volth Date: Thu, 13 Dec 2018 02:45:50 +0000 Subject: [PATCH 1304/2196] probably typo ...at least MSVC unable to compile this --- src/libutil/util.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index e12c4b258c2..ce50334e1e6 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -202,7 +202,7 @@ bool isInDir(const Path & path, const Path & dir) bool isDirOrInDir(const Path & path, const Path & dir) { - return path == dir or isInDir(path, dir); + return path == dir || isInDir(path, dir); } From 0cc4728f71b1a1dfd32897eeb79d6920cbf3f5aa Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Thu, 13 Dec 2018 01:50:39 -0500 Subject: [PATCH 1305/2196] docs: raise minimal boost version to 1.66 --- doc/manual/installation/prerequisites-source.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/installation/prerequisites-source.xml b/doc/manual/installation/prerequisites-source.xml index f1705221663..e87d0de21ef 100644 --- a/doc/manual/installation/prerequisites-source.xml +++ b/doc/manual/installation/prerequisites-source.xml @@ -59,7 +59,7 @@ configure. The boost library of version - 1.61.0 or higher. 
It can be obtained from the official web site + 1.66.0 or higher. It can be obtained from the official web site . The xmllint and From 6024dc1d97212130c19d3ff5ce6b1d102837eee6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 13 Dec 2018 14:30:52 +0100 Subject: [PATCH 1306/2196] Support SRI hashes SRI hashes (https://www.w3.org/TR/SRI/) combine the hash algorithm and a base-64 hash. This allows more concise and standard hash specifications. For example, instead of import { url = https://nixos.org/releases/nix/nix-2.1.3/nix-2.1.3.tar.xz; sha256 = "5d22dad058d5c800d65a115f919da22938c50dd6ba98c5e3a183172d149840a4"; }; you can write import { url = https://nixos.org/releases/nix/nix-2.1.3/nix-2.1.3.tar.xz; hash = "sha256-XSLa0FjVyADWWhFfkZ2iKTjFDda6mMXjoYMXLRSYQKQ="; }; In fixed-output derivations, the outputHashAlgo is no longer mandatory if outputHash specifies the hash (either as an SRI or in the old ":" format). 'nix hash-{file,path}' now print hashes in SRI format by default. I also reverted them to use SHA-256 by default because that's what we're using most of the time in Nixpkgs. Suggested by @zimbatm. --- corepkgs/fetchurl.nix | 8 ++++++-- src/libexpr/primops.cc | 10 ++++------ src/libutil/hash.cc | 34 ++++++++++++++++++++-------------- src/libutil/hash.hh | 7 ++++--- src/nix-store/nix-store.cc | 3 +++ src/nix/hash.cc | 23 ++++++++++++++--------- tests/fetchurl.sh | 11 +++++++++++ tests/hash.sh | 12 +++++++++++- 8 files changed, 73 insertions(+), 35 deletions(-) diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix index 0ce1bab112f..a84777f5744 100644 --- a/corepkgs/fetchurl.nix +++ b/corepkgs/fetchurl.nix @@ -1,10 +1,14 @@ { system ? "" # obsolete , url +, hash ? "" # an SRI ash + +# Legacy hash specification , md5 ? "", sha1 ? "", sha256 ? "", sha512 ? "" , outputHash ? - if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 + if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 , outputHashAlgo ? - if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" + if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" + , executable ? false , unpack ? false , name ? baseNameOf (toString url) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 7372134e2c9..60698f7402e 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -724,16 +724,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * if (outputs.size() != 1 || *(outputs.begin()) != "out") throw Error(format("multiple outputs are not supported in fixed-output derivations, at %1%") % posDrvName); - HashType ht = parseHashType(outputHashAlgo); - if (ht == htUnknown) - throw EvalError(format("unknown hash algorithm '%1%', at %2%") % outputHashAlgo % posDrvName); + HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo); Hash h(*outputHash, ht); - outputHash = h.to_string(Base16, false); - if (outputHashRecursive) outputHashAlgo = "r:" + outputHashAlgo; Path outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName); if (!jsonObject) drv.env["out"] = outPath; - drv.outputs["out"] = DerivationOutput(outPath, outputHashAlgo, *outputHash); + drv.outputs["out"] = DerivationOutput(outPath, + (outputHashRecursive ? 
"r:" : "") + printHashType(h.type), + h.to_string(Base16, false)); } else { diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 9d82f13a5e3..1c14ebb187c 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -105,9 +105,9 @@ string printHash16or32(const Hash & hash) std::string Hash::to_string(Base base, bool includeType) const { std::string s; - if (includeType) { + if (base == SRI || includeType) { s += printHashType(type); - s += ':'; + s += base == SRI ? '-' : ':'; } switch (base) { case Base16: @@ -117,6 +117,7 @@ std::string Hash::to_string(Base base, bool includeType) const s += printHash32(*this); break; case Base64: + case SRI: s += base64Encode(std::string((const char *) hash, hashSize)); break; } @@ -127,28 +128,33 @@ std::string Hash::to_string(Base base, bool includeType) const Hash::Hash(const std::string & s, HashType type) : type(type) { - auto colon = s.find(':'); - size_t pos = 0; - - if (colon == string::npos) { - if (type == htUnknown) + bool isSRI = false; + + auto sep = s.find(':'); + if (sep == string::npos) { + sep = s.find('-'); + if (sep != string::npos) { + isSRI = true; + } else if (type == htUnknown) throw BadHash("hash '%s' does not include a type", s); - } else { - string hts = string(s, 0, colon); + } + + if (sep != string::npos) { + string hts = string(s, 0, sep); this->type = parseHashType(hts); if (this->type == htUnknown) throw BadHash("unknown hash type '%s'", hts); if (type != htUnknown && type != this->type) throw BadHash("hash '%s' should have type '%s'", s, printHashType(type)); - pos = colon + 1; + pos = sep + 1; } init(); size_t size = s.size() - pos; - if (size == base16Len()) { + if (!isSRI && size == base16Len()) { auto parseHexDigit = [&](char c) { if (c >= '0' && c <= '9') return c - '0'; @@ -164,7 +170,7 @@ Hash::Hash(const std::string & s, HashType type) } } - else if (size == base32Len()) { + else if (!isSRI && size == base32Len()) { for (unsigned int n = 0; n < size; ++n) { char c = s[pos + size - n - 1]; @@ -187,10 +193,10 @@ Hash::Hash(const std::string & s, HashType type) } } - else if (size == base64Len()) { + else if (isSRI || size == base64Len()) { auto d = base64Decode(std::string(s, pos)); if (d.size() != hashSize) - throw BadHash("invalid base-64 hash '%s'", s); + throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", s); assert(hashSize); memcpy(hash, d.data(), hashSize); } diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index fd7a61df8e4..2dbc3b63081 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -20,7 +20,7 @@ const int sha512HashSize = 64; extern const string base32Chars; -enum Base : int { Base64, Base32, Base16 }; +enum Base : int { Base64, Base32, Base16, SRI }; struct Hash @@ -38,8 +38,9 @@ struct Hash Hash(HashType type) : type(type) { init(); }; /* Initialize the hash from a string representation, in the format - "[:]". If the 'type' argument is - htUnknown, then the hash type must be specified in the + "[:]" or "-" (a + Subresource Integrity hash expression). If the 'type' argument + is htUnknown, then the hash type must be specified in the string. 
*/ Hash(const std::string & s, HashType type = htUnknown); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index a9ad14762e6..5b37237eb16 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -1000,6 +1000,9 @@ static int _main(int argc, char * * argv) Strings opFlags, opArgs; Operation op = 0; + Hash h("sha512-Q2bFTOhEALkN8hOms2FKTDLy7eugP2zFZ1T8LCvX42Fp3WoNr3bjZSAHeOsHrbV1Fu9/A0EzCinRE7Af1ofPrw=="); + printError("GOT HASH %s", h.to_string(Base64)); + parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { Operation oldOp = op; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 64062fb9795..af4105e2890 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -9,13 +9,14 @@ struct CmdHash : Command { enum Mode { mFile, mPath }; Mode mode; - Base base = Base16; + Base base = SRI; bool truncate = false; - HashType ht = htSHA512; + HashType ht = htSHA256; std::vector paths; CmdHash(Mode mode) : mode(mode) { + mkFlag(0, "sri", "print hash in SRI format", &base, SRI); mkFlag(0, "base64", "print hash in base-64", &base, Base64); mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32); mkFlag(0, "base16", "print hash in base-16", &base, Base16); @@ -43,7 +44,7 @@ struct CmdHash : Command Hash h = mode == mFile ? hashFile(ht, path) : hashPath(ht, path).first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); std::cout << format("%1%\n") % - h.to_string(base, false); + h.to_string(base, base == SRI); } } }; @@ -54,7 +55,7 @@ static RegisterCommand r2(make_ref(CmdHash::mPath)); struct CmdToBase : Command { Base base; - HashType ht = htSHA512; + HashType ht = htUnknown; std::vector args; CmdToBase(Base base) : base(base) @@ -70,26 +71,30 @@ struct CmdToBase : Command return base == Base16 ? "to-base16" : base == Base32 ? "to-base32" : - "to-base64"; + base == Base64 ? "to-base64" : + "to-sri"; } std::string description() override { - return fmt("convert a hash to base-%d representation", - base == Base16 ? 16 : - base == Base32 ? 32 : 64); + return fmt("convert a hash to %s representation", + base == Base16 ? "base-16" : + base == Base32 ? "base-32" : + base == Base64 ? "base-64" : + "SRI"); } void run() override { for (auto s : args) - std::cout << fmt("%s\n", Hash(s, ht).to_string(base, false)); + std::cout << fmt("%s\n", Hash(s, ht).to_string(base, base == SRI)); } }; static RegisterCommand r3(make_ref(Base16)); static RegisterCommand r4(make_ref(Base32)); static RegisterCommand r5(make_ref(Base64)); +static RegisterCommand r6(make_ref(SRI)); /* Legacy nix-hash command. */ static int compatNixHash(int argc, char * * argv) diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index 9bbf044f732..d51d081f5e3 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -18,6 +18,17 @@ outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh cmp $outPath fetchurl.sh +# Now using an SRI hash. +clearStore + +hash=$(nix hash-file ./fetchurl.sh) + +[[ $hash =~ ^sha512- ]] + +outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link --hashed-mirrors '') + +cmp $outPath fetchurl.sh + # Test the hashed mirror feature. 
clearStore diff --git a/tests/hash.sh b/tests/hash.sh index 9f234bc635b..4cfc9790101 100644 --- a/tests/hash.sh +++ b/tests/hash.sh @@ -2,7 +2,7 @@ source common.sh try () { printf "%s" "$2" > $TEST_ROOT/vector - hash=$(nix-hash $EXTRA --flat --type "$1" $TEST_ROOT/vector) + hash=$(nix hash-file --base16 $EXTRA --type "$1" $TEST_ROOT/vector) if test "$hash" != "$3"; then echo "hash $1, expected $3, got $hash" exit 1 @@ -33,6 +33,12 @@ EXTRA=--base32 try sha256 "abc" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" EXTRA= +EXTRA=--sri +try sha512 "" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" +try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5ktV05ohkpkqJ0/BqDa6PCOj/uu9RU1EI2Q86A4qmslPpUyknw==" +try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha512-IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ==" +try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha256-JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE=" + try2 () { hash=$(nix-hash --type "$1" $TEST_ROOT/hash-path) if test "$hash" != "$2"; then @@ -65,12 +71,16 @@ try2 md5 "f78b733a68f5edbdf9413899339eaa4a" try3() { h64=$(nix to-base64 --type "$1" "$2") [ "$h64" = "$4" ] + sri=$(nix to-sri --type "$1" "$2") + [ "$sri" = "$1-$4" ] h32=$(nix-hash --type "$1" --to-base32 "$2") [ "$h32" = "$3" ] h16=$(nix-hash --type "$1" --to-base16 "$h32") [ "$h16" = "$2" ] h16=$(nix to-base16 --type "$1" "$h64") [ "$h16" = "$2" ] + h16=$(nix to-base16 "$sri") + [ "$h16" = "$2" ] } try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8=" try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" From 567941fb59960a05a5d99208fb229c8777977023 Mon Sep 17 00:00:00 2001 From: Patrick Hilhorst Date: Fri, 14 Dec 2018 09:36:19 +0100 Subject: [PATCH 1307/2196] Clarify nix-instantiate --read-write-mode --- doc/manual/command-ref/nix-instantiate.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/command-ref/nix-instantiate.xml b/doc/manual/command-ref/nix-instantiate.xml index 39c1282fcc3..53f06aed124 100644 --- a/doc/manual/command-ref/nix-instantiate.xml +++ b/doc/manual/command-ref/nix-instantiate.xml @@ -154,7 +154,9 @@ input. When used with , perform evaluation in read/write mode so nix language features that require it will still work (at the cost of needing to do - instantiation of every evaluated derivation). + instantiation of every evaluated derivation). If this option is + not enabled, there may be uninstantiated store paths in the final + output. From 7e35e914c1aa24957107c666c76f1d834ebae90a Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Fri, 14 Dec 2018 20:07:23 +0100 Subject: [PATCH 1308/2196] fetchGit: allow fetching explicit refs Trying to fetch refs that are not in refs/heads currently fails because it looks for refs/heads/refs/foo instead of refs/foo. eg. 
builtins.fetchGit { url = https://github.com/NixOS/nixpkgs.git; ref = "refs/pull/1024/head; } --- src/libexpr/primops/fetchGit.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc index b46d2f25826..588b0fa4d53 100644 --- a/src/libexpr/primops/fetchGit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -94,7 +94,11 @@ GitInfo exportGit(ref store, const std::string & uri, runProgram("git", true, { "init", "--bare", cacheDir }); } - Path localRefFile = cacheDir + "/refs/heads/" + *ref; + Path localRefFile; + if (ref->compare(0, 5, "refs/") == 0) + localRefFile = cacheDir + "/" + *ref; + else + localRefFile = cacheDir + "/refs/heads/" + *ref; bool doFetch; time_t now = time(0); From aa7e52abff64c4211b65e4092d17a35b9cccf36e Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Fri, 14 Dec 2018 22:37:20 -0600 Subject: [PATCH 1309/2196] tests/fetchurl: fix after changing default hash from 512 to 256 --- tests/fetchurl.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index d51d081f5e3..ec3399b08d0 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -23,7 +23,7 @@ clearStore hash=$(nix hash-file ./fetchurl.sh) -[[ $hash =~ ^sha512- ]] +[[ $hash =~ ^sha256- ]] outPath=$(nix-build '' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link --hashed-mirrors '') From c1112ae9a229c188f3415e69a5997d77383f6643 Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Sat, 15 Dec 2018 09:59:47 -0600 Subject: [PATCH 1310/2196] nix-store: remove debugging print --- src/nix-store/nix-store.cc | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 5b37237eb16..a9ad14762e6 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -1000,9 +1000,6 @@ static int _main(int argc, char * * argv) Strings opFlags, opArgs; Operation op = 0; - Hash h("sha512-Q2bFTOhEALkN8hOms2FKTDLy7eugP2zFZ1T8LCvX42Fp3WoNr3bjZSAHeOsHrbV1Fu9/A0EzCinRE7Af1ofPrw=="); - printError("GOT HASH %s", h.to_string(Base64)); - parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) { Operation oldOp = op; From 82f054d7d5fc8f9de45afa7107557644d1514c98 Mon Sep 17 00:00:00 2001 From: Daiderd Jordan Date: Thu, 20 Dec 2018 20:05:14 +0100 Subject: [PATCH 1311/2196] installer: update macOS version check to 10.12.2 Nixpkgs will drop support for <10.12 soon and thus a nix release built using the 19.03 channel will also require a newer version of macOS. --- scripts/install-nix-from-closure.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh index ab20774bbf0..e635dcab391 100644 --- a/scripts/install-nix-from-closure.sh +++ b/scripts/install-nix-from-closure.sh @@ -22,10 +22,12 @@ if [ -z "$HOME" ]; then exit 1 fi -# macOS support for 10.10 or higher +# macOS support for 10.12.6 or higher if [ "$(uname -s)" = "Darwin" ]; then - if [ $(($(sw_vers -productVersion | cut -d '.' -f 2))) -lt 10 ]; then - echo "$0: macOS $(sw_vers -productVersion) is not supported, upgrade to 10.10 or higher" + macos_major=$(sw_vers -productVersion | cut -d '.' -f 2) + macos_minor=$(sw_vers -productVersion | cut -d '.' 
-f 3) + if [ "$macos_major" -lt 12 ] || ([ "$macos_major" -eq 12 ] && [ "$macos_minor" -lt 6 ]); then + echo "$0: macOS $(sw_vers -productVersion) is not supported, upgrade to 10.12.6 or higher" exit 1 fi fi From e78511743eaf7eca6f51480bad590ab6548519ab Mon Sep 17 00:00:00 2001 From: Will Dietz Date: Mon, 17 Dec 2018 11:43:15 -0600 Subject: [PATCH 1312/2196] nlohmann: 3.0.1 -> 3.4.0 ``` $ curl -L "https://github.com/nlohmann/json/releases/download/v3.4.0/json.hpp" -o src/nlohmann/json.hpp ``` --- src/nlohmann/json.hpp | 14718 +++++++++++++++++++++++++++------------- 1 file changed, 10059 insertions(+), 4659 deletions(-) diff --git a/src/nlohmann/json.hpp b/src/nlohmann/json.hpp index 5b0b0ea5b30..1e7cf51e0ac 100644 --- a/src/nlohmann/json.hpp +++ b/src/nlohmann/json.hpp @@ -1,11 +1,12 @@ /* __ _____ _____ _____ __| | __| | | | JSON for Modern C++ -| | |__ | | | | | | version 3.0.1 +| | |__ | | | | | | version 3.4.0 |_____|_____|_____|_|___| https://github.com/nlohmann/json Licensed under the MIT License . -Copyright (c) 2013-2017 Niels Lohmann . +SPDX-License-Identifier: MIT +Copyright (c) 2013-2018 Niels Lohmann . Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -29,42 +30,104 @@ SOFTWARE. #ifndef NLOHMANN_JSON_HPP #define NLOHMANN_JSON_HPP -#include // all_of, copy, fill, find, for_each, generate_n, none_of, remove, reverse, transform -#include // array +#define NLOHMANN_JSON_VERSION_MAJOR 3 +#define NLOHMANN_JSON_VERSION_MINOR 4 +#define NLOHMANN_JSON_VERSION_PATCH 0 + +#include // all_of, find, for_each #include // assert #include // and, not, or -#include // lconv, localeconv -#include // isfinite, labs, ldexp, signbit #include // nullptr_t, ptrdiff_t, size_t -#include // int64_t, uint64_t -#include // abort, strtod, strtof, strtold, strtoul, strtoll, strtoull -#include // memcpy, strlen -#include // forward_list -#include // function, hash, less +#include // hash, less #include // initializer_list -#include // hex -#include // istream, ostream -#include // advance, begin, back_inserter, bidirectional_iterator_tag, distance, end, inserter, iterator, iterator_traits, next, random_access_iterator_tag, reverse_iterator -#include // numeric_limits -#include // locale -#include // map -#include // addressof, allocator, allocator_traits, unique_ptr +#include // istream, ostream +#include // iterator_traits, random_access_iterator_tag #include // accumulate -#include // stringstream -#include // getline, stoi, string, to_string -#include // add_pointer, conditional, decay, enable_if, false_type, integral_constant, is_arithmetic, is_base_of, is_const, is_constructible, is_convertible, is_default_constructible, is_enum, is_floating_point, is_integral, is_nothrow_move_assignable, is_nothrow_move_constructible, is_pointer, is_reference, is_same, is_scalar, is_signed, remove_const, remove_cv, remove_pointer, remove_reference, true_type, underlying_type -#include // declval, forward, make_pair, move, pair, swap -#include // valarray +#include // string, stoi, to_string +#include // declval, forward, move, pair, swap + +// #include +#ifndef NLOHMANN_JSON_FWD_HPP +#define NLOHMANN_JSON_FWD_HPP + +#include // int64_t, uint64_t +#include // map +#include // allocator +#include // string #include // vector +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ +/*! 
+@brief default JSONSerializer template argument + +This serializer ignores the template arguments and uses ADL +([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) +for serialization. +*/ +template +struct adl_serializer; + +template class ObjectType = + std::map, + template class ArrayType = std::vector, + class StringType = std::string, class BooleanType = bool, + class NumberIntegerType = std::int64_t, + class NumberUnsignedType = std::uint64_t, + class NumberFloatType = double, + template class AllocatorType = std::allocator, + template class JSONSerializer = + adl_serializer> +class basic_json; + +/*! +@brief JSON Pointer + +A JSON pointer defines a string syntax for identifying a specific value +within a JSON document. It can be used with functions `at` and +`operator[]`. Furthermore, JSON pointers are the base for JSON patches. + +@sa [RFC 6901](https://tools.ietf.org/html/rfc6901) + +@since version 2.0.0 +*/ +template +class json_pointer; + +/*! +@brief default JSON class + +This type is the default specialization of the @ref basic_json class which +uses the standard template types. + +@since version 1.0.0 +*/ +using json = basic_json<>; +} // namespace nlohmann + +#endif + +// #include + + +// This file contains all internal macro definitions +// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them + // exclude unsupported compilers -#if defined(__clang__) - #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 - #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" - #endif -#elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) - #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40900 - #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" +#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) + #if defined(__clang__) + #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 + #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) + #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 + #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" + #endif #endif #endif @@ -90,14 +153,36 @@ SOFTWARE. 
#endif // allow to disable exceptions -#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && not defined(JSON_NOEXCEPTION) +#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) #define JSON_THROW(exception) throw exception #define JSON_TRY try #define JSON_CATCH(exception) catch(exception) + #define JSON_INTERNAL_CATCH(exception) catch(exception) #else #define JSON_THROW(exception) std::abort() #define JSON_TRY if(true) #define JSON_CATCH(exception) if(false) + #define JSON_INTERNAL_CATCH(exception) if(false) +#endif + +// override exception macros +#if defined(JSON_THROW_USER) + #undef JSON_THROW + #define JSON_THROW JSON_THROW_USER +#endif +#if defined(JSON_TRY_USER) + #undef JSON_TRY + #define JSON_TRY JSON_TRY_USER +#endif +#if defined(JSON_CATCH_USER) + #undef JSON_CATCH + #define JSON_CATCH JSON_CATCH_USER + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_CATCH_USER +#endif +#if defined(JSON_INTERNAL_CATCH_USER) + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER #endif // manual branch prediction @@ -118,25 +203,35 @@ SOFTWARE. #endif /*! -@brief namespace for Niels Lohmann -@see https://github.com/nlohmann -@since version 1.0.0 +@brief macro to briefly define a mapping between an enum and JSON +@def NLOHMANN_JSON_SERIALIZE_ENUM +@since version 3.4.0 */ -namespace nlohmann -{ -template -struct adl_serializer; - -// forward declaration of basic_json (required to split the class) -template class ObjectType = std::map, - template class ArrayType = std::vector, - class StringType = std::string, class BooleanType = bool, - class NumberIntegerType = std::int64_t, - class NumberUnsignedType = std::uint64_t, - class NumberFloatType = double, - template class AllocatorType = std::allocator, - template class JSONSerializer = adl_serializer> -class basic_json; +#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) \ + template \ + inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [e](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.first == e; \ + }); \ + j = ((it != std::end(m)) ? it : std::begin(m))->second; \ + } \ + template \ + inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [j](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.second == j; \ + }); \ + e = ((it != std::end(m)) ? it : std::begin(m))->first; \ + } // Ugly macros to avoid uglier copy-paste when specializing basic_json. They // may be removed in the future once the class is split. @@ -154,910 +249,991 @@ class basic_json; NumberIntegerType, NumberUnsignedType, NumberFloatType, \ AllocatorType, JSONSerializer> +// #include -/*! -@brief unnamed namespace with internal helper functions -This namespace collects some functions that could not be defined inside the -@ref basic_json class. +#include // not +#include // size_t +#include // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type -@since version 2.1.0 -*/ +namespace nlohmann +{ namespace detail { -//////////////// -// exceptions // -//////////////// - -/*! 
-@brief general exception of the @ref basic_json class - -This class is an extension of `std::exception` objects with a member @a id for -exception ids. It is used as the base class for all exceptions thrown by the -@ref basic_json class. This class can hence be used as "wildcard" to catch -exceptions. - -Subclasses: -- @ref parse_error for exceptions indicating a parse error -- @ref invalid_iterator for exceptions indicating errors with iterators -- @ref type_error for exceptions indicating executing a member function with - a wrong type -- @ref out_of_range for exceptions indicating access out of the defined range -- @ref other_error for exceptions indicating other library errors - -@internal -@note To have nothrow-copy-constructible exceptions, we internally use - `std::runtime_error` which can cope with arbitrary-length error messages. - Intermediate strings are built with static functions and then passed to - the actual constructor. -@endinternal +// alias templates to reduce boilerplate +template +using enable_if_t = typename std::enable_if::type; -@liveexample{The following code shows how arbitrary library exceptions can be -caught.,exception} +template +using uncvref_t = typename std::remove_cv::type>::type; -@since version 3.0.0 -*/ -class exception : public std::exception +// implementation of C++14 index_sequence and affiliates +// source: https://stackoverflow.com/a/32223343 +template +struct index_sequence { - public: - /// returns the explanatory string - const char* what() const noexcept override + using type = index_sequence; + using value_type = std::size_t; + static constexpr std::size_t size() noexcept { - return m.what(); + return sizeof...(Ints); } +}; - /// the id of the exception - const int id; +template +struct merge_and_renumber; - protected: - exception(int id_, const char* what_arg) : id(id_), m(what_arg) {} +template +struct merge_and_renumber, index_sequence> + : index_sequence < I1..., (sizeof...(I1) + I2)... > {}; - static std::string name(const std::string& ename, int id_) - { - return "[json.exception." + ename + "." + std::to_string(id_) + "] "; - } +template +struct make_index_sequence + : merge_and_renumber < typename make_index_sequence < N / 2 >::type, + typename make_index_sequence < N - N / 2 >::type > {}; - private: - /// an exception object as storage for error messages - std::runtime_error m; +template<> struct make_index_sequence<0> : index_sequence<> {}; +template<> struct make_index_sequence<1> : index_sequence<0> {}; + +template +using index_sequence_for = make_index_sequence; + +// dispatch utility (taken from ranges-v3) +template struct priority_tag : priority_tag < N - 1 > {}; +template<> struct priority_tag<0> {}; + +// taken from ranges-v3 +template +struct static_const +{ + static constexpr T value{}; }; -/*! -@brief exception indicating a parse error +template +constexpr T static_const::value; +} // namespace detail +} // namespace nlohmann -This exception is thrown by the library when a parse error occurs. Parse errors -can occur during the deserialization of JSON text, CBOR, MessagePack, as well -as when using JSON Patch. +// #include -Member @a byte holds the byte index of the last read character in the input -file. -Exceptions have ids 1xx. 
+#include <ciso646> // not
+#include <limits> // numeric_limits
+#include <type_traits> // false_type, is_constructible, is_integral, is_same, true_type
+#include <utility> // declval
 
-name / id | example message | description
------------------------------- | --------------- | -------------------------
-json.exception.parse_error.101 | parse error at 2: unexpected end of input; expected string literal | This error indicates a syntax error while deserializing a JSON text. The error message describes that an unexpected token (character) was encountered, and the member @a byte indicates the error position.
-json.exception.parse_error.102 | parse error at 14: missing or wrong low surrogate | JSON uses the `\uxxxx` format to describe Unicode characters. Code points above 0xFFFF are split into two `\uxxxx` entries ("surrogate pairs"). This error indicates that the surrogate pair is incomplete or contains an invalid code point.
-json.exception.parse_error.103 | parse error: code points above 0x10FFFF are invalid | Unicode supports code points up to 0x10FFFF. Code points above 0x10FFFF are invalid.
-json.exception.parse_error.104 | parse error: JSON patch must be an array of objects | [RFC 6902](https://tools.ietf.org/html/rfc6902) requires a JSON Patch document to be a JSON document that represents an array of objects.
-json.exception.parse_error.105 | parse error: operation must have string member 'op' | An operation of a JSON Patch document must contain exactly one "op" member, whose value indicates the operation to perform. Its value must be one of "add", "remove", "replace", "move", "copy", or "test"; other values are errors.
-json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number without a leading `0`.
-json.exception.parse_error.107 | parse error: JSON pointer must be empty or begin with '/' - was: 'foo' | A JSON Pointer must be a Unicode string containing a sequence of zero or more reference tokens, each prefixed by a `/` character.
-json.exception.parse_error.108 | parse error: escape character '~' must be followed with '0' or '1' | In a JSON Pointer, only `~0` and `~1` are valid escape sequences.
-json.exception.parse_error.109 | parse error: array index 'one' is not a number | A JSON Pointer array index must be a number.
-json.exception.parse_error.110 | parse error at 1: cannot read 2 bytes from vector | When parsing CBOR or MessagePack, the byte vector ends before the complete value has been read.
-json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xF8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read.
-json.exception.parse_error.113 | parse error at 2: expected a CBOR string; last byte: 0x98 | While parsing a map key, a value that is not a string has been read.
+// #include
 
-@note For an input with n bytes, 1 is the index of the first character and n+1
-      is the index of the terminating null byte or the end of file. This also
-      holds true when reading a byte vector (CBOR or MessagePack).
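The parse_error ids tabulated in the removed documentation block surface to users through nlohmann::json::parse_error, which carries both the numeric id and the byte offset of the failure. A hedged usage sketch (assuming the header is reachable as <nlohmann/json.hpp>; the truncated input string is just an example):

    #include <iostream>
    #include <nlohmann/json.hpp>

    int main()
    {
        try {
            auto j = nlohmann::json::parse("{\"answer\": 42");   // truncated on purpose
        } catch (const nlohmann::json::parse_error& e) {
            // e.id is the numeric code from the table (101 for unexpected end of
            // input), e.byte is the position of the last character read
            std::cerr << "parse error " << e.id << " at byte " << e.byte
                      << ": " << e.what() << '\n';
        }
    }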
+// #include
 
-@liveexample{The following code shows how a `parse_error` exception can be
-caught.,parse_error}
 
-@sa @ref exception for the base class of the library exceptions
-@sa @ref invalid_iterator for exceptions indicating errors with iterators
-@sa @ref type_error for exceptions indicating executing a member function with
-    a wrong type
-@sa @ref out_of_range for exceptions indicating access out of the defined range
-@sa @ref other_error for exceptions indicating other library errors
-@since version 3.0.0
-*/
-class parse_error : public exception
+#include
 
+// #include
 
+namespace nlohmann
 {
-  public:
-    /*!
-    @brief create a parse error exception
-    @param[in] id_       the id of the exception
-    @param[in] byte_     the byte index where the error occurred (or 0 if the
-                         position cannot be determined)
-    @param[in] what_arg  the explanatory string
-    @return parse_error object
-    */
-    static parse_error create(int id_, std::size_t byte_, const std::string& what_arg)
-    {
-        std::string w = exception::name("parse_error", id_) + "parse error" +
-                        (byte_ != 0 ? (" at " + std::to_string(byte_)) : "") +
-                        ": " + what_arg;
-        return parse_error(id_, byte_, w.c_str());
-    }
+namespace detail
+{
+template<typename... Ts> struct make_void
+{
+    using type = void;
+};
+template<typename... Ts> using void_t = typename make_void<Ts...>::type;
+} // namespace detail
+} // namespace nlohmann
 
-    /*!
-    @brief byte index of the parse error
 
-    The byte index of the last read character in the input file.
+// http://en.cppreference.com/w/cpp/experimental/is_detected
+namespace nlohmann
+{
+namespace detail
+{
+struct nonesuch
+{
+    nonesuch() = delete;
+    ~nonesuch() = delete;
+    nonesuch(nonesuch const&) = delete;
+    void operator=(nonesuch const&) = delete;
+};
 
-    @note For an input with n bytes, 1 is the index of the first character and
-          n+1 is the index of the terminating null byte or the end of file.
-          This also holds true when reading a byte vector (CBOR or MessagePack).
-    */
-    const std::size_t byte;
+template<class Default,
+         class AlwaysVoid,
+         template<class...> class Op,
+         class... Args>
+struct detector
+{
+    using value_t = std::false_type;
+    using type = Default;
+};
 
-  private:
-    parse_error(int id_, std::size_t byte_, const char* what_arg)
-        : exception(id_, what_arg), byte(byte_) {}
+template<class Default, template<class...> class Op, class... Args>
+struct detector<Default, void_t<Op<Args...>>, Op, Args...>
+{
+    using value_t = std::true_type;
+    using type = Op<Args...>;
+};
 };
 
-/*!
-@brief exception indicating errors with iterators
 
+template
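The nonesuch/detector pair added here is a backport of the C++17 detection idiom (std::experimental::is_detected): the void_t-based partial specialization selects the true_type branch exactly when Op<Args...> is well-formed. A self-contained sketch of how such a detector is typically used, re-declaring the machinery so it compiles on its own (the value_type_t probe is an invented example, not part of the patch):

    #include <type_traits>
    #include <vector>

    // standalone re-creation of the machinery added above
    template<typename... Ts> struct make_void { using type = void; };
    template<typename... Ts> using void_t = typename make_void<Ts...>::type;

    struct nonesuch {};  // simplified placeholder for "no such type"

    template<class Default, class AlwaysVoid, template<class...> class Op, class... Args>
    struct detector
    {
        using value_t = std::false_type;
        using type = Default;
    };

    template<class Default, template<class...> class Op, class... Args>
    struct detector<Default, void_t<Op<Args...>>, Op, Args...>
    {
        using value_t = std::true_type;
        using type = Op<Args...>;
    };

    template<template<class...> class Op, class... Args>
    using is_detected = typename detector<nonesuch, void, Op, Args...>::value_t;

    // the "operation" being probed: does T expose a nested value_type?
    template<typename T>
    using value_type_t = typename T::value_type;

    static_assert(is_detected<value_type_t, std::vector<int>>::value,
                  "std::vector<int> has a value_type");
    static_assert(!is_detected<value_type_t, int>::value,
                  "int has no value_type");

    int main() {}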