Merge branch 'no-manifests'

commit 15e1b2c223
Eelco Dolstra, 2012-08-27 14:34:51 -04:00
60 changed files with 2411 additions and 1323 deletions

.gitignore

@@ -35,8 +35,11 @@ Makefile.in
 # /doc/manual/
 /doc/manual/manual.html
+/doc/manual/manual.xmli
+/doc/manual/manual.pdf
 /doc/manual/manual.is-valid
 /doc/manual/*.1
+/doc/manual/*.5
 /doc/manual/*.8
 /doc/manual/images
 /doc/manual/version.txt
@@ -60,6 +63,7 @@ Makefile.in
 /scripts/GeneratePatches.pm
 /scripts/download-using-manifests.pl
 /scripts/copy-from-other-stores.pl
+/scripts/download-from-binary-cache.pl
 /scripts/find-runtime-roots.pl
 /scripts/build-remote.pl
 /scripts/nix-reduce-build

configure.ac

@@ -25,12 +25,12 @@ AC_ARG_WITH(system, AC_HELP_STRING([--with-system=SYSTEM],
 case "$host_os" in
   linux-gnu*)
     # For backward compatibility, strip the `-gnu' part.
    system="$machine_name-linux";;
   *)
     # Strip the version number from names such as `gnu0.3',
     # `darwin10.2.0', etc.
     system="$machine_name-`echo $host_os | "$SED" -e's/@<:@0-9.@:>@*$//g'`";;
 esac])
 sys_name=$(uname -s | tr 'A-Z ' 'a-z_')
@@ -40,7 +40,7 @@ case $sys_name in
   sys_name=cygwin
   ;;
 esac
 AC_MSG_RESULT($system)
 AC_SUBST(system)
 AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier (`cpu-os')])
@@ -178,6 +178,7 @@ NEED_PROG(perl, perl)
 NEED_PROG(sed, sed)
 NEED_PROG(tar, tar)
 NEED_PROG(bzip2, bzip2)
+NEED_PROG(xz, xz)
 AC_PATH_PROG(dot, dot)
 AC_PATH_PROG(dblatex, dblatex)
 AC_PATH_PROG(gzip, gzip)
@@ -266,7 +267,7 @@ if test "$gc" = yes; then
 fi
 
-# Check for the required Perl dependencies (DBI and DBD::SQLite).
+# Check for the required Perl dependencies (DBI, DBD::SQLite and WWW::Curl).
 perlFlags="-I$perllibdir"
 
 AC_ARG_WITH(dbi, AC_HELP_STRING([--with-dbi=PATH],
@@ -277,13 +278,24 @@ AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH],
   [prefix of the Perl DBD::SQLite library]),
   perlFlags="$perlFlags -I$withval")
 
+AC_ARG_WITH(www-curl, AC_HELP_STRING([--with-www-curl=PATH],
+  [prefix of the Perl WWW::Curl library]),
+  perlFlags="$perlFlags -I$withval")
+
 AC_MSG_CHECKING([whether DBD::SQLite works])
 if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then
   AC_MSG_RESULT(no)
   AC_MSG_FAILURE([The Perl modules DBI and/or DBD::SQLite are missing.])
 fi
 AC_MSG_RESULT(yes)
 
+AC_MSG_CHECKING([whether WWW::Curl works])
+if ! $perl $perlFlags -e 'use WWW::Curl;' 2>&5; then
+  AC_MSG_RESULT(no)
+  AC_MSG_FAILURE([The Perl module WWW::Curl is missing.])
+fi
+AC_MSG_RESULT(yes)
+
 AC_SUBST(perlFlags)
@@ -327,6 +339,18 @@ eval dynlib_suffix=$shrext_cmds
 AC_SUBST(dynlib_suffix)
 
+# Do we have GNU tar?
+AC_MSG_CHECKING([if you have GNU tar])
+if $tar --version 2> /dev/null | grep -q GNU; then
+  AC_MSG_RESULT(yes)
+  tarFlags="--warning=no-timestamp"
+else
+  AC_MSG_RESULT(no)
+fi
+AC_SUBST(tarFlags)
+
 AM_CONFIG_HEADER([config.h])
 AC_CONFIG_FILES([Makefile
   src/Makefile

corepkgs/Makefile.am

@@ -1,6 +1,6 @@
 all-local: config.nix
 
-files = nar.nix buildenv.nix buildenv.pl unpack-channel.nix unpack-channel.sh derivation.nix fetchurl.nix \
+files = nar.nix buildenv.nix buildenv.pl unpack-channel.nix derivation.nix fetchurl.nix \
   imported-drv-to-derivation.nix
 
 install-exec-local:

corepkgs/config.nix.in

@@ -6,8 +6,10 @@ in {
   perl = "@perl@";
   shell = "@shell@";
   coreutils = "@coreutils@";
-  bzip2 = fromEnv "NIX_BZIP2" "@bzip2@";
+  bzip2 = "@bzip2@";
+  xz = "@xz@";
   tar = "@tar@";
+  tarFlags = "@tarFlags@";
   tr = "@tr@";
   curl = "@curl@";
   nixBinDir = fromEnv "NIX_BIN_DIR" "@bindir@";

corepkgs/nar.nix

@@ -6,28 +6,37 @@ let
     ''
       export PATH=${nixBinDir}:${coreutils}
 
+      if [ $compressionType = "xz" ]; then
+        ext=xz
+        compressor="${xz} -9"
+      else
+        ext=bz2
+        compressor="${bzip2}"
+      fi
+
       echo "packing $storePath..."
       mkdir $out
-      dst=$out/tmp.nar.bz2
+      dst=$out/tmp.nar.$ext
       set -o pipefail
-      nix-store --dump "$storePath" | ${bzip2} > $dst
+      nix-store --dump "$storePath" | $compressor > $dst
 
-      nix-hash --flat --type $hashAlgo --base32 $dst > $out/narbz2-hash
+      hash=$(nix-hash --flat --type $hashAlgo --base32 $dst)
+      echo -n $hash > $out/nar-compressed-hash
 
-      mv $out/tmp.nar.bz2 $out/$(cat $out/narbz2-hash).nar.bz2
+      mv $dst $out/$hash.nar.$ext
     '';
 
 in
 
-{ storePath, hashAlgo }:
+{ storePath, hashAlgo, compressionType }:
 
 derivation {
   name = "nar";
   system = builtins.currentSystem;
   builder = shell;
   args = [ "-e" builder ];
-  inherit storePath hashAlgo;
+  inherit storePath hashAlgo compressionType;
 
   # Don't build in a chroot because Nix's dependencies may not be there.
   __noChroot = true;

corepkgs/unpack-channel.nix

@@ -1,14 +1,31 @@
 with import <nix/config.nix>;
 
-{ name, channelName, src }:
+let
+
+  builder = builtins.toFile "unpack-channel.sh"
+    ''
+      mkdir $out
+      cd $out
+      ${bzip2} -d < $src | ${tar} xf - --warning=no-timestamp
+      mv * $out/$channelName
+      if [ -n "$binaryCacheURL" ]; then
+        mkdir $out/binary-caches
+        echo -n "$binaryCacheURL" > $out/binary-caches/$channelName
+      fi
+    '';
+
+in
+
+{ name, channelName, src, binaryCacheURL ? "" }:
 
 derivation {
   system = builtins.currentSystem;
   builder = shell;
-  args = [ "-e" ./unpack-channel.sh ];
-  inherit name channelName src bzip2 tar tr;
+  args = [ "-e" builder ];
+  inherit name channelName src binaryCacheURL;
   PATH = "${nixBinDir}:${coreutils}";
 
   # No point in doing this remotely.
   preferLocalBuild = true;

corepkgs/unpack-channel.sh (deleted)

@ -1,4 +0,0 @@
mkdir $out
cd $out
$bzip2 -d < $src | $tar xf -
mv * $out/$channelName

doc/manual/conf-file.xml

@@ -30,6 +30,9 @@ gc-keep-derivations = true   # Idem
 env-keep-derivations = false
 </programlisting>
 
+<para>You can override settings using the <option>--option</option>
+flag, e.g. <literal>--option gc-keep-outputs false</literal>.</para>
+
 <para>The following settings are currently available:
 
 <variablelist>
@@ -243,6 +246,16 @@ env-keep-derivations = false
 
 </varlistentry>
 
+<varlistentry><term><literal>build-fallback</literal></term>
+
+  <listitem><para>If set to <literal>true</literal>, Nix will fall
+  back to building from source if a binary substitute fails. This
+  is equivalent to the <option>--fallback</option> flag. The
+  default is <literal>false</literal>.</para></listitem>
+
+</varlistentry>
+
 <varlistentry xml:id="conf-build-chroot-dirs"><term><literal>build-chroot-dirs</literal></term>
 
   <listitem><para>When builds are performed in a chroot environment,
@@ -307,6 +320,50 @@ build-use-chroot = /dev /proc /bin</programlisting>
 
 </varlistentry>
 
+<varlistentry><term><literal>binary-caches</literal></term>
+
+  <listitem><para>A list of URLs of binary caches, separated by
+  whitespace. The default is empty.<!-- The default is
+  <literal>http://nixos.org/binary-cache</literal>. --></para></listitem>
+
+</varlistentry>
+
+<varlistentry><term><literal>binary-caches-files</literal></term>
+
+  <listitem><para>A list of names of files that will be read to
+  obtain additional binary cache URLs. The default is
+  <literal>/nix/var/nix/profiles/per-user/root/channels/binary-caches/*</literal>,
+  which ensures that Nix will use the binary caches corresponding to
+  the channels installed by root. Do not set this option to read
+  files created by untrusted users!</para></listitem>
+
+</varlistentry>
+
+<varlistentry><term><literal>trusted-binary-caches</literal></term>
+
+  <listitem><para>A list of URLs of binary caches, separated by
+  whitespace. These are not used by default, but can be enabled by
+  users of the Nix daemon by specifying <literal>--option
+  binary-caches <replaceable>urls</replaceable></literal> on the
+  command line. Daemon users are only allowed to pass a subset of
+  the URLs listed in <literal>binary-caches</literal> and
+  <literal>trusted-binary-caches</literal>.</para></listitem>
+
+</varlistentry>
+
+<varlistentry><term><literal>binary-caches-parallel-connections</literal></term>
+
+  <listitem><para>The maximum number of parallel HTTP connections
+  used by the binary cache substituter to get NAR info files. This
+  number should be high to minimise latency. It defaults to
+  150.</para></listitem>
+
+</varlistentry>
+
 <varlistentry><term><literal>system</literal></term>
 
   <listitem><para>This option specifies the canonical Nix system
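To make the new settings concrete, here is a hypothetical nix.conf fragment (not part of this commit) using the options documented above; nixos.org/binary-cache is the URL mentioned in the commented-out default, and the other URL is a placeholder:

    binary-caches = http://nixos.org/binary-cache
    trusted-binary-caches = http://cache.example.org
    binary-caches-parallel-connections = 150
    build-fallback = true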


@@ -94,6 +94,11 @@
   <citerefentry><refentrytitle>nix.conf</refentrytitle><manvolnum>5</manvolnum></citerefentry>.</para>
 </listitem>
 
+<listitem>
+  <para>When using the Nix daemon, the <option>-s</option> flag in
+  <command>nix-env -qa</command> is now much faster.</para>
+</listitem>
+
 </itemizedlist>
 
 </section>

nix.spec.in

@@ -16,6 +16,7 @@ BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 %endif
 BuildRequires: perl(DBD::SQLite)
 BuildRequires: perl(DBI)
+BuildRequires: perl(WWW::Curl)
 BuildRequires: perl(ExtUtils::ParseXS)
 Requires: /usr/bin/perl
 Requires: curl

perl/Makefile.am

@@ -1,4 +1,4 @@
-PERL_MODULES = lib/Nix/Store.pm lib/Nix/Manifest.pm lib/Nix/GeneratePatches.pm lib/Nix/SSH.pm lib/Nix/CopyClosure.pm lib/Nix/Config.pm.in
+PERL_MODULES = lib/Nix/Store.pm lib/Nix/Manifest.pm lib/Nix/GeneratePatches.pm lib/Nix/SSH.pm lib/Nix/CopyClosure.pm lib/Nix/Config.pm.in lib/Nix/Utils.pm
 
 all: $(PERL_MODULES:.in=)

perl/lib/Nix/Config.pm.in

@@ -1,27 +1,39 @@
 package Nix::Config;
 
+$version = "@version@";
+
 $binDir = $ENV{"NIX_BIN_DIR"} || "@bindir@";
 $libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@libexecdir@";
 $stateDir = $ENV{"NIX_STATE_DIR"} || "@localstatedir@/nix";
 $manifestDir = $ENV{"NIX_MANIFESTS_DIR"} || "@localstatedir@/nix/manifests";
 $logDir = $ENV{"NIX_LOG_DIR"} || "@localstatedir@/log/nix";
 $confDir = $ENV{"NIX_CONF_DIR"} || "@sysconfdir@/nix";
+$storeDir = $ENV{"NIX_STORE_DIR"} || "@storedir@";
 
-$bzip2 = $ENV{"NIX_BZIP2"} || "@bzip2@";
+$bzip2 = "@bzip2@";
+$xz = "@xz@";
 $curl = "@curl@";
 
 $useBindings = "@perlbindings@" eq "yes";
 
+%config = ();
+
 sub readConfig {
-    my %config;
-    my $config = "@sysconfdir@/nix/nix.conf";
+    if (defined $ENV{'_NIX_OPTIONS'}) {
+        foreach my $s (split '\n', $ENV{'_NIX_OPTIONS'}) {
+            my ($n, $v) = split '=', $s, 2;
+            $config{$n} = $v;
+        }
+        return;
+    }
+
+    my $config = "$confDir/nix.conf";
     return unless -f $config;
 
     open CONFIG, "<$config" or die "cannot open `$config'";
     while (<CONFIG>) {
         /^\s*([\w|-]+)\s*=\s*(.*)$/ or next;
         $config{$1} = $2;
+        print "|$1| -> |$2|\n";
     }
     close CONFIG;
 }

perl/lib/Nix/Store.xs

@@ -19,7 +19,7 @@ void doInit()
 {
     if (!store) {
         try {
-            setDefaultsFromEnvironment();
+            settings.processEnvironment();
             store = openStore();
         } catch (Error & e) {
             croak(e.what());

perl/lib/Nix/Utils.pm (new file)

@ -0,0 +1,19 @@
package Nix::Utils;
$urlRE = "(?: [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*]+ )";
sub checkURL {
my ($url) = @_;
die "invalid URL $url\n" unless $url =~ /^ $urlRE $ /x;
}
sub uniq {
my %seen;
my @res;
foreach my $name (@_) {
next if $seen{$name};
$seen{$name} = 1;
push @res, $name;
}
return @res;
}

release.nix

@@ -29,6 +29,7 @@ let
       --with-xml-flags=--nonet
       --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
       --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
+      --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
     '';
 
     postUnpack = ''
@@ -43,7 +44,7 @@
     preDist = ''
       make -C doc/manual install prefix=$out
       make -C doc/manual manual.pdf prefix=$out
       cp doc/manual/manual.pdf $out/manual.pdf
@@ -54,7 +55,7 @@
       # to Windows and Macs, so there should be no Linux binaries
       # in the closure).
       nuke-refs $out/manual.pdf
 
       echo "doc manual $out/share/doc/nix/manual" >> $out/nix-support/hydra-build-products
       echo "doc-pdf manual $out/manual.pdf" >> $out/nix-support/hydra-build-products
       echo "doc release-notes $out/share/doc/nix/release-notes" >> $out/nix-support/hydra-build-products
@@ -77,6 +78,7 @@
       --disable-init-state
       --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
       --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
+      --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
       --enable-gc
     '';
@@ -134,12 +136,13 @@
       --disable-init-state
       --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
       --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
+      --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
     '';
 
     dontInstall = false;
 
     doInstallCheck = true;
 
     lcovFilter = [ "*/boost/*" "*-tab.*" ];
 
     # We call `dot', and even though we just use it to
@@ -148,16 +151,16 @@
     FONTCONFIG_FILE = texFunctions.fontsConf;
   };
 
   rpm_fedora13i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora13i386) 50;
   rpm_fedora13x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora13x86_64) 50;
   rpm_fedora16i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora16i386) 50;
   rpm_fedora16x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora16x86_64) 50;
 
   deb_debian60i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian60i386) 50;
   deb_debian60x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian60x86_64) 50;
 
   deb_ubuntu1004i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1004i386) 50;
   deb_ubuntu1004x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1004x86_64) 50;
   deb_ubuntu1010i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1010i386) 50;
@@ -183,7 +186,7 @@
   makeRPM_i686 = makeRPM "i686-linux";
   makeRPM_x86_64 = makeRPM "x86_64-linux";
 
   makeRPM =
     system: diskImageFun: prio:
 
     with import nixpkgs { inherit system; };
@@ -192,7 +195,7 @@
       name = "nix-rpm-${diskImage.name}";
       src = jobs.tarball;
       diskImage = (diskImageFun vmTools.diskImageFuns)
-        { extraPackages = [ "perl-DBD-SQLite" "perl-devel" "sqlite" "sqlite-devel" "bzip2-devel" "emacs" ]; };
+        { extraPackages = [ "perl-DBD-SQLite" "perl-devel" "sqlite" "sqlite-devel" "bzip2-devel" "emacs" "perl-WWW-Curl" ]; };
       memSize = 1024;
       meta.schedulingPriority = prio;
       postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck";
@@ -201,7 +204,7 @@
   makeDeb_i686 = makeDeb "i686-linux";
   makeDeb_x86_64 = makeDeb "x86_64-linux";
 
   makeDeb =
     system: diskImageFun: prio:
@@ -211,7 +214,7 @@
       name = "nix-deb";
       src = jobs.tarball;
       diskImage = (diskImageFun vmTools.diskImageFuns)
-        { extraPackages = [ "libdbd-sqlite3-perl" "libsqlite3-dev" "libbz2-dev" ]; };
+        { extraPackages = [ "libdbd-sqlite3-perl" "libsqlite3-dev" "libbz2-dev" "libwww-curl-perl" ]; };
       memSize = 1024;
       meta.schedulingPriority = prio;
       configureFlags = "--sysconfdir=/etc";

scripts/Makefile.am

@@ -7,17 +7,14 @@ noinst_SCRIPTS = nix-profile.sh \
   find-runtime-roots.pl build-remote.pl nix-reduce-build \
   copy-from-other-stores.pl nix-http-export.cgi
 
-nix-pull nix-push: download-using-manifests.pl
-
-install-exec-local: download-using-manifests.pl copy-from-other-stores.pl find-runtime-roots.pl
+install-exec-local: download-using-manifests.pl copy-from-other-stores.pl download-from-binary-cache.pl find-runtime-roots.pl
 	$(INSTALL) -d $(DESTDIR)$(sysconfdir)/profile.d
 	$(INSTALL_DATA) nix-profile.sh $(DESTDIR)$(sysconfdir)/profile.d/nix.sh
 	$(INSTALL) -d $(DESTDIR)$(libexecdir)/nix
 	$(INSTALL_PROGRAM) find-runtime-roots.pl $(DESTDIR)$(libexecdir)/nix
 	$(INSTALL_PROGRAM) build-remote.pl $(DESTDIR)$(libexecdir)/nix
 	$(INSTALL) -d $(DESTDIR)$(libexecdir)/nix/substituters
-	$(INSTALL_PROGRAM) download-using-manifests.pl $(DESTDIR)$(libexecdir)/nix/substituters
-	$(INSTALL_PROGRAM) copy-from-other-stores.pl $(DESTDIR)$(libexecdir)/nix/substituters
+	$(INSTALL_PROGRAM) download-using-manifests.pl copy-from-other-stores.pl download-from-binary-cache.pl $(DESTDIR)$(libexecdir)/nix/substituters
 	$(INSTALL) -d $(DESTDIR)$(sysconfdir)/nix
 
 include ../substitute.mk
@@ -29,6 +26,7 @@ EXTRA_DIST = nix-collect-garbage.in \
   nix-build.in \
   download-using-manifests.pl.in \
   copy-from-other-stores.pl.in \
+  download-from-binary-cache.pl.in \
   nix-copy-closure.in \
   find-runtime-roots.pl.in \
   build-remote.pl.in \

scripts/copy-from-other-stores.pl.in

@@ -36,42 +36,45 @@ sub findStorePath {
 if ($ARGV[0] eq "--query") {
 
     while (<STDIN>) {
-        my $cmd = $_; chomp $cmd;
+        chomp;
+        my ($cmd, @args) = split " ", $_;
 
         if ($cmd eq "have") {
-            my $storePath = <STDIN>; chomp $storePath;
-            print STDOUT (defined findStorePath($storePath) ? "1\n" : "0\n");
+            foreach my $storePath (@args) {
+                print "$storePath\n" if defined findStorePath($storePath);
+            }
+            print "\n";
         }
 
         elsif ($cmd eq "info") {
-            my $storePath = <STDIN>; chomp $storePath;
-            my ($store, $sourcePath) = findStorePath($storePath);
-            if (!defined $store) {
-                print "0\n";
-                next; # not an error
-            }
-            print "1\n";
-
-            $ENV{"NIX_DB_DIR"} = "$store/var/nix/db";
-
-            my $deriver = `@bindir@/nix-store --query --deriver $storePath`;
-            die "cannot query deriver of `$storePath'" if $? != 0;
-            chomp $deriver;
-            $deriver = "" if $deriver eq "unknown-deriver";
-
-            my @references = split "\n",
-                `@bindir@/nix-store --query --references $storePath`;
-            die "cannot query references of `$storePath'" if $? != 0;
-
-            my $narSize = `@bindir@/nix-store --query --size $storePath`;
-            die "cannot query size of `$storePath'" if $? != 0;
-            chomp $narSize;
-
-            print "$deriver\n";
-            print scalar @references, "\n";
-            print "$_\n" foreach @references;
-            print "$narSize\n";
-            print "$narSize\n";
+            foreach my $storePath (@args) {
+                my ($store, $sourcePath) = findStorePath($storePath);
+                next unless defined $store;
+
+                $ENV{"NIX_DB_DIR"} = "$store/var/nix/db";
+
+                my $deriver = `@bindir@/nix-store --query --deriver $storePath`;
+                die "cannot query deriver of `$storePath'" if $? != 0;
+                chomp $deriver;
+                $deriver = "" if $deriver eq "unknown-deriver";
+
+                my @references = split "\n",
+                    `@bindir@/nix-store --query --references $storePath`;
+                die "cannot query references of `$storePath'" if $? != 0;
+
+                my $narSize = `@bindir@/nix-store --query --size $storePath`;
+                die "cannot query size of `$storePath'" if $? != 0;
+                chomp $narSize;
+
+                print "$storePath\n";
+                print "$deriver\n";
+                print scalar @references, "\n";
+                print "$_\n" foreach @references;
+                print "$narSize\n";
+                print "$narSize\n";
+            }
+            print "\n";
         }
 
         else { die "unknown command `$cmd'"; }
@@ -84,9 +87,10 @@ elsif ($ARGV[0] eq "--substitute") {
     my $storePath = $ARGV[1];
     my ($store, $sourcePath) = findStorePath $storePath;
     die unless $store;
-    print "\n*** Copying `$storePath' from `$sourcePath'\n\n";
+    print STDERR "\n*** Copying `$storePath' from `$sourcePath'\n\n";
     system("$binDir/nix-store --dump $sourcePath | $binDir/nix-store --restore $storePath") == 0
         or die "cannot copy `$sourcePath' to `$storePath'";
+    print "\n"; # no hash to verify
 }

scripts/download-from-binary-cache.pl.in (new file)

@ -0,0 +1,537 @@
#! @perl@ -w @perlFlags@
use DBI;
use File::Basename;
use IO::Select;
use Nix::Config;
use Nix::Store;
use Nix::Utils;
use WWW::Curl::Easy;
use WWW::Curl::Multi;
use strict;
Nix::Config::readConfig;
my @caches;
my $gotCaches = 0;
my $maxParallelRequests = int($Nix::Config::config{"binary-caches-parallel-connections"} // 150);
$maxParallelRequests = 1 if $maxParallelRequests < 1;
my $debug = ($ENV{"NIX_DEBUG_SUBST"} // "") eq 1;
my ($dbh, $queryCache, $insertNAR, $queryNAR, $insertNARExistence, $queryNARExistence);
my $curlm = WWW::Curl::Multi->new;
my $activeRequests = 0;
my $curlIdCount = 1;
my %requests;
my %scheduled;
my $caBundle = $ENV{"CURL_CA_BUNDLE"} // $ENV{"OPENSSL_X509_CERT_FILE"};
sub addRequest {
my ($storePath, $url, $head) = @_;
my $curl = WWW::Curl::Easy->new;
my $curlId = $curlIdCount++;
$requests{$curlId} = { storePath => $storePath, url => $url, handle => $curl, content => "", type => $head ? "HEAD" : "GET" };
$curl->setopt(CURLOPT_PRIVATE, $curlId);
$curl->setopt(CURLOPT_URL, $url);
$curl->setopt(CURLOPT_WRITEDATA, \$requests{$curlId}->{content});
$curl->setopt(CURLOPT_FOLLOWLOCATION, 1);
$curl->setopt(CURLOPT_CAINFO, $caBundle) if defined $caBundle;
$curl->setopt(CURLOPT_USERAGENT, "Nix/$Nix::Config::version");
$curl->setopt(CURLOPT_NOBODY, 1) if $head;
$curl->setopt(CURLOPT_FAILONERROR, 1);
if ($activeRequests >= $maxParallelRequests) {
$scheduled{$curlId} = 1;
} else {
$curlm->add_handle($curl);
$activeRequests++;
}
return $requests{$curlId};
}
sub processRequests {
while ($activeRequests) {
my ($rfds, $wfds, $efds) = $curlm->fdset();
#print STDERR "R = @{$rfds}, W = @{$wfds}, E = @{$efds}\n";
# Sleep until we can read or write some data.
if (scalar @{$rfds} + scalar @{$wfds} + scalar @{$efds} > 0) {
IO::Select->select(IO::Select->new(@{$rfds}), IO::Select->new(@{$wfds}), IO::Select->new(@{$efds}), 0.1);
}
if ($curlm->perform() != $activeRequests) {
while (my ($id, $result) = $curlm->info_read) {
if ($id) {
my $request = $requests{$id} or die;
my $handle = $request->{handle};
$request->{result} = $result;
$request->{httpStatus} = $handle->getinfo(CURLINFO_RESPONSE_CODE);
print STDERR "$request->{type} on $request->{url} [$request->{result}, $request->{httpStatus}]\n" if $debug;
$activeRequests--;
delete $request->{handle};
if (scalar(keys %scheduled) > 0) {
my $id2 = (keys %scheduled)[0];
$curlm->add_handle($requests{$id2}->{handle});
$activeRequests++;
delete $scheduled{$id2};
}
}
}
}
}
}
sub initCache {
my $dbPath = "$Nix::Config::stateDir/binary-cache-v1.sqlite";
# Open/create the database.
$dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "")
or die "cannot open database `$dbPath'";
$dbh->{RaiseError} = 1;
$dbh->{PrintError} = 0;
$dbh->do("pragma synchronous = off"); # we can always reproduce the cache
$dbh->do("pragma journal_mode = truncate");
# Initialise the database schema, if necessary.
$dbh->do(<<EOF);
create table if not exists BinaryCaches (
id integer primary key autoincrement not null,
url text unique not null,
timestamp integer not null,
storeDir text not null,
wantMassQuery integer not null
);
EOF
$dbh->do(<<EOF);
create table if not exists NARs (
cache integer not null,
storePath text not null,
url text not null,
compression text not null,
fileHash text,
fileSize integer,
narHash text,
narSize integer,
refs text,
deriver text,
system text,
timestamp integer not null,
primary key (cache, storePath),
foreign key (cache) references BinaryCaches(id) on delete cascade
);
EOF
$dbh->do(<<EOF);
create table if not exists NARExistence (
cache integer not null,
storePath text not null,
exist integer not null,
timestamp integer not null,
primary key (cache, storePath),
foreign key (cache) references BinaryCaches(id) on delete cascade
);
EOF
$queryCache = $dbh->prepare("select id, storeDir, wantMassQuery from BinaryCaches where url = ?") or die;
$insertNAR = $dbh->prepare(
"insert or replace into NARs(cache, storePath, url, compression, fileHash, fileSize, narHash, " .
"narSize, refs, deriver, system, timestamp) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die;
$queryNAR = $dbh->prepare("select * from NARs where cache = ? and storePath = ?") or die;
$insertNARExistence = $dbh->prepare(
"insert or replace into NARExistence(cache, storePath, exist, timestamp) values (?, ?, ?, ?)") or die;
$queryNARExistence = $dbh->prepare("select exist from NARExistence where cache = ? and storePath = ?") or die;
}
sub getAvailableCaches {
return if $gotCaches;
$gotCaches = 1;
sub strToList {
my ($s) = @_;
return map { s/\/+$//; $_ } split(/ /, $s);
}
my @urls = strToList ($Nix::Config::config{"binary-caches"} // "");
# // ($Nix::Config::storeDir eq "/nix/store" ? "http://nixos.org/binary-cache" : ""));
my $urlsFiles = $Nix::Config::config{"binary-cache-files"}
// "/nix/var/nix/profiles/per-user/root/channels/binary-caches/*";
foreach my $urlFile (glob $urlsFiles) {
next unless -f $urlFile;
open FILE, "<$urlFile" or die "cannot open $urlFile\n";
my $url = <FILE>; chomp $url;
close FILE;
push @urls, strToList($url);
}
# Allow Nix daemon users to override the binary caches to a subset
# of those listed in the config file. Note that untrusted-*
# denotes options passed by the client.
if (defined $Nix::Config::config{"untrusted-binary-caches"}) {
my @untrustedUrls = strToList $Nix::Config::config{"untrusted-binary-caches"};
my @trustedUrls = (@urls, strToList($Nix::Config::config{"trusted-binary-caches"} // ""));
@urls = ();
foreach my $url (@untrustedUrls) {
die "binary cache $url is not trusted (please add it to trusted-binary-caches in $Nix::Config::confDir/nix.conf)\n"
unless grep { $url eq $_ } @trustedUrls > 0;
push @urls, $url;
}
}
foreach my $url (Nix::Utils::uniq @urls) {
# FIXME: not atomic.
$queryCache->execute($url);
my $res = $queryCache->fetchrow_hashref();
if (defined $res) {
next if $res->{storeDir} ne $Nix::Config::storeDir;
push @caches, { id => $res->{id}, url => $url, wantMassQuery => $res->{wantMassQuery} };
next;
}
# Get the cache info file.
my $request = addRequest(undef, $url . "/nix-cache-info");
processRequests;
if ($request->{result} != 0) {
print STDERR "could not download $request->{url} (" .
($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
next;
}
my $storeDir = "/nix/store";
my $wantMassQuery = 0;
foreach my $line (split "\n", $request->{content}) {
unless ($line =~ /^(.*): (.*)$/) {
print STDERR "bad cache info file $request->{url}\n";
return undef;
}
if ($1 eq "StoreDir") { $storeDir = $2; }
elsif ($1 eq "WantMassQuery") { $wantMassQuery = int($2); }
}
$dbh->do("insert into BinaryCaches(url, timestamp, storeDir, wantMassQuery) values (?, ?, ?, ?)",
{}, $url, time(), $storeDir, $wantMassQuery);
my $id = $dbh->last_insert_id("", "", "", "");
next if $storeDir ne $Nix::Config::storeDir;
push @caches, { id => $id, url => $url, wantMassQuery => $wantMassQuery };
}
}
sub processNARInfo {
my ($storePath, $cache, $request) = @_;
if ($request->{result} != 0) {
if ($request->{result} != 37 && $request->{httpStatus} != 404) {
print STDERR "could not download $request->{url} (" .
($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
} else {
$insertNARExistence->execute($cache->{id}, basename($storePath), 0, time())
unless $request->{url} =~ /^file:/;
}
return undef;
}
my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system);
my $compression = "bzip2";
my @refs;
foreach my $line (split "\n", $request->{content}) {
unless ($line =~ /^(.*): (.*)$/) {
print STDERR "bad NAR info file $request->{url}\n";
return undef;
}
if ($1 eq "StorePath") { $storePath2 = $2; }
elsif ($1 eq "URL") { $url = $2; }
elsif ($1 eq "Compression") { $compression = $2; }
elsif ($1 eq "FileHash") { $fileHash = $2; }
elsif ($1 eq "FileSize") { $fileSize = int($2); }
elsif ($1 eq "NarHash") { $narHash = $2; }
elsif ($1 eq "NarSize") { $narSize = int($2); }
elsif ($1 eq "References") { @refs = split / /, $2; }
elsif ($1 eq "Deriver") { $deriver = $2; }
elsif ($1 eq "System") { $system = $2; }
}
return undef if $storePath ne $storePath2;
if ($storePath ne $storePath2 || !defined $url || !defined $narHash) {
print STDERR "bad NAR info file $request->{url}\n";
return undef;
}
# Cache the result.
$insertNAR->execute(
$cache->{id}, basename($storePath), $url, $compression, $fileHash, $fileSize,
$narHash, $narSize, join(" ", @refs), $deriver, $system, time())
unless $request->{url} =~ /^file:/;
return
{ url => $url
, compression => $compression
, fileHash => $fileHash
, fileSize => $fileSize
, narHash => $narHash
, narSize => $narSize
, refs => [ @refs ]
, deriver => $deriver
, system => $system
};
}
sub getCachedInfoFrom {
my ($storePath, $cache) = @_;
$queryNAR->execute($cache->{id}, basename($storePath));
my $res = $queryNAR->fetchrow_hashref();
return undef unless defined $res;
return
{ url => $res->{url}
, compression => $res->{compression}
, fileHash => $res->{fileHash}
, fileSize => $res->{fileSize}
, narHash => $res->{narHash}
, narSize => $res->{narSize}
, refs => [ split " ", $res->{refs} ]
, deriver => $res->{deriver}
} if defined $res;
}
sub negativeHit {
my ($storePath, $cache) = @_;
$queryNARExistence->execute($cache->{id}, basename($storePath));
my $res = $queryNARExistence->fetchrow_hashref();
return defined $res && $res->{exist} == 0;
}
sub positiveHit {
my ($storePath, $cache) = @_;
return 1 if defined getCachedInfoFrom($storePath, $cache);
$queryNARExistence->execute($cache->{id}, basename($storePath));
my $res = $queryNARExistence->fetchrow_hashref();
return defined $res && $res->{exist} == 1;
}
sub printInfo {
my ($storePath, $info) = @_;
print "$storePath\n";
print $info->{deriver} ? "$Nix::Config::storeDir/$info->{deriver}" : "", "\n";
print scalar @{$info->{refs}}, "\n";
print "$Nix::Config::storeDir/$_\n" foreach @{$info->{refs}};
print $info->{fileSize} || 0, "\n";
print $info->{narSize} || 0, "\n";
}
sub infoUrl {
my ($binaryCacheUrl, $storePath) = @_;
my $pathHash = substr(basename($storePath), 0, 32);
my $infoUrl = "$binaryCacheUrl/$pathHash.narinfo";
}
sub printInfoParallel {
my @paths = @_;
# First print all paths for which we have cached info.
my @left;
foreach my $storePath (@paths) {
my $found = 0;
foreach my $cache (@caches) {
my $info = getCachedInfoFrom($storePath, $cache);
if (defined $info) {
printInfo($storePath, $info);
$found = 1;
last;
}
}
push @left, $storePath if !$found;
}
return if scalar @left == 0;
foreach my $cache (@caches) {
my @left2;
%requests = ();
foreach my $storePath (@left) {
if (negativeHit($storePath, $cache)) {
push @left2, $storePath;
next;
}
addRequest($storePath, infoUrl($cache->{url}, $storePath));
}
processRequests;
foreach my $request (values %requests) {
my $info = processNARInfo($request->{storePath}, $cache, $request);
if (defined $info) {
printInfo($request->{storePath}, $info);
} else {
push @left2, $request->{storePath};
}
}
@left = @left2;
}
}
sub printSubstitutablePaths {
my @paths = @_;
# First look for paths that have cached info.
my @left;
foreach my $storePath (@paths) {
my $found = 0;
foreach my $cache (@caches) {
next unless $cache->{wantMassQuery};
if (positiveHit($storePath, $cache)) {
print "$storePath\n";
$found = 1;
last;
}
}
push @left, $storePath if !$found;
}
return if scalar @left == 0;
# For remaining paths, do HEAD requests.
foreach my $cache (@caches) {
next unless $cache->{wantMassQuery};
my @left2;
%requests = ();
foreach my $storePath (@left) {
if (negativeHit($storePath, $cache)) {
push @left2, $storePath;
next;
}
addRequest($storePath, infoUrl($cache->{url}, $storePath), 1);
}
processRequests;
foreach my $request (values %requests) {
if ($request->{result} != 0) {
if ($request->{result} != 37 && $request->{httpStatus} != 404) {
print STDERR "could not check $request->{url} (" .
($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
} else {
$insertNARExistence->execute($cache->{id}, basename($request->{storePath}), 0, time())
unless $request->{url} =~ /^file:/;
}
push @left2, $request->{storePath};
} else {
$insertNARExistence->execute($cache->{id}, basename($request->{storePath}), 1, time())
unless $request->{url} =~ /^file:/;
print "$request->{storePath}\n";
}
}
@left = @left2;
}
}
sub downloadBinary {
my ($storePath) = @_;
foreach my $cache (@caches) {
my $info = getCachedInfoFrom($storePath, $cache);
unless (defined $info) {
next if negativeHit($storePath, $cache);
my $request = addRequest($storePath, infoUrl($cache->{url}, $storePath));
processRequests;
$info = processNARInfo($storePath, $cache, $request);
}
next unless defined $info;
my $decompressor;
if ($info->{compression} eq "bzip2") { $decompressor = "$Nix::Config::bzip2 -d"; }
elsif ($info->{compression} eq "xz") { $decompressor = "$Nix::Config::xz -d"; }
else {
print STDERR "unknown compression method $info->{compression}\n";
next;
}
my $url = "$cache->{url}/$info->{url}"; # FIXME: handle non-relative URLs
print STDERR "\n*** Downloading $url into $storePath...\n";
Nix::Utils::checkURL $url;
if (system("$Nix::Config::curl --fail --location --insecure '$url' | $decompressor | $Nix::Config::binDir/nix-store --restore $storePath") != 0) {
die "download of `$info->{url}' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
next;
}
# Tell Nix about the expected hash so it can verify it.
print "$info->{narHash}\n";
print STDERR "\n";
return;
}
print STDERR "could not download $storePath from any binary cache\n";
}
initCache();
if ($ARGV[0] eq "--query") {
while (<STDIN>) {
getAvailableCaches;
chomp;
my ($cmd, @args) = split " ", $_;
if ($cmd eq "have") {
printSubstitutablePaths(@args);
print "\n";
}
elsif ($cmd eq "info") {
printInfoParallel(@args);
print "\n";
}
else { die "unknown command `$cmd'"; }
flush STDOUT;
}
}
elsif ($ARGV[0] eq "--substitute") {
my $storePath = $ARGV[1] or die;
getAvailableCaches;
downloadBinary($storePath);
}
else {
die;
}
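As a reading aid (not part of this commit), here is a sketch of the .narinfo format that processNARInfo above parses; the field names come from the parser, while the values are illustrative placeholders:

    StorePath: /nix/store/<hash>-example-1.0
    URL: nar/<file-hash>.nar.xz
    Compression: xz
    FileHash: sha256:<hash of the compressed NAR>
    FileSize: <size of the compressed NAR in bytes>
    NarHash: sha256:<hash of the uncompressed NAR>
    NarSize: <size of the uncompressed NAR in bytes>
    References: <space-separated basenames of referenced store paths>
    Deriver: <basename of the deriving .drv file>
    System: x86_64-linux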

scripts/download-using-manifests.pl.in

@ -4,6 +4,7 @@ use strict;
use Nix::Config; use Nix::Config;
use Nix::Manifest; use Nix::Manifest;
use Nix::Store; use Nix::Store;
use Nix::Utils;
use POSIX qw(strftime); use POSIX qw(strftime);
use File::Temp qw(tempdir); use File::Temp qw(tempdir);
@ -15,6 +16,9 @@ my $logFile = "$Nix::Config::logDir/downloads";
# estimating the expected download size. # estimating the expected download size.
my $fast = 1; my $fast = 1;
# --insecure is fine because Nix verifies the hash of the result.
my $curl = "$Nix::Config::curl --fail --location --insecure";
# Open the manifest cache and update it if necessary. # Open the manifest cache and update it if necessary.
my $dbh = updateManifestDB(); my $dbh = updateManifestDB();
@ -38,7 +42,7 @@ sub parseHash {
# given path. # given path.
sub computeSmallestDownload { sub computeSmallestDownload {
my $targetPath = shift; my $targetPath = shift;
# Build a graph of all store paths that might contribute to the # Build a graph of all store paths that might contribute to the
# construction of $targetPath, and the special node "start". The # construction of $targetPath, and the special node "start". The
# edges are either patch operations, or downloads of full NAR # edges are either patch operations, or downloads of full NAR
@ -93,7 +97,7 @@ sub computeSmallestDownload {
my $patchList = $dbh->selectall_arrayref( my $patchList = $dbh->selectall_arrayref(
"select * from Patches where storePath = ?", "select * from Patches where storePath = ?",
{ Slice => {} }, $u); { Slice => {} }, $u);
foreach my $patch (@{$patchList}) { foreach my $patch (@{$patchList}) {
if (isValidPath($patch->{basePath})) { if (isValidPath($patch->{basePath})) {
my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash}; my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
@ -106,7 +110,7 @@ sub computeSmallestDownload {
$hash =~ s/.*://; $hash =~ s/.*://;
$hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash; $hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
} }
next if $hash ne $baseHash; next if $hash ne $baseHash;
} }
push @queue, $patch->{basePath}; push @queue, $patch->{basePath};
@ -117,7 +121,7 @@ sub computeSmallestDownload {
my $narFileList = $dbh->selectall_arrayref( my $narFileList = $dbh->selectall_arrayref(
"select * from NARs where storePath = ?", "select * from NARs where storePath = ?",
{ Slice => {} }, $u); { Slice => {} }, $u);
foreach my $narFile (@{$narFileList}) { foreach my $narFile (@{$narFileList}) {
# !!! how to handle files whose size is not known in advance? # !!! how to handle files whose size is not known in advance?
# For now, assume some arbitrary size (1 GB). # For now, assume some arbitrary size (1 GB).
@ -173,58 +177,56 @@ sub computeSmallestDownload {
if ($ARGV[0] eq "--query") { if ($ARGV[0] eq "--query") {
while (<STDIN>) { while (<STDIN>) {
my $cmd = $_; chomp $cmd; chomp;
my ($cmd, @args) = split " ", $_;
if ($cmd eq "have") { if ($cmd eq "have") {
my $storePath = <STDIN>; chomp $storePath; foreach my $storePath (@args) {
print STDOUT ( print "$storePath\n" if scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0;
scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0 }
? "1\n" : "0\n"); print "\n";
} }
elsif ($cmd eq "info") { elsif ($cmd eq "info") {
my $storePath = <STDIN>; chomp $storePath; foreach my $storePath (@args) {
my $infos = $dbh->selectall_arrayref( my $infos = $dbh->selectall_arrayref(
"select * from NARs where storePath = ?", "select * from NARs where storePath = ?",
{ Slice => {} }, $storePath); { Slice => {} }, $storePath);
my $info;
if (scalar @{$infos} > 0) {
$info = @{$infos}[0];
}
else {
print "0\n";
next; # not an error
}
print "1\n"; next unless scalar @{$infos} > 0;
print "$info->{deriver}\n"; my $info = @{$infos}[0];
my @references = split " ", $info->{refs};
print scalar @references, "\n";
print "$_\n" foreach @references;
my @path = computeSmallestDownload $storePath; print "$storePath\n";
print "$info->{deriver}\n";
my @references = split " ", $info->{refs};
print scalar @references, "\n";
print "$_\n" foreach @references;
my $downloadSize = 0; my @path = computeSmallestDownload $storePath;
while (scalar @path > 0) {
my $edge = pop @path; my $downloadSize = 0;
my $u = $edge->{start}; while (scalar @path > 0) {
my $v = $edge->{end}; my $edge = pop @path;
if ($edge->{type} eq "patch") { my $u = $edge->{start};
$downloadSize += $edge->{info}->{size} || 0; my $v = $edge->{end};
} if ($edge->{type} eq "patch") {
elsif ($edge->{type} eq "narfile") { $downloadSize += $edge->{info}->{size} || 0;
$downloadSize += $edge->{info}->{size} || 0; }
elsif ($edge->{type} eq "narfile") {
$downloadSize += $edge->{info}->{size} || 0;
}
} }
print "$downloadSize\n";
my $narSize = $info->{narSize} || 0;
print "$narSize\n";
} }
print "$downloadSize\n"; print "\n";
my $narSize = $info->{narSize} || 0;
print "$narSize\n";
} }
else { die "unknown command `$cmd'"; } else { die "unknown command `$cmd'"; }
} }
@ -273,16 +275,6 @@ $dbh->disconnect;
my $curStep = 1; my $curStep = 1;
my $maxStep = scalar @path; my $maxStep = scalar @path;
sub downloadFile {
my $url = shift;
$ENV{"PRINT_PATH"} = 1;
$ENV{"QUIET"} = 1;
my ($hash, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
die "download of `$url' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
chomp $path;
return $path;
}
my $finalNarHash; my $finalNarHash;
while (scalar @path > 0) { while (scalar @path > 0) {
@ -314,13 +306,16 @@ while (scalar @path > 0) {
# Download the patch. # Download the patch.
print STDERR " downloading patch...\n"; print STDERR " downloading patch...\n";
my $patchPath = downloadFile "$patch->{url}"; my $patchPath = "$tmpDir/patch";
Nix::Utils::checkURL $patch->{url};
system("$curl '$patch->{url}' -o $patchPath") == 0
or die "cannot download patch `$patch->{url}'\n";
# Apply the patch to the NAR archive produced in step 1 (for # Apply the patch to the NAR archive produced in step 1 (for
# the already present path) or a later step (for patch sequences). # the already present path) or a later step (for patch sequences).
print STDERR " applying patch...\n"; print STDERR " applying patch...\n";
system("$Nix::Config::libexecDir/bspatch $tmpNar $tmpNar2 $patchPath") == 0 system("$Nix::Config::libexecDir/bspatch $tmpNar $tmpNar2 $patchPath") == 0
or die "cannot apply patch `$patchPath' to $tmpNar"; or die "cannot apply patch `$patchPath' to $tmpNar\n";
if ($curStep < $maxStep) { if ($curStep < $maxStep) {
# The archive will be used as the base of the next patch. # The archive will be used as the base of the next patch.
@ -330,7 +325,7 @@ while (scalar @path > 0) {
# into the target path. # into the target path.
print STDERR " unpacking patched archive...\n"; print STDERR " unpacking patched archive...\n";
system("$Nix::Config::binDir/nix-store --restore $v < $tmpNar2") == 0 system("$Nix::Config::binDir/nix-store --restore $v < $tmpNar2") == 0
or die "cannot unpack $tmpNar2 into `$v'"; or die "cannot unpack $tmpNar2 into `$v'\n";
} }
$finalNarHash = $patch->{narHash}; $finalNarHash = $patch->{narHash};
@ -342,20 +337,16 @@ while (scalar @path > 0) {
my $size = $narFile->{size} || -1; my $size = $narFile->{size} || -1;
print LOGFILE "$$ narfile $narFile->{url} $size $v\n"; print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
# Download the archive.
print STDERR " downloading archive...\n";
my $narFilePath = downloadFile "$narFile->{url}";
Nix::Utils::checkURL $narFile->{url};
if ($curStep < $maxStep) { if ($curStep < $maxStep) {
# The archive will be used a base to a patch. # The archive will be used a base to a patch.
system("$Nix::Config::bzip2 -d < '$narFilePath' > $tmpNar") == 0 system("$curl '$narFile->{url}' | $Nix::Config::bzip2 -d > $tmpNar") == 0
or die "cannot unpack `$narFilePath' into `$v'"; or die "cannot download and unpack `$narFile->{url}' into `$v'\n";
} else { } else {
# Unpack the archive into the target path. # Unpack the archive into the target path.
print STDERR " unpacking archive...\n"; system("$curl '$narFile->{url}' | $Nix::Config::bzip2 -d | $Nix::Config::binDir/nix-store --restore '$v'") == 0
system("$Nix::Config::bzip2 -d < '$narFilePath' | $Nix::Config::binDir/nix-store --restore '$v'") == 0 or die "cannot download and unpack `$narFile->{url}' into `$v'\n";
or die "cannot unpack `$narFilePath' into `$v'";
} }
$finalNarHash = $narFile->{narHash}; $finalNarHash = $narFile->{narHash};
@ -365,21 +356,10 @@ while (scalar @path > 0) {
} }
# Make sure that the hash declared in the manifest matches what we # Tell Nix about the expected hash so it can verify it.
# downloaded and unpacked. die "cannot check integrity of the downloaded path since its hash is not known\n"
unless defined $finalNarHash;
if (defined $finalNarHash) { print "$finalNarHash\n";
my ($hashAlgo, $hash) = parseHash $finalNarHash;
# The hash in the manifest can be either in base-16 or base-32.
# Handle both.
my $hash2 = hashPath($hashAlgo, $hashAlgo eq "sha256" && length($hash) != 64, $targetPath);
die "hash mismatch in downloaded path $targetPath; expected $hash, got $hash2\n"
if $hash ne $hash2;
} else {
die "cannot check integrity of the downloaded path since its hash is not known\n";
}
print STDERR "\n"; print STDERR "\n";

scripts/nix-build.in

@@ -58,6 +58,11 @@ EOF
         # '` hack
     }
 
+    elsif ($arg eq "--version") {
+        print "nix-build (Nix) $Nix::Config::version\n";
+        exit 0;
+    }
+
     elsif ($arg eq "--add-drv-link") {
         $drvLink = "./derivation";
     }

scripts/nix-channel.in

@@ -22,7 +22,7 @@ my $nixDefExpr = "$home/.nix-defexpr";
 my $userName = getpwuid($<) or die "cannot figure out user name";
 my $profile = "$Nix::Config::stateDir/profiles/per-user/$userName/channels";
 mkpath(dirname $profile, 0, 0755);
 
 my %channels;
@@ -77,20 +77,14 @@ # channels.
 sub update {
     my @channelNames = @_;
 
     readChannels;
 
-    # Create the manifests directory if it doesn't exist.
-    mkdir $manifestDir, 0755 unless -e $manifestDir;
-
-    # Do we have write permission to the manifests directory?
-    die "$0: you do not have write permission to `$manifestDir'!\n" unless -W $manifestDir;
-
     # Download each channel.
     my $exprs = "";
     foreach my $name (keys %channels) {
         next if scalar @channelNames > 0 && ! grep { $_ eq $name } @{channelNames};
         my $url = $channels{$name};
         my $origUrl = "$url/MANIFEST";
@@ -101,11 +95,20 @@
         die "$0: unable to check `$url'\n" if $? != 0;
         $headers =~ s/\r//g;
         $url = $1 if $headers =~ /^Location:\s*(.*)\s*$/m;
 
-        # Pull the channel manifest.
-        $ENV{'NIX_ORIG_URL'} = $origUrl;
-        system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
-            or die "cannot pull manifest from `$url'\n";
+        # Check if the channel advertises a binary cache.
+        my $binaryCacheURL = `$Nix::Config::curl --silent '$url'/binary-cache-url`;
+        my $extraAttrs = "";
+        if ($? == 0 && $binaryCacheURL ne "") {
+            $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; ";
+        } else {
+            # No binary cache, so pull the channel manifest.
+            mkdir $manifestDir, 0755 unless -e $manifestDir;
+            die "$0: you do not have write permission to `$manifestDir'!\n" unless -W $manifestDir;
+            $ENV{'NIX_ORIG_URL'} = $origUrl;
+            system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
+                or die "cannot pull manifest from `$url'\n";
+        }
 
         # Download the channel tarball.
         my $fullURL = "$url/nixexprs.tar.bz2";
@@ -120,7 +123,7 @@
         my $cname = $name;
         $cname .= $1 if basename($url) =~ /(-\d.*)$/;
 
-        $exprs .= "'f: f { name = \"$cname\"; channelName = \"$name\"; src = builtins.storePath \"$path\"; }' ";
+        $exprs .= "'f: f { name = \"$cname\"; channelName = \"$name\"; src = builtins.storePath \"$path\"; $extraAttrs }' ";
     }
 
     # Unpack the channel tarballs into the Nix store and install them
@@ -189,11 +192,16 @@
         update(@ARGV);
         last;
     }
 
     elsif ($arg eq "--help") {
         usageError;
     }
 
+    elsif ($arg eq "--version") {
+        print "nix-channel (Nix) $Nix::Config::version\n";
+        exit 0;
+    }
+
     else {
         die "unknown argument `$arg'; try `--help'";
     }

scripts/nix-install-package.in

@@ -3,6 +3,7 @@
 use strict;
 use File::Temp qw(tempdir);
 use Nix::Config;
+use Nix::Utils;
 
 sub usageError {
@@ -72,7 +73,7 @@ my $tmpDir = tempdir("nix-install-package.XXXXXX", CLEANUP => 1, TMPDIR => 1)
 sub barf {
     my $msg = shift;
-    print "$msg\n";
+    print "\nInstallation failed: $msg\n";
     <STDIN> if $interactive;
     exit 1;
 }
@@ -92,7 +93,6 @@ open PKGFILE, "<$pkgFile" or barf "cannot open `$pkgFile': $!";
 my $contents = <PKGFILE>;
 close PKGFILE;
 
-my $urlRE = "(?: [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+ )";
 my $nameRE = "(?: [A-Za-z0-9\+\-\.\_\?\=]+ )"; # see checkStoreName()
 my $systemRE = "(?: [A-Za-z0-9\+\-\_]+ )";
 my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
@@ -101,7 +101,7 @@ my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
 # store path. We'll let nix-env do that.
 
 $contents =~
-    / ^ \s* (\S+) \s+ ($urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) /x
+    / ^ \s* (\S+) \s+ ($Nix::Utils::urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )? /x
     or barf "invalid package contents";
 my $version = $1;
 my $manifestURL = $2;
@@ -109,6 +109,7 @@ my $drvName = $3;
 my $system = $4;
 my $drvPath = $5;
 my $outPath = $6;
+my $binaryCacheURL = $8;
 
 barf "invalid package version `$version'" unless $version eq "NIXPKG1";
@@ -122,17 +123,25 @@
 }
 
-# Store the manifest in the temporary directory so that we don't
-# pollute /nix/var/nix/manifests. This also requires that we don't
-# use the Nix daemon (because otherwise download-using-manifests won't
-# see our NIX_MANIFESTS_DIRS environment variable).
-$ENV{NIX_MANIFESTS_DIR} = $tmpDir;
-$ENV{NIX_REMOTE} = "";
+if (defined $binaryCacheURL) {
 
-print "\nPulling manifests...\n";
-system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
-    or barf "nix-pull failed: $?";
+    push @extraNixEnvArgs, "--option", "binary-caches", $binaryCacheURL;
+
+} else {
+
+    # Store the manifest in the temporary directory so that we don't
+    # pollute /nix/var/nix/manifests. This also requires that we
+    # don't use the Nix daemon (because otherwise
+    # download-using-manifests won't see our NIX_MANIFESTS_DIRS
+    # environment variable).
+    $ENV{NIX_MANIFESTS_DIR} = $tmpDir;
+    $ENV{NIX_REMOTE} = "";
+
+    print "\nPulling manifests...\n";
+    system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
+        or barf "nix-pull failed: $?";
+}
 
 print "\nInstalling package...\n";

scripts/nix-push.in

@ -1,85 +1,85 @@
#! @perl@ -w @perlFlags@ #! @perl@ -w @perlFlags@
use strict; use strict;
use File::Basename;
use File::Temp qw(tempdir); use File::Temp qw(tempdir);
use File::Path qw(mkpath);
use File::stat; use File::stat;
use File::Copy;
use Nix::Config; use Nix::Config;
use Nix::Store;
use Nix::Manifest; use Nix::Manifest;
my $hashAlgo = "sha256";
my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1) my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
or die "cannot create a temporary directory"; or die "cannot create a temporary directory";
my $nixExpr = "$tmpDir/create-nars.nix"; my $nixExpr = "$tmpDir/create-nars.nix";
my $manifest = "$tmpDir/MANIFEST";
my $curl = "$Nix::Config::curl --fail --silent";
my $extraCurlFlags = ${ENV{'CURL_FLAGS'}};
$curl = "$curl $extraCurlFlags" if defined $extraCurlFlags;
# Parse the command line. # Parse the command line.
my $localCopy; my $compressionType = "xz";
my $localArchivesDir; my $force = 0;
my $localManifestFile; my $destDir;
my $writeManifest = 0;
my $targetArchivesUrl; my $archivesURL;
my @roots;
my $archivesPutURL;
my $archivesGetURL;
my $manifestPutURL;
sub showSyntax { sub showSyntax {
print STDERR <<EOF print STDERR <<EOF
Usage: nix-push --copy ARCHIVES_DIR MANIFEST_FILE PATHS... Usage: nix-push --dest DIR [--manifest] [--url-prefix URL] PATHS...
or: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL MANIFEST_PUT_URL PATHS...
`nix-push' copies or uploads the closure of PATHS to the given `nix-push' packs the closure of PATHS into a set of NAR files stored
destination. in DIR. Optionally generate a manifest.
EOF EOF
; # ` ; # `
exit 1; exit 1;
} }
showSyntax if scalar @ARGV < 1; for (my $n = 0; $n < scalar @ARGV; $n++) {
my $arg = $ARGV[$n];
if ($ARGV[0] eq "--copy") { if ($arg eq "--help") {
showSyntax if scalar @ARGV < 3; showSyntax;
$localCopy = 1; } elsif ($arg eq "--bzip2") {
shift @ARGV; $compressionType = "bzip2";
$localArchivesDir = shift @ARGV; } elsif ($arg eq "--force") {
$localManifestFile = shift @ARGV; $force = 1;
if ($ARGV[0] eq "--target") { } elsif ($arg eq "--dest") {
shift @ARGV; $n++;
$targetArchivesUrl = shift @ARGV; die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
} $destDir = $ARGV[$n];
else { mkpath($destDir, 0, 0755);
$targetArchivesUrl = "file://$localArchivesDir"; } elsif ($arg eq "--manifest") {
$writeManifest = 1;
} elsif ($arg eq "--url-prefix") {
$n++;
die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
$archivesURL = $ARGV[$n];
} elsif (substr($arg, 0, 1) eq "-") {
showSyntax;
} else {
push @roots, $arg;
} }
} }
else {
showSyntax if scalar @ARGV < 3; showSyntax if !defined $destDir;
$localCopy = 0;
$archivesPutURL = shift @ARGV; $archivesURL = "file://$destDir" unless defined $archivesURL;
$archivesGetURL = shift @ARGV;
$manifestPutURL = shift @ARGV;
}
# From the given store paths, determine the set of requisite store # From the given store paths, determine the set of requisite store
# paths, i.e., the paths required to realise them. # paths, i.e., the paths required to realise them.
my %storePaths; my %storePaths;
foreach my $path (@ARGV) { foreach my $path (@roots) {
die unless $path =~ /^\//; die unless $path =~ /^\//;
# Get all paths referenced by the normalisation of the given # Get all paths referenced by the normalisation of the given
# Nix expression. # Nix expression.
my $pid = open(READ, my $pid = open(READ,
"$Nix::Config::binDir/nix-store --query --requisites --force-realise " . "$Nix::Config::binDir/nix-store --query --requisites --force-realise " .
"--include-outputs '$path'|") or die; "--include-outputs '$path'|") or die;
while (<READ>) { while (<READ>) {
chomp; chomp;
die "bad: $_" unless /^\//; die "bad: $_" unless /^\//;
@ -92,8 +92,8 @@ foreach my $path (@ARGV) {
my @storePaths = keys %storePaths; my @storePaths = keys %storePaths;
# For each path, create a Nix expression that turns the path into # Create a list of Nix derivations that turn each path into a Nix
# a Nix archive. # archive.
open NIX, ">$nixExpr"; open NIX, ">$nixExpr";
print NIX "["; print NIX "[";
@ -101,10 +101,10 @@ foreach my $storePath (@storePaths) {
die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/); die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);
# Construct a Nix expression that creates a Nix archive. # Construct a Nix expression that creates a Nix archive.
my $nixexpr = my $nixexpr =
"(import <nix/nar.nix> " . "(import <nix/nar.nix> " .
"{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; }) "; "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"sha256\"; compressionType = \"$compressionType\"; }) ";
print NIX $nixexpr; print NIX $nixexpr;
} }
@ -112,172 +112,132 @@ print NIX "]";
close NIX; close NIX;
# Instantiate store derivations from the Nix expression. # Build the Nix expression.
my @storeExprs; print STDERR "building compressed archives...\n";
print STDERR "instantiating store derivations...\n"; my @narPaths;
my $pid = open(READ, "$Nix::Config::binDir/nix-instantiate $nixExpr|") my $pid = open(READ, "$Nix::Config::binDir/nix-build $nixExpr -o $tmpDir/result |")
or die "cannot run nix-instantiate"; or die "cannot run nix-build";
while (<READ>) { while (<READ>) {
chomp; chomp;
die unless /^\//; die unless /^\//;
push @storeExprs, $_; push @narPaths, $_;
} }
close READ or die "nix-instantiate failed: $?"; close READ or die "nix-build failed: $?";
# Build the derivations. # Write the cache info file.
print STDERR "creating archives...\n"; my $cacheInfoFile = "$destDir/nix-cache-info";
if (! -e $cacheInfoFile) {
my @narPaths; open FILE, ">$cacheInfoFile" or die "cannot create $cacheInfoFile: $!";
print FILE "StoreDir: $Nix::Config::storeDir\n";
my @tmp = @storeExprs; print FILE "WantMassQuery: 0\n"; # by default, don't hit this cache for "nix-env -qas"
while (scalar @tmp > 0) { close FILE;
my $n = scalar @tmp;
if ($n > 256) { $n = 256 };
my @tmp2 = @tmp[0..$n - 1];
@tmp = @tmp[$n..scalar @tmp - 1];
my $pid = open(READ, "$Nix::Config::binDir/nix-store --realise @tmp2|")
or die "cannot run nix-store";
while (<READ>) {
chomp;
die unless (/^\//);
push @narPaths, "$_";
}
close READ or die "nix-store failed: $?";
} }
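For reference, the nix-cache-info file written here ends up containing only these two fields (assuming the default store prefix):

    StoreDir: /nix/store
    WantMassQuery: 0

StoreDir lets a client verify that the cache matches its own store location, and WantMassQuery (per the comment above) controls whether `nix-env -qas'-style bulk queries hit this cache.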
# Create the manifest. # Copy the archives and the corresponding NAR info files.
print STDERR "creating manifest...\n"; print STDERR "copying archives...\n";
my $totalNarSize = 0;
my $totalCompressedSize = 0;
my %narFiles; my %narFiles;
my %patches;
my @narArchives;
for (my $n = 0; $n < scalar @storePaths; $n++) { for (my $n = 0; $n < scalar @storePaths; $n++) {
my $storePath = $storePaths[$n]; my $storePath = $storePaths[$n];
my $narDir = $narPaths[$n]; my $narDir = $narPaths[$n];
my $baseName = basename $storePath;
$storePath =~ /\/([^\/]*)$/;
my $basename = $1;
defined $basename or die;
open HASH, "$narDir/narbz2-hash" or die "cannot open narbz2-hash"; # Get info about the store path.
my $narbz2Hash = <HASH>; my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath, 1);
chomp $narbz2Hash;
$narbz2Hash =~ /^[0-9a-z]+$/ or die "invalid hash";
close HASH;
my $narName = "$narbz2Hash.nar.bz2";
my $narFile = "$narDir/$narName";
(-f $narFile) or die "narfile for $storePath not found";
push @narArchives, $narFile;
my $narbz2Size = stat($narFile)->size;
my $references = `$Nix::Config::binDir/nix-store --query --references '$storePath'`;
die "cannot query references for `$storePath'" if $? != 0;
$references = join(" ", split(" ", $references));
my $deriver = `$Nix::Config::binDir/nix-store --query --deriver '$storePath'`;
die "cannot query deriver for `$storePath'" if $? != 0;
chomp $deriver;
$deriver = "" if $deriver eq "unknown-deriver";
my $narHash = `$Nix::Config::binDir/nix-store --query --hash '$storePath'`;
die "cannot query hash for `$storePath'" if $? != 0;
chomp $narHash;
# In some exceptional cases (such as VM tests that use the Nix # In some exceptional cases (such as VM tests that use the Nix
# store of the host), the database doesn't contain the hash. So # store of the host), the database doesn't contain the hash. So
# compute it. # compute it.
if ($narHash =~ /^sha256:0*$/) { if ($narHash =~ /^sha256:0*$/) {
$narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 '$storePath'`; my $nar = "$tmpDir/nar";
die "cannot hash `$storePath'" if $? != 0; system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
or die "cannot dump $storePath\n";
$narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 --flat $nar`;
die "cannot hash `$nar'" if $? != 0;
chomp $narHash; chomp $narHash;
$narHash = "sha256:$narHash"; $narHash = "sha256:$narHash";
$narSize = stat("$nar")->size;
unlink $nar or die;
} }
my $narSize = `$Nix::Config::binDir/nix-store --query --size '$storePath'`; $totalNarSize += $narSize;
die "cannot query size for `$storePath'" if $? != 0;
chomp $narSize;
my $url; # Get info about the compressed NAR.
if ($localCopy) { open HASH, "$narDir/nar-compressed-hash" or die "cannot open nar-compressed-hash";
$url = "$targetArchivesUrl/$narName"; my $compressedHash = <HASH>;
} else { chomp $compressedHash;
$url = "$archivesGetURL/$narName"; $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
close HASH;
my $narName = "$compressedHash.nar." . ($compressionType eq "xz" ? "xz" : "bz2");
my $narFile = "$narDir/$narName";
(-f $narFile) or die "NAR file for $storePath not found";
my $compressedSize = stat($narFile)->size;
$totalCompressedSize += $compressedSize;
printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
$compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;
# Copy the compressed NAR.
my $dst = "$destDir/$narName";
if (! -f $dst) {
my $tmp = "$destDir/.tmp.$$.$narName";
copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
} }
# Write the info file.
my $info;
$info .= "StorePath: $storePath\n";
$info .= "URL: $narName\n";
$info .= "Compression: $compressionType\n";
$info .= "FileHash: sha256:$compressedHash\n";
$info .= "FileSize: $compressedSize\n";
$info .= "NarHash: $narHash\n";
$info .= "NarSize: $narSize\n";
$info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
if (defined $deriver) {
$info .= "Deriver: " . basename $deriver . "\n";
if (isValidPath($deriver)) {
my $drv = derivationFromPath($deriver);
$info .= "System: $drv->{platform}\n";
}
}
my $pathHash = substr(basename($storePath), 0, 32);
$dst = "$destDir/$pathHash.narinfo";
if ($force || ! -f $dst) {
my $tmp = "$destDir/.tmp.$$.$pathHash.narinfo";
open INFO, ">$tmp" or die;
print INFO "$info" or die;
close INFO or die;
rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
}
$narFiles{$storePath} = [ $narFiles{$storePath} = [
{ url => $url { url => "$archivesURL/$narName"
, hash => "$hashAlgo:$narbz2Hash" , hash => "sha256:$compressedHash"
, size => $narbz2Size , size => $compressedSize
, narHash => "$narHash" , narHash => "$narHash"
, narSize => $narSize , narSize => $narSize
, references => $references , references => join(" ", @{$refs})
, deriver => $deriver , deriver => $deriver
} }
]; ] if $writeManifest;
} }
writeManifest $manifest, \%narFiles, \%patches; printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
$totalCompressedSize / (1024 * 1024), $totalCompressedSize / $totalNarSize * 100;
sub copyFile { # Optionally write a manifest.
my $src = shift; writeManifest "$destDir/MANIFEST", \%narFiles, \() if $writeManifest;
my $dst = shift;
my $tmp = "$dst.tmp.$$";
system("@coreutils@/cp", $src, $tmp) == 0 or die "cannot copy file";
rename($tmp, $dst) or die "cannot rename file: $!";
}
# Upload/copy the archives.
print STDERR "uploading/copying archives...\n";
sub archiveExists {
my $name = shift;
print STDERR " HEAD on $archivesGetURL/$name\n";
return system("$curl --head $archivesGetURL/$name > /dev/null") == 0;
}
foreach my $narArchive (@narArchives) {
$narArchive =~ /\/([^\/]*)$/;
my $basename = $1;
if ($localCopy) {
# Since nix-push creates $dst atomically, if it exists we
# don't have to copy again.
my $dst = "$localArchivesDir/$basename";
if (! -f "$localArchivesDir/$basename") {
print STDERR " $narArchive\n";
copyFile $narArchive, $dst;
}
}
else {
if (!archiveExists("$basename")) {
print STDERR " $narArchive\n";
system("$curl --show-error --upload-file " .
"'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
die "curl failed on $narArchive: $?";
}
}
}
# Upload the manifest.
print STDERR "uploading manifest...\n";
if ($localCopy) {
copyFile $manifest, $localManifestFile;
copyFile "$manifest.bz2", "$localManifestFile.bz2";
} else {
system("$curl --show-error --upload-file " .
"'$manifest' '$manifestPutURL' > /dev/null") == 0 or
die "curl failed on $manifest: $?";
system("$curl --show-error --upload-file " .
"'$manifest'.bz2 '$manifestPutURL'.bz2 > /dev/null") == 0 or
die "curl failed on $manifest: $?";
}


@ -181,7 +181,7 @@ EvalState::EvalState()
searchPathInsertionPoint = searchPath.end(); searchPathInsertionPoint = searchPath.end();
Strings paths = tokenizeString(getEnv("NIX_PATH", ""), ":"); Strings paths = tokenizeString(getEnv("NIX_PATH", ""), ":");
foreach (Strings::iterator, i, paths) addToSearchPath(*i); foreach (Strings::iterator, i, paths) addToSearchPath(*i);
addToSearchPath("nix=" + nixDataDir + "/nix/corepkgs"); addToSearchPath("nix=" + settings.nixDataDir + "/nix/corepkgs");
searchPathInsertionPoint = searchPath.begin(); searchPathInsertionPoint = searchPath.begin();
createBaseEnv(); createBaseEnv();
@ -1091,7 +1091,7 @@ string EvalState::coerceToString(Value & v, PathSet & context,
if (srcToStore[path] != "") if (srcToStore[path] != "")
dstPath = srcToStore[path]; dstPath = srcToStore[path];
else { else {
dstPath = readOnlyMode dstPath = settings.readOnlyMode
? computeStorePathForPath(path).first ? computeStorePathForPath(path).first
: store->addToStore(path); : store->addToStore(path);
srcToStore[path] = dstPath; srcToStore[path] = dstPath;


@ -31,6 +31,8 @@ private:
bool metaInfoRead; bool metaInfoRead;
MetaInfo meta; MetaInfo meta;
bool failed; // set if we get an AssertionError
public: public:
string name; string name;
@ -40,7 +42,7 @@ public:
/* !!! make this private */ /* !!! make this private */
Bindings * attrs; Bindings * attrs;
DrvInfo() : metaInfoRead(false), attrs(0) { }; DrvInfo() : metaInfoRead(false), failed(false), attrs(0) { };
string queryDrvPath(EvalState & state) const; string queryDrvPath(EvalState & state) const;
string queryOutPath(EvalState & state) const; string queryOutPath(EvalState & state) const;
@ -58,6 +60,9 @@ public:
} }
void setMetaInfo(const MetaInfo & meta); void setMetaInfo(const MetaInfo & meta);
void setFailed() { failed = true; };
bool hasFailed() { return failed; };
}; };


@ -51,6 +51,12 @@ static void prim_import(EvalState & state, Value * * args, Value & v)
% path % ctx); % path % ctx);
if (isDerivation(ctx)) if (isDerivation(ctx))
try { try {
/* For performance, prefetch all substitute info. */
PathSet willBuild, willSubstitute, unknown;
unsigned long long downloadSize, narSize;
queryMissing(*store, singleton<PathSet>(ctx),
willBuild, willSubstitute, unknown, downloadSize, narSize);
/* !!! If using a substitute, we only need to fetch /* !!! If using a substitute, we only need to fetch
the selected output of this derivation. */ the selected output of this derivation. */
store->buildPaths(singleton<PathSet>(ctx)); store->buildPaths(singleton<PathSet>(ctx));
@ -617,7 +623,7 @@ static void prim_toFile(EvalState & state, Value * * args, Value & v)
refs.insert(path); refs.insert(path);
} }
Path storePath = readOnlyMode Path storePath = settings.readOnlyMode
? computeStorePathForText(name, contents, refs) ? computeStorePathForText(name, contents, refs)
: store->addTextToStore(name, contents, refs); : store->addTextToStore(name, contents, refs);
@ -681,7 +687,7 @@ static void prim_filterSource(EvalState & state, Value * * args, Value & v)
FilterFromExpr filter(state, *args[0]); FilterFromExpr filter(state, *args[0]);
Path dstPath = readOnlyMode Path dstPath = settings.readOnlyMode
? computeStorePathForPath(path, true, htSHA256, filter).first ? computeStorePathForPath(path, true, htSHA256, filter).first
: store->addToStore(path, true, htSHA256, filter); : store->addToStore(path, true, htSHA256, filter);
@ -1134,7 +1140,7 @@ void EvalState::createBaseEnv()
mkInt(v, time(0)); mkInt(v, time(0));
addConstant("__currentTime", v); addConstant("__currentTime", v);
mkString(v, thisSystem.c_str()); mkString(v, settings.thisSystem.c_str());
addConstant("__currentSystem", v); addConstant("__currentSystem", v);
// Miscellaneous // Miscellaneous


@ -36,7 +36,7 @@ static void sigintHandler(int signo)
void printGCWarning() void printGCWarning()
{ {
static bool haveWarned = false; static bool haveWarned = false;
warnOnce(haveWarned, warnOnce(haveWarned,
"you did not specify `--add-root'; " "you did not specify `--add-root'; "
"the result might be removed by the garbage collector"); "the result might be removed by the garbage collector");
} }
@ -64,7 +64,7 @@ void printMissing(StoreAPI & store, const PathSet & paths)
if (!unknown.empty()) { if (!unknown.empty()) {
printMsg(lvlInfo, format("don't know how to build these paths%1%:") printMsg(lvlInfo, format("don't know how to build these paths%1%:")
% (readOnlyMode ? " (may be caused by read-only store access)" : "")); % (settings.readOnlyMode ? " (may be caused by read-only store access)" : ""));
foreach (PathSet::iterator, i, unknown) foreach (PathSet::iterator, i, unknown)
printMsg(lvlInfo, format(" %1%") % *i); printMsg(lvlInfo, format(" %1%") % *i);
} }
@ -83,12 +83,21 @@ static void setLogType(string lt)
static bool showTrace = false; static bool showTrace = false;
string getArg(const string & opt,
Strings::iterator & i, const Strings::iterator & end)
{
++i;
if (i == end) throw UsageError(format("`%1%' requires an argument") % opt);
return *i;
}
/* Initialize and reorder arguments, then call the actual argument /* Initialize and reorder arguments, then call the actual argument
processor. */ processor. */
static void initAndRun(int argc, char * * argv) static void initAndRun(int argc, char * * argv)
{ {
setDefaultsFromEnvironment(); settings.processEnvironment();
settings.loadConfFile();
/* Catch SIGINT. */ /* Catch SIGINT. */
struct sigaction act; struct sigaction act;
act.sa_handler = sigintHandler; act.sa_handler = sigintHandler;
@ -127,7 +136,7 @@ static void initAndRun(int argc, char * * argv)
Strings args, remaining; Strings args, remaining;
while (argc--) args.push_back(*argv++); while (argc--) args.push_back(*argv++);
args.erase(args.begin()); args.erase(args.begin());
/* Expand compound dash options (i.e., `-qlf' -> `-q -l -f'), and /* Expand compound dash options (i.e., `-qlf' -> `-q -l -f'), and
ignore options for the ATerm library. */ ignore options for the ATerm library. */
for (Strings::iterator i = args.begin(); i != args.end(); ++i) { for (Strings::iterator i = args.begin(); i != args.end(); ++i) {
@ -146,20 +155,19 @@ static void initAndRun(int argc, char * * argv)
remaining.clear(); remaining.clear();
/* Process default options. */ /* Process default options. */
int verbosityDelta = 0; int verbosityDelta = lvlInfo;
for (Strings::iterator i = args.begin(); i != args.end(); ++i) { for (Strings::iterator i = args.begin(); i != args.end(); ++i) {
string arg = *i; string arg = *i;
if (arg == "--verbose" || arg == "-v") verbosityDelta++; if (arg == "--verbose" || arg == "-v") verbosityDelta++;
else if (arg == "--quiet") verbosityDelta--; else if (arg == "--quiet") verbosityDelta--;
else if (arg == "--log-type") { else if (arg == "--log-type") {
++i; string s = getArg(arg, i, args.end());
if (i == args.end()) throw UsageError("`--log-type' requires an argument"); setLogType(s);
setLogType(*i);
} }
else if (arg == "--no-build-output" || arg == "-Q") else if (arg == "--no-build-output" || arg == "-Q")
buildVerbosity = lvlVomit; settings.buildVerbosity = lvlVomit;
else if (arg == "--print-build-trace") else if (arg == "--print-build-trace")
printBuildTrace = true; settings.printBuildTrace = true;
else if (arg == "--help") { else if (arg == "--help") {
printHelp(); printHelp();
return; return;
@ -169,23 +177,23 @@ static void initAndRun(int argc, char * * argv)
return; return;
} }
else if (arg == "--keep-failed" || arg == "-K") else if (arg == "--keep-failed" || arg == "-K")
keepFailed = true; settings.keepFailed = true;
else if (arg == "--keep-going" || arg == "-k") else if (arg == "--keep-going" || arg == "-k")
keepGoing = true; settings.keepGoing = true;
else if (arg == "--fallback") else if (arg == "--fallback")
tryFallback = true; settings.set("build-fallback", "true");
else if (arg == "--max-jobs" || arg == "-j") else if (arg == "--max-jobs" || arg == "-j")
maxBuildJobs = getIntArg<unsigned int>(arg, i, args.end()); settings.set("build-max-jobs", getArg(arg, i, args.end()));
else if (arg == "--cores") else if (arg == "--cores")
buildCores = getIntArg<unsigned int>(arg, i, args.end()); settings.set("build-cores", getArg(arg, i, args.end()));
else if (arg == "--readonly-mode") else if (arg == "--readonly-mode")
readOnlyMode = true; settings.readOnlyMode = true;
else if (arg == "--max-silent-time") else if (arg == "--max-silent-time")
maxSilentTime = getIntArg<unsigned int>(arg, i, args.end()); settings.set("build-max-silent-time", getArg(arg, i, args.end()));
else if (arg == "--timeout") else if (arg == "--timeout")
buildTimeout = getIntArg<unsigned int>(arg, i, args.end()); settings.set("build-timeout", getArg(arg, i, args.end()));
else if (arg == "--no-build-hook") else if (arg == "--no-build-hook")
useBuildHook = false; settings.useBuildHook = false;
else if (arg == "--show-trace") else if (arg == "--show-trace")
showTrace = true; showTrace = true;
else if (arg == "--option") { else if (arg == "--option") {
@ -193,14 +201,15 @@ static void initAndRun(int argc, char * * argv)
string name = *i; string name = *i;
++i; if (i == args.end()) throw UsageError("`--option' requires two arguments"); ++i; if (i == args.end()) throw UsageError("`--option' requires two arguments");
string value = *i; string value = *i;
overrideSetting(name, tokenizeString(value)); settings.set(name, value);
} }
else remaining.push_back(arg); else remaining.push_back(arg);
} }
verbosityDelta += queryIntSetting("verbosity", lvlInfo);
verbosity = (Verbosity) (verbosityDelta < 0 ? 0 : verbosityDelta); verbosity = (Verbosity) (verbosityDelta < 0 ? 0 : verbosityDelta);
settings.update();
run(remaining); run(remaining);
/* Close the Nix database. */ /* Close the Nix database. */
@ -218,7 +227,7 @@ static void setuidInit()
uid_t nixUid = geteuid(); uid_t nixUid = geteuid();
gid_t nixGid = getegid(); gid_t nixGid = getegid();
setuidCleanup(); setuidCleanup();
/* Don't trust the current directory. */ /* Don't trust the current directory. */
@ -284,7 +293,7 @@ int main(int argc, char * * argv)
right away. */ right away. */
if (argc == 0) abort(); if (argc == 0) abort();
setuidInit(); setuidInit();
/* Turn on buffering for cerr. */ /* Turn on buffering for cerr. */
#if HAVE_PUBSETBUF #if HAVE_PUBSETBUF
std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf)); std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));
@ -313,7 +322,7 @@ int main(int argc, char * * argv)
throw; throw;
} }
} catch (UsageError & e) { } catch (UsageError & e) {
printMsg(lvlError, printMsg(lvlError,
format( format(
"error: %1%\n" "error: %1%\n"
"Try `%2% --help' for more information.") "Try `%2% --help' for more information.")

File diff suppressed because it is too large
@ -12,7 +12,7 @@ void DerivationOutput::parseHashInfo(bool & recursive, HashType & hashType, Hash
{ {
recursive = false; recursive = false;
string algo = hashAlgo; string algo = hashAlgo;
if (string(algo, 0, 2) == "r:") { if (string(algo, 0, 2) == "r:") {
recursive = true; recursive = true;
algo = string(algo, 2); algo = string(algo, 2);
@ -21,7 +21,7 @@ void DerivationOutput::parseHashInfo(bool & recursive, HashType & hashType, Hash
hashType = parseHashType(algo); hashType = parseHashType(algo);
if (hashType == htUnknown) if (hashType == htUnknown)
throw Error(format("unknown hash algorithm `%1%'") % algo); throw Error(format("unknown hash algorithm `%1%'") % algo);
hash = parseHash(hashType, this->hash); hash = parseHash(hashType, this->hash);
} }
@ -38,7 +38,7 @@ Path writeDerivation(StoreAPI & store,
held during a garbage collection). */ held during a garbage collection). */
string suffix = name + drvExtension; string suffix = name + drvExtension;
string contents = unparseDerivation(drv); string contents = unparseDerivation(drv);
return readOnlyMode return settings.readOnlyMode
? computeStorePathForText(suffix, contents, references) ? computeStorePathForText(suffix, contents, references)
: store.addTextToStore(suffix, contents, references); : store.addTextToStore(suffix, contents, references);
} }
@ -51,7 +51,7 @@ static Path parsePath(std::istream & str)
throw Error(format("bad path `%1%' in derivation") % s); throw Error(format("bad path `%1%' in derivation") % s);
return s; return s;
} }
static StringSet parseStrings(std::istream & str, bool arePaths) static StringSet parseStrings(std::istream & str, bool arePaths)
{ {
@ -60,7 +60,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
res.insert(arePaths ? parsePath(str) : parseString(str)); res.insert(arePaths ? parsePath(str) : parseString(str));
return res; return res;
} }
Derivation parseDerivation(const string & s) Derivation parseDerivation(const string & s)
{ {
@ -106,7 +106,7 @@ Derivation parseDerivation(const string & s)
expect(str, ")"); expect(str, ")");
drv.env[name] = value; drv.env[name] = value;
} }
expect(str, ")"); expect(str, ")");
return drv; return drv;
} }
@ -165,7 +165,7 @@ string unparseDerivation(const Derivation & drv)
s += "],"; s += "],";
printStrings(s, drv.inputSrcs.begin(), drv.inputSrcs.end()); printStrings(s, drv.inputSrcs.begin(), drv.inputSrcs.end());
s += ','; printString(s, drv.platform); s += ','; printString(s, drv.platform);
s += ','; printString(s, drv.builder); s += ','; printString(s, drv.builder);
s += ','; printStrings(s, drv.args.begin(), drv.args.end()); s += ','; printStrings(s, drv.args.begin(), drv.args.end());
@ -178,9 +178,9 @@ string unparseDerivation(const Derivation & drv)
s += ','; printString(s, i->second); s += ','; printString(s, i->second);
s += ')'; s += ')';
} }
s += "])"; s += "])";
return s; return s;
} }
@ -190,7 +190,7 @@ bool isDerivation(const string & fileName)
return hasSuffix(fileName, drvExtension); return hasSuffix(fileName, drvExtension);
} }
bool isFixedOutputDrv(const Derivation & drv) bool isFixedOutputDrv(const Derivation & drv)
{ {
return drv.outputs.size() == 1 && return drv.outputs.size() == 1 &&
@ -247,7 +247,7 @@ Hash hashDerivationModulo(StoreAPI & store, Derivation drv)
inputs2[printHash(h)] = i->second; inputs2[printHash(h)] = i->second;
} }
drv.inputDrvs = inputs2; drv.inputDrvs = inputs2;
return hashString(htSHA256, unparseDerivation(drv)); return hashString(htSHA256, unparseDerivation(drv));
} }


@ -34,10 +34,10 @@ static const int defaultGcLevel = 1000;
int LocalStore::openGCLock(LockType lockType) int LocalStore::openGCLock(LockType lockType)
{ {
Path fnGCLock = (format("%1%/%2%") Path fnGCLock = (format("%1%/%2%")
% nixStateDir % gcLockName).str(); % settings.nixStateDir % gcLockName).str();
debug(format("acquiring global GC lock `%1%'") % fnGCLock); debug(format("acquiring global GC lock `%1%'") % fnGCLock);
AutoCloseFD fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT, 0600); AutoCloseFD fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT, 0600);
if (fdGCLock == -1) if (fdGCLock == -1)
throw SysError(format("opening global GC lock `%1%'") % fnGCLock); throw SysError(format("opening global GC lock `%1%'") % fnGCLock);
@ -51,7 +51,7 @@ int LocalStore::openGCLock(LockType lockType)
/* !!! Restrict read permission on the GC root. Otherwise any /* !!! Restrict read permission on the GC root. Otherwise any
process that can open the file for reading can DoS the process that can open the file for reading can DoS the
collector. */ collector. */
return fdGCLock.borrow(); return fdGCLock.borrow();
} }
@ -85,7 +85,7 @@ void LocalStore::addIndirectRoot(const Path & path)
{ {
string hash = printHash32(hashString(htSHA1, path)); string hash = printHash32(hashString(htSHA1, path));
Path realRoot = canonPath((format("%1%/%2%/auto/%3%") Path realRoot = canonPath((format("%1%/%2%/auto/%3%")
% nixStateDir % gcRootsDir % hash).str()); % settings.nixStateDir % gcRootsDir % hash).str());
createSymlink(realRoot, path); createSymlink(realRoot, path);
} }
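For orientation, an indirect root registered via addIndirectRoot ends up as a chain of two symlinks along these lines (the hash, user path and store path are invented; nixStateDir defaults to /nix/var/nix):

    /nix/var/nix/gcroots/auto/<base-32 SHA-1 of "/home/alice/result"> -> /home/alice/result
    /home/alice/result -> /nix/store/<hash>-foo

The collector discovers the user's root by following the auto/ entry; once the user deletes the result link, the auto/ entry becomes stale and can be cleaned up.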
@ -113,15 +113,15 @@ Path addPermRoot(StoreAPI & store, const Path & _storePath,
else { else {
if (!allowOutsideRootsDir) { if (!allowOutsideRootsDir) {
Path rootsDir = canonPath((format("%1%/%2%") % nixStateDir % gcRootsDir).str()); Path rootsDir = canonPath((format("%1%/%2%") % settings.nixStateDir % gcRootsDir).str());
if (string(gcRoot, 0, rootsDir.size() + 1) != rootsDir + "/") if (string(gcRoot, 0, rootsDir.size() + 1) != rootsDir + "/")
throw Error(format( throw Error(format(
"path `%1%' is not a valid garbage collector root; " "path `%1%' is not a valid garbage collector root; "
"it's not in the directory `%2%'") "it's not in the directory `%2%'")
% gcRoot % rootsDir); % gcRoot % rootsDir);
} }
createSymlink(gcRoot, storePath); createSymlink(gcRoot, storePath);
} }
@ -130,10 +130,10 @@ Path addPermRoot(StoreAPI & store, const Path & _storePath,
Instead of reading all the roots, it would be more efficient to Instead of reading all the roots, it would be more efficient to
check if the root is in a directory in or linked from the check if the root is in a directory in or linked from the
gcroots directory. */ gcroots directory. */
if (queryBoolSetting("gc-check-reachability", false)) { if (settings.checkRootReachability) {
Roots roots = store.findRoots(); Roots roots = store.findRoots();
if (roots.find(gcRoot) == roots.end()) if (roots.find(gcRoot) == roots.end())
printMsg(lvlError, printMsg(lvlError,
format( format(
"warning: `%1%' is not in a directory where the garbage collector looks for roots; " "warning: `%1%' is not in a directory where the garbage collector looks for roots; "
"therefore, `%2%' might be removed by the garbage collector") "therefore, `%2%' might be removed by the garbage collector")
@ -144,7 +144,7 @@ Path addPermRoot(StoreAPI & store, const Path & _storePath,
progress. This prevents the set of permanent roots from progress. This prevents the set of permanent roots from
increasing while a GC is in progress. */ increasing while a GC is in progress. */
store.syncWithGC(); store.syncWithGC();
return gcRoot; return gcRoot;
} }
@ -160,23 +160,23 @@ void LocalStore::addTempRoot(const Path & path)
if (fdTempRoots == -1) { if (fdTempRoots == -1) {
while (1) { while (1) {
Path dir = (format("%1%/%2%") % nixStateDir % tempRootsDir).str(); Path dir = (format("%1%/%2%") % settings.nixStateDir % tempRootsDir).str();
createDirs(dir); createDirs(dir);
fnTempRoots = (format("%1%/%2%") fnTempRoots = (format("%1%/%2%")
% dir % getpid()).str(); % dir % getpid()).str();
AutoCloseFD fdGCLock = openGCLock(ltRead); AutoCloseFD fdGCLock = openGCLock(ltRead);
if (pathExists(fnTempRoots)) if (pathExists(fnTempRoots))
/* It *must* be stale, since there can be no two /* It *must* be stale, since there can be no two
processes with the same pid. */ processes with the same pid. */
unlink(fnTempRoots.c_str()); unlink(fnTempRoots.c_str());
fdTempRoots = openLockFile(fnTempRoots, true); fdTempRoots = openLockFile(fnTempRoots, true);
fdGCLock.close(); fdGCLock.close();
debug(format("acquiring read lock on `%1%'") % fnTempRoots); debug(format("acquiring read lock on `%1%'") % fnTempRoots);
lockFile(fdTempRoots, ltRead, true); lockFile(fdTempRoots, ltRead, true);
@ -186,7 +186,7 @@ void LocalStore::addTempRoot(const Path & path)
if (fstat(fdTempRoots, &st) == -1) if (fstat(fdTempRoots, &st) == -1)
throw SysError(format("statting `%1%'") % fnTempRoots); throw SysError(format("statting `%1%'") % fnTempRoots);
if (st.st_size == 0) break; if (st.st_size == 0) break;
/* The garbage collector deleted this file before we could /* The garbage collector deleted this file before we could
get a lock. (It won't delete the file after we get a get a lock. (It won't delete the file after we get a
lock.) Try again. */ lock.) Try again. */
@ -218,7 +218,7 @@ void removeTempRoots()
/* Automatically clean up the temporary roots file when we exit. */ /* Automatically clean up the temporary roots file when we exit. */
struct RemoveTempRoots struct RemoveTempRoots
{ {
~RemoveTempRoots() ~RemoveTempRoots()
{ {
@ -238,10 +238,10 @@ static void readTempRoots(PathSet & tempRoots, FDs & fds)
/* Read the `temproots' directory for per-process temporary root /* Read the `temproots' directory for per-process temporary root
files. */ files. */
Strings tempRootFiles = readDirectory( Strings tempRootFiles = readDirectory(
(format("%1%/%2%") % nixStateDir % tempRootsDir).str()); (format("%1%/%2%") % settings.nixStateDir % tempRootsDir).str());
foreach (Strings::iterator, i, tempRootFiles) { foreach (Strings::iterator, i, tempRootFiles) {
Path path = (format("%1%/%2%/%3%") % nixStateDir % tempRootsDir % *i).str(); Path path = (format("%1%/%2%/%3%") % settings.nixStateDir % tempRootsDir % *i).str();
debug(format("reading temporary root file `%1%'") % path); debug(format("reading temporary root file `%1%'") % path);
FDPtr fd(new AutoCloseFD(open(path.c_str(), O_RDWR, 0666))); FDPtr fd(new AutoCloseFD(open(path.c_str(), O_RDWR, 0666)));
@ -295,7 +295,7 @@ static void findRoots(StoreAPI & store, const Path & path,
bool recurseSymlinks, bool deleteStale, Roots & roots) bool recurseSymlinks, bool deleteStale, Roots & roots)
{ {
try { try {
struct stat st; struct stat st;
if (lstat(path.c_str(), &st) == -1) if (lstat(path.c_str(), &st) == -1)
throw SysError(format("statting `%1%'") % path); throw SysError(format("statting `%1%'") % path);
@ -315,7 +315,7 @@ static void findRoots(StoreAPI & store, const Path & path,
debug(format("found root `%1%' in `%2%'") debug(format("found root `%1%' in `%2%'")
% target % path); % target % path);
Path storePath = toStorePath(target); Path storePath = toStorePath(target);
if (store.isValidPath(storePath)) if (store.isValidPath(storePath))
roots[path] = storePath; roots[path] = storePath;
else else
printMsg(lvlInfo, format("skipping invalid root from `%1%' to `%2%'") printMsg(lvlInfo, format("skipping invalid root from `%1%' to `%2%'")
@ -350,7 +350,7 @@ static void findRoots(StoreAPI & store, const Path & path,
static Roots findRoots(StoreAPI & store, bool deleteStale) static Roots findRoots(StoreAPI & store, bool deleteStale)
{ {
Roots roots; Roots roots;
Path rootsDir = canonPath((format("%1%/%2%") % nixStateDir % gcRootsDir).str()); Path rootsDir = canonPath((format("%1%/%2%") % settings.nixStateDir % gcRootsDir).str());
findRoots(store, rootsDir, true, deleteStale, roots); findRoots(store, rootsDir, true, deleteStale, roots);
return roots; return roots;
} }
@ -365,16 +365,16 @@ Roots LocalStore::findRoots()
static void addAdditionalRoots(StoreAPI & store, PathSet & roots) static void addAdditionalRoots(StoreAPI & store, PathSet & roots)
{ {
Path rootFinder = getEnv("NIX_ROOT_FINDER", Path rootFinder = getEnv("NIX_ROOT_FINDER",
nixLibexecDir + "/nix/find-runtime-roots.pl"); settings.nixLibexecDir + "/nix/find-runtime-roots.pl");
if (rootFinder.empty()) return; if (rootFinder.empty()) return;
debug(format("executing `%1%' to find additional roots") % rootFinder); debug(format("executing `%1%' to find additional roots") % rootFinder);
string result = runProgram(rootFinder); string result = runProgram(rootFinder);
Strings paths = tokenizeString(result, "\n"); Strings paths = tokenizeString(result, "\n");
foreach (Strings::iterator, i, paths) { foreach (Strings::iterator, i, paths) {
if (isInStore(*i)) { if (isInStore(*i)) {
Path path = toStorePath(*i); Path path = toStorePath(*i);
@ -556,7 +556,7 @@ bool LocalStore::tryToDelete(GCState & state, const Path & path)
} else } else
printMsg(lvlTalkative, format("would delete `%1%'") % path); printMsg(lvlTalkative, format("would delete `%1%'") % path);
state.deleted.insert(path); state.deleted.insert(path);
if (state.options.action != GCOptions::gcReturnLive) if (state.options.action != GCOptions::gcReturnLive)
state.results.paths.insert(path); state.results.paths.insert(path);
@ -621,10 +621,10 @@ void LocalStore::removeUnusedLinks(const GCState & state)
void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
{ {
GCState state(results); GCState state(results);
state.options = options; state.options = options;
state.gcKeepOutputs = queryBoolSetting("gc-keep-outputs", false); state.gcKeepOutputs = settings.gcKeepOutputs;
state.gcKeepDerivations = queryBoolSetting("gc-keep-derivations", true); state.gcKeepDerivations = settings.gcKeepDerivations;
/* Using `--ignore-liveness' with `--delete' can have unintended /* Using `--ignore-liveness' with `--delete' can have unintended
consequences if `gc-keep-outputs' or `gc-keep-derivations' are consequences if `gc-keep-outputs' or `gc-keep-derivations' are
@ -634,7 +634,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
state.gcKeepOutputs = false; state.gcKeepOutputs = false;
state.gcKeepDerivations = false; state.gcKeepDerivations = false;
} }
/* Acquire the global GC root. This prevents /* Acquire the global GC root. This prevents
a) New roots from being added. a) New roots from being added.
b) Processes from creating new temporary root files. */ b) Processes from creating new temporary root files. */
@ -675,18 +675,18 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
if (!tryToDelete(state, *i)) if (!tryToDelete(state, *i))
throw Error(format("cannot delete path `%1%' since it is still alive") % *i); throw Error(format("cannot delete path `%1%' since it is still alive") % *i);
} }
} else if (options.maxFreed > 0) { } else if (options.maxFreed > 0) {
if (shouldDelete(state.options.action)) if (shouldDelete(state.options.action))
printMsg(lvlError, format("deleting garbage...")); printMsg(lvlError, format("deleting garbage..."));
else else
printMsg(lvlError, format("determining live/dead paths...")); printMsg(lvlError, format("determining live/dead paths..."));
try { try {
AutoCloseDir dir = opendir(nixStore.c_str()); AutoCloseDir dir = opendir(settings.nixStore.c_str());
if (!dir) throw SysError(format("opening directory `%1%'") % nixStore); if (!dir) throw SysError(format("opening directory `%1%'") % settings.nixStore);
/* Read the store and immediately delete all paths that /* Read the store and immediately delete all paths that
aren't valid. When using --max-freed etc., deleting aren't valid. When using --max-freed etc., deleting
@ -700,14 +700,14 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
checkInterrupt(); checkInterrupt();
string name = dirent->d_name; string name = dirent->d_name;
if (name == "." || name == "..") continue; if (name == "." || name == "..") continue;
Path path = nixStore + "/" + name; Path path = settings.nixStore + "/" + name;
if (isValidPath(path)) if (isValidPath(path))
entries.push_back(path); entries.push_back(path);
else else
tryToDelete(state, path); tryToDelete(state, path);
} }
dir.close(); dir.close();
/* Now delete the unreachable valid paths. Randomise the /* Now delete the unreachable valid paths. Randomise the
order in which we delete entries to make the collector order in which we delete entries to make the collector


@ -10,36 +10,63 @@
namespace nix { namespace nix {
string nixStore = "/UNINIT"; Settings settings;
string nixDataDir = "/UNINIT";
string nixLogDir = "/UNINIT";
string nixStateDir = "/UNINIT";
string nixDBPath = "/UNINIT";
string nixConfDir = "/UNINIT";
string nixLibexecDir = "/UNINIT";
string nixBinDir = "/UNINIT";
bool keepFailed = false;
bool keepGoing = false;
bool tryFallback = false;
Verbosity buildVerbosity = lvlError;
unsigned int maxBuildJobs = 1;
unsigned int buildCores = 1;
bool readOnlyMode = false;
string thisSystem = "unset";
time_t maxSilentTime = 0;
time_t buildTimeout = 0;
Paths substituters;
bool useBuildHook = true;
bool printBuildTrace = false;
static bool settingsRead = false; Settings::Settings()
{
keepFailed = false;
keepGoing = false;
tryFallback = false;
buildVerbosity = lvlError;
maxBuildJobs = 1;
buildCores = 1;
readOnlyMode = false;
thisSystem = SYSTEM;
maxSilentTime = 0;
buildTimeout = 0;
useBuildHook = true;
printBuildTrace = false;
reservedSize = 1024 * 1024;
fsyncMetadata = true;
useSQLiteWAL = true;
syncBeforeRegistering = false;
useSubstitutes = true;
useChroot = false;
dirsInChroot.insert("/dev");
dirsInChroot.insert("/dev/pts");
impersonateLinux26 = false;
keepLog = true;
compressLog = true;
cacheFailure = false;
pollInterval = 5;
checkRootReachability = false;
gcKeepOutputs = false;
gcKeepDerivations = true;
autoOptimiseStore = true;
envKeepDerivations = false;
}
static std::map<string, Strings> settings;
/* Overriden settings. */ void Settings::processEnvironment()
std::map<string, Strings> settingsCmdline; {
nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR)));
nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR));
nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR));
nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR));
nixDBPath = getEnv("NIX_DB_DIR", nixStateDir + "/db");
nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR));
nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR));
nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR));
string subs = getEnv("NIX_SUBSTITUTERS", "default");
if (subs == "default") {
substituters.push_back(nixLibexecDir + "/nix/substituters/copy-from-other-stores.pl");
substituters.push_back(nixLibexecDir + "/nix/substituters/download-using-manifests.pl");
substituters.push_back(nixLibexecDir + "/nix/substituters/download-from-binary-cache.pl");
} else
substituters = tokenizeString(subs, ":");
}
string & at(Strings & ss, unsigned int n) string & at(Strings & ss, unsigned int n)
@ -50,7 +77,7 @@ string & at(Strings & ss, unsigned int n)
} }
static void readSettings() void Settings::loadConfFile()
{ {
Path settingsFile = (format("%1%/%2%") % nixConfDir % "nix.conf").str(); Path settingsFile = (format("%1%/%2%") % nixConfDir % "nix.conf").str();
if (!pathExists(settingsFile)) return; if (!pathExists(settingsFile)) return;
@ -78,95 +105,103 @@ static void readSettings()
Strings::iterator i = tokens.begin(); Strings::iterator i = tokens.begin();
advance(i, 2); advance(i, 2);
settings[name] = Strings(i, tokens.end()); settings[name] = concatStringsSep(" ", Strings(i, tokens.end())); // FIXME: slow
}; };
settings.insert(settingsCmdline.begin(), settingsCmdline.end());
settingsRead = true;
} }
Strings querySetting(const string & name, const Strings & def) void Settings::set(const string & name, const string & value)
{ {
if (!settingsRead) readSettings(); settings[name] = value;
std::map<string, Strings>::iterator i = settings.find(name); overrides[name] = value;
return i == settings.end() ? def : i->second;
} }
string querySetting(const string & name, const string & def) void Settings::update()
{ {
Strings defs; get(tryFallback, "build-fallback");
defs.push_back(def); get(maxBuildJobs, "build-max-jobs");
get(buildCores, "build-cores");
Strings value = querySetting(name, defs); get(thisSystem, "system");
if (value.size() != 1) get(maxSilentTime, "build-max-silent-time");
throw Error(format("configuration option `%1%' should not be a list") % name); get(buildTimeout, "build-timeout");
get(reservedSize, "gc-reserved-space");
return value.front(); get(fsyncMetadata, "fsync-metadata");
get(useSQLiteWAL, "use-sqlite-wal");
get(syncBeforeRegistering, "sync-before-registering");
get(useSubstitutes, "build-use-substitutes");
get(buildUsersGroup, "build-users-group");
get(useChroot, "build-use-chroot");
get(dirsInChroot, "build-chroot-dirs");
get(impersonateLinux26, "build-impersonate-linux-26");
get(keepLog, "build-keep-log");
get(compressLog, "build-compress-log");
get(cacheFailure, "build-cache-failure");
get(pollInterval, "build-poll-interval");
get(checkRootReachability, "gc-check-reachability");
get(gcKeepOutputs, "gc-keep-outputs");
get(gcKeepDerivations, "gc-keep-derivations");
get(autoOptimiseStore, "auto-optimise-store");
get(envKeepDerivations, "env-keep-derivations");
} }
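A small nix.conf exercising some of the options read by update() might look like this (values are purely illustrative; as loadConfFile above shows, everything after the first two tokens of a line becomes the value, so list-valued options are whitespace-separated):

    build-max-jobs = 4
    build-cores = 2
    build-max-silent-time = 3600
    build-use-chroot = true
    build-chroot-dirs = /dev /dev/pts /proc
    gc-keep-outputs = true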
bool queryBoolSetting(const string & name, bool def) void Settings::get(string & res, const string & name)
{ {
string v = querySetting(name, def ? "true" : "false"); SettingsMap::iterator i = settings.find(name);
if (v == "true") return true; if (i == settings.end()) return;
else if (v == "false") return false; res = i->second;
}
void Settings::get(bool & res, const string & name)
{
SettingsMap::iterator i = settings.find(name);
if (i == settings.end()) return;
if (i->second == "true") res = true;
else if (i->second == "false") res = false;
else throw Error(format("configuration option `%1%' should be either `true' or `false', not `%2%'") else throw Error(format("configuration option `%1%' should be either `true' or `false', not `%2%'")
% name % v); % name % i->second);
} }
unsigned int queryIntSetting(const string & name, unsigned int def) void Settings::get(PathSet & res, const string & name)
{ {
int n; SettingsMap::iterator i = settings.find(name);
if (!string2Int(querySetting(name, int2String(def)), n) || n < 0) if (i == settings.end()) return;
res.clear();
Strings ss = tokenizeString(i->second);
res.insert(ss.begin(), ss.end());
}
template<class N> void Settings::get(N & res, const string & name)
{
SettingsMap::iterator i = settings.find(name);
if (i == settings.end()) return;
if (!string2Int(i->second, res))
throw Error(format("configuration setting `%1%' should have an integer value") % name); throw Error(format("configuration setting `%1%' should have an integer value") % name);
return n;
} }
void overrideSetting(const string & name, const Strings & value) string Settings::pack()
{ {
if (settingsRead) settings[name] = value; string s;
settingsCmdline[name] = value; foreach (SettingsMap::iterator, i, settings) {
if (i->first.find('\n') != string::npos ||
i->first.find('=') != string::npos ||
i->second.find('\n') != string::npos)
throw Error("illegal option name/value");
s += i->first; s += '='; s += i->second; s += '\n';
}
return s;
} }
void reloadSettings() Settings::SettingsMap Settings::getOverrides()
{ {
settingsRead = false; return overrides;
settings.clear();
} }
void setDefaultsFromEnvironment()
{
/* Setup Nix paths. */
nixStore = canonPath(getEnv("NIX_STORE_DIR", getEnv("NIX_STORE", NIX_STORE_DIR)));
nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR));
nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR));
nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR));
nixDBPath = getEnv("NIX_DB_DIR", nixStateDir + "/db");
nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR));
nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR));
nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR));
string subs = getEnv("NIX_SUBSTITUTERS", "default");
if (subs == "default") {
substituters.push_back(nixLibexecDir + "/nix/substituters/copy-from-other-stores.pl");
substituters.push_back(nixLibexecDir + "/nix/substituters/download-using-manifests.pl");
} else
substituters = tokenizeString(subs, ":");
/* Get some settings from the configuration file. */
thisSystem = querySetting("system", SYSTEM);
maxBuildJobs = queryIntSetting("build-max-jobs", 1);
buildCores = queryIntSetting("build-cores", 1);
maxSilentTime = queryIntSetting("build-max-silent-time", 0);
buildTimeout = queryIntSetting("build-timeout", 0);
}
} }
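A minimal sketch of how the new Settings object is meant to be driven, mirroring initAndRun above (illustrative only; assumes the program is compiled and linked against libstore with globals.hh on the include path):

    #include "globals.hh"

    using namespace nix;

    int main()
    {
        settings.processEnvironment();        // pick up NIX_STORE_DIR, NIX_CONF_DIR, etc.
        settings.loadConfFile();              // read nix.conf from nixConfDir, if present
        settings.set("build-max-jobs", "4");  // what a flag like `--max-jobs 4' boils down to
        settings.update();                    // copy the string map into the typed fields
        return settings.maxBuildJobs == 4 ? 0 : 1;
    }

The order matters: command-line overrides must be applied after loadConfFile() (as initAndRun does), and nothing reaches the typed fields until update() runs.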


@ -2,118 +2,191 @@
#include "types.hh" #include "types.hh"
#include <map>
namespace nix { namespace nix {
/* Path names. */ struct Settings {
/* nixStore is the directory where we generally store atomic and typedef std::map<string, string> SettingsMap;
derived files. */
extern string nixStore;
extern string nixDataDir; /* !!! fix */ Settings();
/* nixLogDir is the directory where we log various operations. */ void processEnvironment();
extern string nixLogDir;
/* nixStateDir is the directory where state is stored. */ void loadConfFile();
extern string nixStateDir;
/* nixDBPath is the path name of our Berkeley DB environment. */ void set(const string & name, const string & value);
extern string nixDBPath;
/* nixConfDir is the directory where configuration files are void update();
stored. */
extern string nixConfDir;
/* nixLibexecDir is the directory where internal helper programs are string pack();
stored. */
extern string nixLibexecDir;
/* nixBinDir is the directory where the main programs are stored. */ SettingsMap getOverrides();
extern string nixBinDir;
/* The directory where we store sources and derived files. */
Path nixStore;
Path nixDataDir; /* !!! fix */
/* The directory where we log various operations. */
Path nixLogDir;
/* The directory where state is stored. */
Path nixStateDir;
/* The directory where we keep the SQLite database. */
Path nixDBPath;
/* The directory where configuration files are stored. */
Path nixConfDir;
/* The directory where internal helper programs are stored. */
Path nixLibexecDir;
/* The directory where the main programs are stored. */
Path nixBinDir;
/* Whether to keep temporary directories of failed builds. */
bool keepFailed;
/* Whether to keep building subgoals when a sibling (another
subgoal of the same goal) fails. */
bool keepGoing;
/* Whether, if we cannot realise the known closure corresponding
to a derivation, we should try to normalise the derivation
instead. */
bool tryFallback;
/* Verbosity level for build output. */
Verbosity buildVerbosity;
/* Maximum number of parallel build jobs. 0 means unlimited. */
unsigned int maxBuildJobs;
/* Number of CPU cores to utilize in parallel within a build,
i.e. by passing this number to Make via '-j'. 0 means that the
number of actual CPU cores on the local host ought to be
auto-detected. */
unsigned int buildCores;
/* Read-only mode. Don't copy stuff to the store, don't change
the database. */
bool readOnlyMode;
/* The canonical system name, as returned by config.guess. */
string thisSystem;
/* The maximum time in seconds that a builder can go without
producing any output on stdout/stderr before it is killed. 0
means infinity. */
time_t maxSilentTime;
/* The maximum duration in seconds that a builder can run. 0
means infinity. */
time_t buildTimeout;
/* The substituters. These are programs that can somehow realise
a store path without building, e.g., by downloading it or
copying it from a CD. */
Paths substituters;
/* Whether to use build hooks (for distributed builds). Sometimes
users want to disable this from the command-line. */
bool useBuildHook;
/* Whether buildDerivations() should print out lines on stderr in
a fixed format to allow its progress to be monitored. Each
line starts with a "@". The following are defined:
@ build-started <drvpath> <outpath> <system> <logfile>
@ build-failed <drvpath> <outpath> <exitcode> <error text>
@ build-succeeded <drvpath> <outpath>
@ substituter-started <outpath> <substituter>
@ substituter-failed <outpath> <exitcode> <error text>
@ substituter-succeeded <outpath>
Best combined with --no-build-output, otherwise stderr might
conceivably contain lines in this format printed by the
builders. */
bool printBuildTrace;
/* Amount of reserved space for the garbage collector
(/nix/var/nix/db/reserved). */
off_t reservedSize;
/* Whether SQLite should use fsync. */
bool fsyncMetadata;
/* Whether SQLite should use WAL mode. */
bool useSQLiteWAL;
/* Whether to call sync() before registering a path as valid. */
bool syncBeforeRegistering;
/* Whether to use substitutes. */
bool useSubstitutes;
/* The Unix group that contains the build users. */
string buildUsersGroup;
/* Whether to build in chroot. */
bool useChroot;
/* The directories from the host filesystem to be included in the
chroot. */
PathSet dirsInChroot;
/* Whether to impersonate a Linux 2.6 machine on newer kernels. */
bool impersonateLinux26;
/* Whether to store build logs. */
bool keepLog;
/* Whether to compress logs. */
bool compressLog;
/* Whether to cache build failures. */
bool cacheFailure;
/* How often (in seconds) to poll for locks. */
unsigned int pollInterval;
/* Whether to check if new GC roots can in fact be found by the
garbage collector. */
bool checkRootReachability;
/* Whether the garbage collector should keep outputs of live
derivations. */
bool gcKeepOutputs;
/* Whether the garbage collector should keep derivers of live
paths. */
bool gcKeepDerivations;
/* Whether to automatically replace files with identical contents
with hard links. */
bool autoOptimiseStore;
/* Whether to add derivations as a dependency of user environments
(to prevent them from being GCed). */
bool envKeepDerivations;
private:
SettingsMap settings, overrides;
void get(string & res, const string & name);
void get(bool & res, const string & name);
void get(PathSet & res, const string & name);
template<class N> void get(N & res, const string & name);
};
/* Misc. global flags. */ // FIXME: don't use a global variable.
extern Settings settings;
/* Whether to keep temporary directories of failed builds. */
extern bool keepFailed;
/* Whether to keep building subgoals when a sibling (another subgoal
of the same goal) fails. */
extern bool keepGoing;
/* Whether, if we cannot realise the known closure corresponding to a
derivation, we should try to normalise the derivation instead. */
extern bool tryFallback;
/* Verbosity level for build output. */
extern Verbosity buildVerbosity;
/* Maximum number of parallel build jobs. 0 means unlimited. */
extern unsigned int maxBuildJobs;
/* Number of CPU cores to utilize in parallel within a build, i.e. by passing
this number to Make via '-j'. 0 means that the number of actual CPU cores on
the local host ought to be auto-detected. */
extern unsigned int buildCores;
/* Read-only mode. Don't copy stuff to the store, don't change the
database. */
extern bool readOnlyMode;
/* The canonical system name, as returned by config.guess. */
extern string thisSystem;
/* The maximum time in seconds that a builder can go without producing
any output on stdout/stderr before it is killed. 0 means
infinity. */
extern time_t maxSilentTime;
/* The maximum duration in seconds that a builder can run. 0 means
infinity. */
extern time_t buildTimeout;
/* The substituters. These are programs that can somehow realise a
store path without building, e.g., by downloading it or copying it
from a CD. */
extern Paths substituters;
/* Whether to use build hooks (for distributed builds). Sometimes
users want to disable this from the command-line. */
extern bool useBuildHook;
/* Whether buildDerivations() should print out lines on stderr in a
fixed format to allow its progress to be monitored. Each line
starts with a "@". The following are defined:
@ build-started <drvpath> <outpath> <system> <logfile>
@ build-failed <drvpath> <outpath> <exitcode> <error text>
@ build-succeeded <drvpath> <outpath>
@ substituter-started <outpath> <substituter>
@ substituter-failed <outpath> <exitcode> <error text>
@ substituter-succeeded <outpath>
Best combined with --no-build-output, otherwise stderr might
conceivably contain lines in this format printed by the builders.
*/
extern bool printBuildTrace;
Strings querySetting(const string & name, const Strings & def);
string querySetting(const string & name, const string & def);
bool queryBoolSetting(const string & name, bool def);
unsigned int queryIntSetting(const string & name, unsigned int def);
void overrideSetting(const string & name, const Strings & value);
void reloadSettings();
void setDefaultsFromEnvironment();
} }
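To illustrate the build-trace protocol documented for printBuildTrace above, a run with --print-build-trace might emit lines like the following (all store paths, the log file and the substituter path are placeholders):

    @ build-started /nix/store/<hash>-foo.drv /nix/store/<hash>-foo x86_64-linux /nix/var/log/nix/drvs/<hash>-foo.drv
    @ build-succeeded /nix/store/<hash>-foo.drv /nix/store/<hash>-foo
    @ substituter-started /nix/store/<hash>-bar <libexecdir>/nix/substituters/download-from-binary-cache.pl
    @ substituter-succeeded /nix/store/<hash>-bar

As the comment notes, this is best combined with --no-build-output so that builder output cannot be mistaken for trace lines.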


@ -6,7 +6,7 @@
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "derivations.hh" #include "derivations.hh"
#include "immutable.hh" #include "immutable.hh"
#include <iostream> #include <iostream>
#include <algorithm> #include <algorithm>
@ -147,11 +147,11 @@ struct SQLiteStmtUse
}; };
struct SQLiteTxn struct SQLiteTxn
{ {
bool active; bool active;
sqlite3 * db; sqlite3 * db;
SQLiteTxn(sqlite3 * db) : active(false) { SQLiteTxn(sqlite3 * db) : active(false) {
this->db = db; this->db = db;
if (sqlite3_exec(db, "begin;", 0, 0, 0) != SQLITE_OK) if (sqlite3_exec(db, "begin;", 0, 0, 0) != SQLITE_OK)
@ -159,14 +159,14 @@ struct SQLiteTxn
active = true; active = true;
} }
void commit() void commit()
{ {
if (sqlite3_exec(db, "commit;", 0, 0, 0) != SQLITE_OK) if (sqlite3_exec(db, "commit;", 0, 0, 0) != SQLITE_OK)
throwSQLiteError(db, "committing transaction"); throwSQLiteError(db, "committing transaction");
active = false; active = false;
} }
~SQLiteTxn() ~SQLiteTxn()
{ {
try { try {
if (active && sqlite3_exec(db, "rollback;", 0, 0, 0) != SQLITE_OK) if (active && sqlite3_exec(db, "rollback;", 0, 0, 0) != SQLITE_OK)
@ -181,7 +181,7 @@ struct SQLiteTxn
void checkStoreNotSymlink() void checkStoreNotSymlink()
{ {
if (getEnv("NIX_IGNORE_SYMLINK_STORE") == "1") return; if (getEnv("NIX_IGNORE_SYMLINK_STORE") == "1") return;
Path path = nixStore; Path path = settings.nixStore;
struct stat st; struct stat st;
while (path != "/") { while (path != "/") {
if (lstat(path.c_str(), &st)) if (lstat(path.c_str(), &st))
@ -198,29 +198,27 @@ void checkStoreNotSymlink()
LocalStore::LocalStore(bool reserveSpace) LocalStore::LocalStore(bool reserveSpace)
{ {
substitutablePathsLoaded = false; schemaPath = settings.nixDBPath + "/schema";
schemaPath = nixDBPath + "/schema"; if (settings.readOnlyMode) {
if (readOnlyMode) {
openDB(false); openDB(false);
return; return;
} }
/* Create missing state directories if they don't already exist. */ /* Create missing state directories if they don't already exist. */
createDirs(nixStore); createDirs(settings.nixStore);
createDirs(linksDir = nixStore + "/.links"); createDirs(linksDir = settings.nixStore + "/.links");
Path profilesDir = nixStateDir + "/profiles"; Path profilesDir = settings.nixStateDir + "/profiles";
createDirs(nixStateDir + "/profiles"); createDirs(settings.nixStateDir + "/profiles");
createDirs(nixStateDir + "/temproots"); createDirs(settings.nixStateDir + "/temproots");
createDirs(nixDBPath); createDirs(settings.nixDBPath);
Path gcRootsDir = nixStateDir + "/gcroots"; Path gcRootsDir = settings.nixStateDir + "/gcroots";
if (!pathExists(gcRootsDir)) { if (!pathExists(gcRootsDir)) {
createDirs(gcRootsDir); createDirs(gcRootsDir);
if (symlink(profilesDir.c_str(), (gcRootsDir + "/profiles").c_str()) == -1) if (symlink(profilesDir.c_str(), (gcRootsDir + "/profiles").c_str()) == -1)
throw SysError(format("creating symlink to `%1%'") % profilesDir); throw SysError(format("creating symlink to `%1%'") % profilesDir);
} }
checkStoreNotSymlink(); checkStoreNotSymlink();
/* We can't open a SQLite database if the disk is full. Since /* We can't open a SQLite database if the disk is full. Since
@ -228,13 +226,12 @@ LocalStore::LocalStore(bool reserveSpace)
needed, we reserve some dummy space that we can free just needed, we reserve some dummy space that we can free just
before doing a garbage collection. */ before doing a garbage collection. */
try { try {
Path reservedPath = nixDBPath + "/reserved"; Path reservedPath = settings.nixDBPath + "/reserved";
if (reserveSpace) { if (reserveSpace) {
int reservedSize = queryIntSetting("gc-reserved-space", 1024 * 1024);
struct stat st; struct stat st;
if (stat(reservedPath.c_str(), &st) == -1 || if (stat(reservedPath.c_str(), &st) == -1 ||
st.st_size != reservedSize) st.st_size != settings.reservedSize)
writeFile(reservedPath, string(reservedSize, 'X')); writeFile(reservedPath, string(settings.reservedSize, 'X'));
} }
else else
deletePath(reservedPath); deletePath(reservedPath);
@ -244,15 +241,15 @@ LocalStore::LocalStore(bool reserveSpace)
/* Acquire the big fat lock in shared mode to make sure that no /* Acquire the big fat lock in shared mode to make sure that no
schema upgrade is in progress. */ schema upgrade is in progress. */
try { try {
Path globalLockPath = nixDBPath + "/big-lock"; Path globalLockPath = settings.nixDBPath + "/big-lock";
globalLock = openLockFile(globalLockPath.c_str(), true); globalLock = openLockFile(globalLockPath.c_str(), true);
} catch (SysError & e) { } catch (SysError & e) {
if (e.errNo != EACCES) throw; if (e.errNo != EACCES) throw;
readOnlyMode = true; settings.readOnlyMode = true;
openDB(false); openDB(false);
return; return;
} }
if (!lockFile(globalLock, ltRead, false)) { if (!lockFile(globalLock, ltRead, false)) {
printMsg(lvlError, "waiting for the big Nix store lock..."); printMsg(lvlError, "waiting for the big Nix store lock...");
lockFile(globalLock, ltRead, true); lockFile(globalLock, ltRead, true);
@ -264,20 +261,20 @@ LocalStore::LocalStore(bool reserveSpace)
if (curSchema > nixSchemaVersion) if (curSchema > nixSchemaVersion)
throw Error(format("current Nix store schema is version %1%, but I only support %2%") throw Error(format("current Nix store schema is version %1%, but I only support %2%")
% curSchema % nixSchemaVersion); % curSchema % nixSchemaVersion);
else if (curSchema == 0) { /* new store */ else if (curSchema == 0) { /* new store */
curSchema = nixSchemaVersion; curSchema = nixSchemaVersion;
openDB(true); openDB(true);
writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str()); writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
} }
else if (curSchema < nixSchemaVersion) { else if (curSchema < nixSchemaVersion) {
if (curSchema < 5) if (curSchema < 5)
throw Error( throw Error(
"Your Nix store has a database in Berkeley DB format,\n" "Your Nix store has a database in Berkeley DB format,\n"
"which is no longer supported. To convert to the new format,\n" "which is no longer supported. To convert to the new format,\n"
"please upgrade Nix to version 0.12 first."); "please upgrade Nix to version 0.12 first.");
if (!lockFile(globalLock, ltWrite, false)) { if (!lockFile(globalLock, ltWrite, false)) {
printMsg(lvlError, "waiting for exclusive access to the Nix store..."); printMsg(lvlError, "waiting for exclusive access to the Nix store...");
lockFile(globalLock, ltWrite, true); lockFile(globalLock, ltWrite, true);
@ -293,7 +290,7 @@ LocalStore::LocalStore(bool reserveSpace)
lockFile(globalLock, ltRead, true); lockFile(globalLock, ltRead, true);
} }
else openDB(false); else openDB(false);
} }
@ -327,7 +324,7 @@ int LocalStore::getSchema()
void LocalStore::openDB(bool create) void LocalStore::openDB(bool create)
{ {
/* Open the Nix database. */ /* Open the Nix database. */
if (sqlite3_open_v2((nixDBPath + "/db.sqlite").c_str(), &db.db, if (sqlite3_open_v2((settings.nixDBPath + "/db.sqlite").c_str(), &db.db,
SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK) SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
throw Error("cannot open SQLite database"); throw Error("cannot open SQLite database");
@ -339,18 +336,18 @@ void LocalStore::openDB(bool create)
/* !!! check whether sqlite has been built with foreign key /* !!! check whether sqlite has been built with foreign key
support */ support */
/* Whether SQLite should fsync(). "Normal" synchronous mode /* Whether SQLite should fsync(). "Normal" synchronous mode
should be safe enough. If the user asks for it, don't sync at should be safe enough. If the user asks for it, don't sync at
all. This can cause database corruption if the system all. This can cause database corruption if the system
crashes. */ crashes. */
string syncMode = queryBoolSetting("fsync-metadata", true) ? "normal" : "off"; string syncMode = settings.fsyncMetadata ? "normal" : "off";
if (sqlite3_exec(db, ("pragma synchronous = " + syncMode + ";").c_str(), 0, 0, 0) != SQLITE_OK) if (sqlite3_exec(db, ("pragma synchronous = " + syncMode + ";").c_str(), 0, 0, 0) != SQLITE_OK)
throwSQLiteError(db, "setting synchronous mode"); throwSQLiteError(db, "setting synchronous mode");
/* Set the SQLite journal mode. WAL mode is fastest, so it's the /* Set the SQLite journal mode. WAL mode is fastest, so it's the
default. */ default. */
string mode = queryBoolSetting("use-sqlite-wal", true) ? "wal" : "truncate"; string mode = settings.useSQLiteWAL ? "wal" : "truncate";
string prevMode; string prevMode;
{ {
SQLiteStmt stmt; SQLiteStmt stmt;
@ -368,7 +365,7 @@ void LocalStore::openDB(bool create)
derivation is done in a single fsync(). */ derivation is done in a single fsync(). */
if (mode == "wal" && sqlite3_exec(db, "pragma wal_autocheckpoint = 8192;", 0, 0, 0) != SQLITE_OK) if (mode == "wal" && sqlite3_exec(db, "pragma wal_autocheckpoint = 8192;", 0, 0, 0) != SQLITE_OK)
throwSQLiteError(db, "setting autocheckpoint interval"); throwSQLiteError(db, "setting autocheckpoint interval");
/* Initialise the database schema, if necessary. */ /* Initialise the database schema, if necessary. */
if (create) { if (create) {
#include "schema.sql.hh" #include "schema.sql.hh"
@ -423,7 +420,7 @@ void canonicalisePathMetaData(const Path & path, bool recurse)
struct stat st; struct stat st;
if (lstat(path.c_str(), &st)) if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path `%1%'") % path); throw SysError(format("getting attributes of path `%1%'") % path);
/* Really make sure that the path is of a supported type. This /* Really make sure that the path is of a supported type. This
has already been checked in dumpPath(). */ has already been checked in dumpPath(). */
@ -451,7 +448,7 @@ void canonicalisePathMetaData(const Path & path, bool recurse)
/* Mask out all type related bits. */ /* Mask out all type related bits. */
mode_t mode = st.st_mode & ~S_IFMT; mode_t mode = st.st_mode & ~S_IFMT;
if (mode != 0444 && mode != 0555) { if (mode != 0444 && mode != 0555) {
mode = (st.st_mode & S_IFMT) mode = (st.st_mode & S_IFMT)
| 0444 | 0444
@ -461,7 +458,7 @@ void canonicalisePathMetaData(const Path & path, bool recurse)
} }
} }
if (st.st_mtime != mtimeStore) { if (st.st_mtime != mtimeStore) {
struct timeval times[2]; struct timeval times[2];
times[0].tv_sec = st.st_atime; times[0].tv_sec = st.st_atime;
@ -472,14 +469,14 @@ void canonicalisePathMetaData(const Path & path, bool recurse)
if (lutimes(path.c_str(), times) == -1) if (lutimes(path.c_str(), times) == -1)
#else #else
if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1) if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)
#endif #endif
throw SysError(format("changing modification time of `%1%'") % path); throw SysError(format("changing modification time of `%1%'") % path);
} }
if (recurse && S_ISDIR(st.st_mode)) { if (recurse && S_ISDIR(st.st_mode)) {
Strings names = readDirectory(path); Strings names = readDirectory(path);
foreach (Strings::iterator, i, names) foreach (Strings::iterator, i, names)
canonicalisePathMetaData(path + "/" + *i, true); canonicalisePathMetaData(path + "/" + *i, true);
} }
makeImmutable(path); makeImmutable(path);
@ -494,7 +491,7 @@ void canonicalisePathMetaData(const Path & path)
be a symlink, since we can't change its ownership. */ be a symlink, since we can't change its ownership. */
struct stat st; struct stat st;
if (lstat(path.c_str(), &st)) if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path `%1%'") % path); throw SysError(format("getting attributes of path `%1%'") % path);
if (st.st_uid != geteuid()) { if (st.st_uid != geteuid()) {
assert(S_ISLNK(st.st_mode)); assert(S_ISLNK(st.st_mode));
@ -508,7 +505,7 @@ void LocalStore::checkDerivationOutputs(const Path & drvPath, const Derivation &
string drvName = storePathToName(drvPath); string drvName = storePathToName(drvPath);
assert(isDerivation(drvName)); assert(isDerivation(drvName));
drvName = string(drvName, 0, drvName.size() - drvExtension.size()); drvName = string(drvName, 0, drvName.size() - drvExtension.size());
if (isFixedOutputDrv(drv)) { if (isFixedOutputDrv(drv)) {
DerivationOutputs::const_iterator out = drv.outputs.find("out"); DerivationOutputs::const_iterator out = drv.outputs.find("out");
if (out == drv.outputs.end()) if (out == drv.outputs.end())
@ -532,7 +529,7 @@ void LocalStore::checkDerivationOutputs(const Path & drvPath, const Derivation &
} }
Hash h = hashDerivationModulo(*this, drvCopy); Hash h = hashDerivationModulo(*this, drvCopy);
foreach (DerivationOutputs::const_iterator, i, drv.outputs) { foreach (DerivationOutputs::const_iterator, i, drv.outputs) {
Path outPath = makeOutputPath(i->first, h, drvName); Path outPath = makeOutputPath(i->first, h, drvName);
StringPairs::const_iterator j = drv.env.find(i->first); StringPairs::const_iterator j = drv.env.find(i->first);
@ -568,14 +565,14 @@ unsigned long long LocalStore::addValidPath(const ValidPathInfo & info, bool che
derivation. */ derivation. */
if (isDerivation(info.path)) { if (isDerivation(info.path)) {
Derivation drv = parseDerivation(readFile(info.path)); Derivation drv = parseDerivation(readFile(info.path));
/* Verify that the output paths in the derivation are correct /* Verify that the output paths in the derivation are correct
(i.e., follow the scheme for computing output paths from (i.e., follow the scheme for computing output paths from
derivations). Note that if this throws an error, then the derivations). Note that if this throws an error, then the
DB transaction is rolled back, so the path validity DB transaction is rolled back, so the path validity
registration above is undone. */ registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv); if (checkOutputs) checkDerivationOutputs(info.path, drv);
foreach (DerivationOutputs::iterator, i, drv.outputs) { foreach (DerivationOutputs::iterator, i, drv.outputs) {
SQLiteStmtUse use(stmtAddDerivationOutput); SQLiteStmtUse use(stmtAddDerivationOutput);
stmtAddDerivationOutput.bind(id); stmtAddDerivationOutput.bind(id);
@ -681,7 +678,7 @@ ValidPathInfo LocalStore::queryPathInfo(const Path & path)
SQLiteStmtUse use1(stmtQueryPathInfo); SQLiteStmtUse use1(stmtQueryPathInfo);
stmtQueryPathInfo.bind(path); stmtQueryPathInfo.bind(path);
int r = sqlite3_step(stmtQueryPathInfo); int r = sqlite3_step(stmtQueryPathInfo);
if (r == SQLITE_DONE) throw Error(format("path `%1%' is not valid") % path); if (r == SQLITE_DONE) throw Error(format("path `%1%' is not valid") % path);
if (r != SQLITE_ROW) throwSQLiteError(db, "querying path in database"); if (r != SQLITE_ROW) throwSQLiteError(db, "querying path in database");
@ -691,7 +688,7 @@ ValidPathInfo LocalStore::queryPathInfo(const Path & path)
const char * s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 1); const char * s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 1);
assert(s); assert(s);
info.hash = parseHashField(path, s); info.hash = parseHashField(path, s);
info.registrationTime = sqlite3_column_int(stmtQueryPathInfo, 2); info.registrationTime = sqlite3_column_int(stmtQueryPathInfo, 2);
s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 3); s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 3);
@ -756,13 +753,22 @@ bool LocalStore::isValidPath(const Path & path)
} }
PathSet LocalStore::queryValidPaths() PathSet LocalStore::queryValidPaths(const PathSet & paths)
{
PathSet res;
foreach (PathSet::const_iterator, i, paths)
if (isValidPath(*i)) res.insert(*i);
return res;
}
PathSet LocalStore::queryAllValidPaths()
{ {
SQLiteStmt stmt; SQLiteStmt stmt;
stmt.create(db, "select path from ValidPaths"); stmt.create(db, "select path from ValidPaths");
PathSet res; PathSet res;
int r; int r;
while ((r = sqlite3_step(stmt)) == SQLITE_ROW) { while ((r = sqlite3_step(stmt)) == SQLITE_ROW) {
const char * s = (const char *) sqlite3_column_text(stmt, 0); const char * s = (const char *) sqlite3_column_text(stmt, 0);
@ -825,10 +831,10 @@ PathSet LocalStore::queryValidDerivers(const Path & path)
assert(s); assert(s);
derivers.insert(s); derivers.insert(s);
} }
if (r != SQLITE_DONE) if (r != SQLITE_DONE)
throwSQLiteError(db, format("error getting valid derivers of `%1%'") % path); throwSQLiteError(db, format("error getting valid derivers of `%1%'") % path);
return derivers; return derivers;
} }
@ -836,10 +842,10 @@ PathSet LocalStore::queryValidDerivers(const Path & path)
PathSet LocalStore::queryDerivationOutputs(const Path & path) PathSet LocalStore::queryDerivationOutputs(const Path & path)
{ {
SQLiteTxn txn(db); SQLiteTxn txn(db);
SQLiteStmtUse use(stmtQueryDerivationOutputs); SQLiteStmtUse use(stmtQueryDerivationOutputs);
stmtQueryDerivationOutputs.bind(queryValidPathId(path)); stmtQueryDerivationOutputs.bind(queryValidPathId(path));
PathSet outputs; PathSet outputs;
int r; int r;
while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) { while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) {
@ -847,7 +853,7 @@ PathSet LocalStore::queryDerivationOutputs(const Path & path)
assert(s); assert(s);
outputs.insert(s); outputs.insert(s);
} }
if (r != SQLITE_DONE) if (r != SQLITE_DONE)
throwSQLiteError(db, format("error getting outputs of `%1%'") % path); throwSQLiteError(db, format("error getting outputs of `%1%'") % path);
@ -858,10 +864,10 @@ PathSet LocalStore::queryDerivationOutputs(const Path & path)
StringSet LocalStore::queryDerivationOutputNames(const Path & path) StringSet LocalStore::queryDerivationOutputNames(const Path & path)
{ {
SQLiteTxn txn(db); SQLiteTxn txn(db);
SQLiteStmtUse use(stmtQueryDerivationOutputs); SQLiteStmtUse use(stmtQueryDerivationOutputs);
stmtQueryDerivationOutputs.bind(queryValidPathId(path)); stmtQueryDerivationOutputs.bind(queryValidPathId(path));
StringSet outputNames; StringSet outputNames;
int r; int r;
while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) { while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) {
@ -869,7 +875,7 @@ StringSet LocalStore::queryDerivationOutputNames(const Path & path)
assert(s); assert(s);
outputNames.insert(s); outputNames.insert(s);
} }
if (r != SQLITE_DONE) if (r != SQLITE_DONE)
throwSQLiteError(db, format("error getting output names of `%1%'") % path); throwSQLiteError(db, format("error getting output names of `%1%'") % path);
@ -880,11 +886,11 @@ StringSet LocalStore::queryDerivationOutputNames(const Path & path)
Path LocalStore::queryPathFromHashPart(const string & hashPart) Path LocalStore::queryPathFromHashPart(const string & hashPart)
{ {
if (hashPart.size() != 32) throw Error("invalid hash part"); if (hashPart.size() != 32) throw Error("invalid hash part");
SQLiteTxn txn(db); SQLiteTxn txn(db);
Path prefix = nixStore + "/" + hashPart; Path prefix = settings.nixStore + "/" + hashPart;
SQLiteStmtUse use(stmtQueryPathFromHashPart); SQLiteStmtUse use(stmtQueryPathFromHashPart);
stmtQueryPathFromHashPart.bind(prefix); stmtQueryPathFromHashPart.bind(prefix);
@ -900,16 +906,17 @@ Path LocalStore::queryPathFromHashPart(const string & hashPart)
void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter & run) void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter & run)
{ {
if (run.pid != -1) return; if (run.pid != -1) return;
debug(format("starting substituter program `%1%'") % substituter); debug(format("starting substituter program `%1%'") % substituter);
Pipe toPipe, fromPipe; Pipe toPipe, fromPipe, errorPipe;
toPipe.create(); toPipe.create();
fromPipe.create(); fromPipe.create();
errorPipe.create();
run.pid = fork(); run.pid = fork();
switch (run.pid) { switch (run.pid) {
case -1: case -1:
@ -923,13 +930,19 @@ void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter &
library named libutil. As a result, substituters library named libutil. As a result, substituters
written in Perl (i.e. all of them) fail. */ written in Perl (i.e. all of them) fail. */
unsetenv("DYLD_LIBRARY_PATH"); unsetenv("DYLD_LIBRARY_PATH");
/* Pass configuration options (including those overridden /* Pass configuration options (including those overridden
with --option) to the substituter. */
setenv("_NIX_OPTIONS", settings.pack().c_str(), 1);
fromPipe.readSide.close(); fromPipe.readSide.close();
toPipe.writeSide.close(); toPipe.writeSide.close();
if (dup2(toPipe.readSide, STDIN_FILENO) == -1) if (dup2(toPipe.readSide, STDIN_FILENO) == -1)
throw SysError("dupping stdin"); throw SysError("dupping stdin");
if (dup2(fromPipe.writeSide, STDOUT_FILENO) == -1) if (dup2(fromPipe.writeSide, STDOUT_FILENO) == -1)
throw SysError("dupping stdout"); throw SysError("dupping stdout");
if (dup2(errorPipe.writeSide, STDERR_FILENO) == -1)
throw SysError("dupping stderr");
closeMostFDs(set<int>()); closeMostFDs(set<int>());
execl(substituter.c_str(), substituter.c_str(), "--query", NULL); execl(substituter.c_str(), substituter.c_str(), "--query", NULL);
throw SysError(format("executing `%1%'") % substituter); throw SysError(format("executing `%1%'") % substituter);
@ -940,9 +953,10 @@ void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter &
} }
/* Parent. */ /* Parent. */
run.to = toPipe.writeSide.borrow(); run.to = toPipe.writeSide.borrow();
run.from = fromPipe.readSide.borrow(); run.from = fromPipe.readSide.borrow();
run.error = errorPipe.readSide.borrow();
} }
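Aside: the child setup above follows the common fork/dup2/exec shape, with a third pipe added for stderr and the options passed through the environment. A reduced, self-contained sketch assuming only POSIX; the program path and the _NIX_OPTIONS payload are placeholders.

    // Reduced sketch of the fork/exec pattern above: three pipes wired to the
    // child's stdin/stdout/stderr, plus an environment variable carrying options.
    #include <sys/types.h>
    #include <unistd.h>
    #include <stdlib.h>
    #include <stdexcept>

    struct ChildPipes { int toChild; int fromChild; int childError; pid_t pid; };

    static ChildPipes startHelper(const char * program)
    {
        int toPipe[2], fromPipe[2], errPipe[2];
        if (pipe(toPipe) || pipe(fromPipe) || pipe(errPipe))
            throw std::runtime_error("creating pipes");

        pid_t pid = fork();
        if (pid == -1) throw std::runtime_error("fork failed");

        if (pid == 0) {                        /* child */
            setenv("_NIX_OPTIONS", "example-option=1", 1); // placeholder payload
            dup2(toPipe[0], STDIN_FILENO);     // requests arrive on stdin
            dup2(fromPipe[1], STDOUT_FILENO);  // replies go to stdout
            dup2(errPipe[1], STDERR_FILENO);   // errors go to a separate pipe
            execl(program, program, "--query", (char *) 0);
            _exit(1);                          // only reached if exec failed
        }

        /* parent: keep the ends it needs, close the child's ends */
        close(toPipe[0]); close(fromPipe[1]); close(errPipe[1]);
        ChildPipes r = { toPipe[1], fromPipe[0], errPipe[0], pid };
        return r;
    }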
@ -955,50 +969,79 @@ template<class T> T getIntLine(int fd)
} }
bool LocalStore::hasSubstitutes(const Path & path) PathSet LocalStore::querySubstitutablePaths(const PathSet & paths)
{ {
foreach (Paths::iterator, i, substituters) { PathSet res;
foreach (Paths::iterator, i, settings.substituters) {
if (res.size() == paths.size()) break;
RunningSubstituter & run(runningSubstituters[*i]); RunningSubstituter & run(runningSubstituters[*i]);
startSubstituter(*i, run); startSubstituter(*i, run);
writeLine(run.to, "have\n" + path); string s = "have ";
if (getIntLine<int>(run.from)) return true; foreach (PathSet::const_iterator, j, paths)
if (res.find(*j) == res.end()) { s += *j; s += " "; }
writeLine(run.to, s);
while (true) {
/* FIXME: we only read stderr when an error occurs, so
substituters should only write (short) messages to
stderr when they fail. I.e. they shouldn't write debug
output. */
try {
Path path = readLine(run.from);
if (path == "") break;
res.insert(path);
} catch (EndOfFile e) {
throw Error(format("substituter `%1%' failed: %2%") % *i % chomp(drainFD(run.error)));
}
}
} }
return res;
return false;
} }
bool LocalStore::querySubstitutablePathInfo(const Path & substituter, void LocalStore::querySubstitutablePathInfos(const Path & substituter,
const Path & path, SubstitutablePathInfo & info) PathSet & paths, SubstitutablePathInfos & infos)
{ {
RunningSubstituter & run(runningSubstituters[substituter]); RunningSubstituter & run(runningSubstituters[substituter]);
startSubstituter(substituter, run); startSubstituter(substituter, run);
writeLine(run.to, "info\n" + path); string s = "info ";
foreach (PathSet::const_iterator, i, paths)
if (infos.find(*i) == infos.end()) { s += *i; s += " "; }
writeLine(run.to, s);
if (!getIntLine<int>(run.from)) return false; while (true) {
try {
info.deriver = readLine(run.from); Path path = readLine(run.from);
if (info.deriver != "") assertStorePath(info.deriver); if (path == "") break;
int nrRefs = getIntLine<int>(run.from); if (paths.find(path) == paths.end())
while (nrRefs--) { throw Error(format("got unexpected path `%1%' from substituter") % path);
Path p = readLine(run.from); paths.erase(path);
assertStorePath(p); SubstitutablePathInfo & info(infos[path]);
info.references.insert(p); info.deriver = readLine(run.from);
if (info.deriver != "") assertStorePath(info.deriver);
int nrRefs = getIntLine<int>(run.from);
while (nrRefs--) {
Path p = readLine(run.from);
assertStorePath(p);
info.references.insert(p);
}
info.downloadSize = getIntLine<long long>(run.from);
info.narSize = getIntLine<long long>(run.from);
} catch (EndOfFile e) {
throw Error(format("substituter `%1%' failed: %2%") % substituter % chomp(drainFD(run.error)));
}
} }
info.downloadSize = getIntLine<long long>(run.from);
info.narSize = getIntLine<long long>(run.from);
return true;
} }
bool LocalStore::querySubstitutablePathInfo(const Path & path, void LocalStore::querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfo & info) SubstitutablePathInfos & infos)
{ {
foreach (Paths::iterator, i, substituters) PathSet todo = paths;
if (querySubstitutablePathInfo(*i, path, info)) return true; foreach (Paths::iterator, i, settings.substituters) {
return false; if (todo.empty()) break;
querySubstitutablePathInfos(*i, todo, infos);
}
} }
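Aside: the substituter reply to an "info" request above is a line-oriented stream: for each path, the deriver, a reference count, the references, the download size and the NAR size, with an empty line ending the whole reply. A sketch of reading one such block with plain iostreams standing in for the Nix readLine/getIntLine helpers; only the field order is taken from the code above.

    // Sketch of reading one "info" reply block in the line protocol shown above.
    #include <iostream>
    #include <set>
    #include <string>

    struct SubstInfo {
        std::string path, deriver;
        std::set<std::string> references;
        long long downloadSize;
        long long narSize;
    };

    static bool readInfo(std::istream & from, SubstInfo & info)
    {
        if (!std::getline(from, info.path) || info.path.empty())
            return false;                      // empty line: no more results
        std::getline(from, info.deriver);
        std::string line;
        std::getline(from, line);
        int nrRefs = std::stoi(line);
        while (nrRefs--) {
            std::getline(from, line);
            info.references.insert(line);
        }
        std::getline(from, line); info.downloadSize = std::stoll(line);
        std::getline(from, line); info.narSize = std::stoll(line);
        return true;
    }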
@ -1018,17 +1061,16 @@ void LocalStore::registerValidPath(const ValidPathInfo & info)
void LocalStore::registerValidPaths(const ValidPathInfos & infos) void LocalStore::registerValidPaths(const ValidPathInfos & infos)
{ {
/* sqlite will fsync by default, but the new valid paths may not be fsync-ed. /* SQLite will fsync by default, but the new valid paths may not be fsync-ed.
* So some may want to fsync them before registering the validity, at the * So some may want to fsync them before registering the validity, at the
* expense of some speed of the path registering operation. */ * expense of some speed of the path registering operation. */
if (queryBoolSetting("sync-before-registering", false)) if (settings.syncBeforeRegistering) sync();
sync();
while (1) { while (1) {
try { try {
SQLiteTxn txn(db); SQLiteTxn txn(db);
PathSet paths; PathSet paths;
foreach (ValidPathInfos::const_iterator, i, infos) { foreach (ValidPathInfos::const_iterator, i, infos) {
assert(i->hash.type == htSHA256); assert(i->hash.type == htSHA256);
/* !!! Maybe the registration info should be updated if the /* !!! Maybe the registration info should be updated if the
@ -1119,7 +1161,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
hash = hashPath(htSHA256, dstPath); hash = hashPath(htSHA256, dstPath);
optimisePath(dstPath); // FIXME: combine with hashPath() optimisePath(dstPath); // FIXME: combine with hashPath()
ValidPathInfo info; ValidPathInfo info;
info.path = dstPath; info.path = dstPath;
info.hash = hash.first; info.hash = hash.first;
@ -1144,7 +1186,7 @@ Path LocalStore::addToStore(const Path & _srcPath,
method for very large paths, but `copyPath' is mainly used for method for very large paths, but `copyPath' is mainly used for
small files. */ small files. */
StringSink sink; StringSink sink;
if (recursive) if (recursive)
dumpPath(srcPath, sink, filter); dumpPath(srcPath, sink, filter);
else else
sink.s = readFile(srcPath); sink.s = readFile(srcPath);
@ -1157,7 +1199,7 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
const PathSet & references) const PathSet & references)
{ {
Path dstPath = computeStorePathForText(name, s, references); Path dstPath = computeStorePathForText(name, s, references);
addTempRoot(dstPath); addTempRoot(dstPath);
if (!isValidPath(dstPath)) { if (!isValidPath(dstPath)) {
@ -1175,7 +1217,7 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
HashResult hash = hashPath(htSHA256, dstPath); HashResult hash = hashPath(htSHA256, dstPath);
optimisePath(dstPath); optimisePath(dstPath);
ValidPathInfo info; ValidPathInfo info;
info.path = dstPath; info.path = dstPath;
info.hash = hash.first; info.hash = hash.first;
@ -1233,7 +1275,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
throw Error(format("path `%1%' is not valid") % path); throw Error(format("path `%1%' is not valid") % path);
HashAndWriteSink hashAndWriteSink(sink); HashAndWriteSink hashAndWriteSink(sink);
dumpPath(path, hashAndWriteSink); dumpPath(path, hashAndWriteSink);
/* Refuse to export paths that have changed. This prevents /* Refuse to export paths that have changed. This prevents
@ -1248,7 +1290,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
writeInt(EXPORT_MAGIC, hashAndWriteSink); writeInt(EXPORT_MAGIC, hashAndWriteSink);
writeString(path, hashAndWriteSink); writeString(path, hashAndWriteSink);
PathSet references; PathSet references;
queryReferences(path, references); queryReferences(path, references);
writeStrings(references, hashAndWriteSink); writeStrings(references, hashAndWriteSink);
@ -1258,15 +1300,15 @@ void LocalStore::exportPath(const Path & path, bool sign,
if (sign) { if (sign) {
Hash hash = hashAndWriteSink.currentHash(); Hash hash = hashAndWriteSink.currentHash();
writeInt(1, hashAndWriteSink); writeInt(1, hashAndWriteSink);
Path tmpDir = createTempDir(); Path tmpDir = createTempDir();
AutoDelete delTmp(tmpDir); AutoDelete delTmp(tmpDir);
Path hashFile = tmpDir + "/hash"; Path hashFile = tmpDir + "/hash";
writeFile(hashFile, printHash(hash)); writeFile(hashFile, printHash(hash));
Path secretKey = nixConfDir + "/signing-key.sec"; Path secretKey = settings.nixConfDir + "/signing-key.sec";
checkSecrecy(secretKey); checkSecrecy(secretKey);
Strings args; Strings args;
@ -1279,7 +1321,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
string signature = runProgram(OPENSSL_PATH, true, args); string signature = runProgram(OPENSSL_PATH, true, args);
writeString(signature, hashAndWriteSink); writeString(signature, hashAndWriteSink);
} else } else
writeInt(0, hashAndWriteSink); writeInt(0, hashAndWriteSink);
} }
@ -1312,7 +1354,7 @@ Path LocalStore::createTempDirInStore()
/* There is a slight possibility that `tmpDir' gets deleted by /* There is a slight possibility that `tmpDir' gets deleted by
the GC between createTempDir() and addTempRoot(), so repeat the GC between createTempDir() and addTempRoot(), so repeat
until `tmpDir' exists. */ until `tmpDir' exists. */
tmpDir = createTempDir(nixStore); tmpDir = createTempDir(settings.nixStore);
addTempRoot(tmpDir); addTempRoot(tmpDir);
} while (!pathExists(tmpDir)); } while (!pathExists(tmpDir));
return tmpDir; return tmpDir;
@ -1322,7 +1364,7 @@ Path LocalStore::createTempDirInStore()
Path LocalStore::importPath(bool requireSignature, Source & source) Path LocalStore::importPath(bool requireSignature, Source & source)
{ {
HashAndReadSource hashAndReadSource(source); HashAndReadSource hashAndReadSource(source);
/* We don't yet know what store path this archive contains (the /* We don't yet know what store path this archive contains (the
store path follows the archive data proper), and besides, we store path follows the archive data proper), and besides, we
don't know yet whether the signature is valid. */ don't know yet whether the signature is valid. */
@ -1352,7 +1394,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
if (requireSignature && !haveSignature) if (requireSignature && !haveSignature)
throw Error(format("imported archive of `%1%' lacks a signature") % dstPath); throw Error(format("imported archive of `%1%' lacks a signature") % dstPath);
if (haveSignature) { if (haveSignature) {
string signature = readString(hashAndReadSource); string signature = readString(hashAndReadSource);
@ -1364,7 +1406,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
args.push_back("rsautl"); args.push_back("rsautl");
args.push_back("-verify"); args.push_back("-verify");
args.push_back("-inkey"); args.push_back("-inkey");
args.push_back(nixConfDir + "/signing-key.pub"); args.push_back(settings.nixConfDir + "/signing-key.pub");
args.push_back("-pubin"); args.push_back("-pubin");
args.push_back("-in"); args.push_back("-in");
args.push_back(sigFile); args.push_back(sigFile);
@ -1406,13 +1448,13 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
% unpacked % dstPath); % unpacked % dstPath);
canonicalisePathMetaData(dstPath); canonicalisePathMetaData(dstPath);
/* !!! if we were clever, we could prevent the hashPath() /* !!! if we were clever, we could prevent the hashPath()
here. */ here. */
HashResult hash = hashPath(htSHA256, dstPath); HashResult hash = hashPath(htSHA256, dstPath);
optimisePath(dstPath); // FIXME: combine with hashPath() optimisePath(dstPath); // FIXME: combine with hashPath()
ValidPathInfo info; ValidPathInfo info;
info.path = dstPath; info.path = dstPath;
info.hash = hash.first; info.hash = hash.first;
@ -1421,10 +1463,10 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
info.deriver = deriver != "" && isValidPath(deriver) ? deriver : ""; info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
registerValidPath(info); registerValidPath(info);
} }
outputLock.setDeletion(true); outputLock.setDeletion(true);
} }
return dstPath; return dstPath;
} }
@ -1472,14 +1514,14 @@ void LocalStore::verifyStore(bool checkContents)
/* Acquire the global GC lock to prevent a garbage collection. */ /* Acquire the global GC lock to prevent a garbage collection. */
AutoCloseFD fdGCLock = openGCLock(ltWrite); AutoCloseFD fdGCLock = openGCLock(ltWrite);
Paths entries = readDirectory(nixStore); Paths entries = readDirectory(settings.nixStore);
PathSet store(entries.begin(), entries.end()); PathSet store(entries.begin(), entries.end());
/* Check whether all valid paths actually exist. */ /* Check whether all valid paths actually exist. */
printMsg(lvlInfo, "checking path existence..."); printMsg(lvlInfo, "checking path existence...");
PathSet validPaths2 = queryValidPaths(), validPaths, done; PathSet validPaths2 = queryAllValidPaths(), validPaths, done;
foreach (PathSet::iterator, i, validPaths2) foreach (PathSet::iterator, i, validPaths2)
verifyPath(*i, store, done, validPaths); verifyPath(*i, store, done, validPaths);
@ -1501,7 +1543,7 @@ void LocalStore::verifyStore(bool checkContents)
/* Check the content hash (optionally - slow). */ /* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, format("checking contents of `%1%'") % *i); printMsg(lvlTalkative, format("checking contents of `%1%'") % *i);
HashResult current = hashPath(info.hash.type, *i); HashResult current = hashPath(info.hash.type, *i);
if (info.hash != nullHash && info.hash != current.first) { if (info.hash != nullHash && info.hash != current.first) {
printMsg(lvlError, format("path `%1%' was modified! " printMsg(lvlError, format("path `%1%' was modified! "
"expected hash `%2%', got `%3%'") "expected hash `%2%', got `%3%'")
@ -1516,18 +1558,18 @@ void LocalStore::verifyStore(bool checkContents)
info.hash = current.first; info.hash = current.first;
update = true; update = true;
} }
/* Fill in missing narSize fields (from old stores). */ /* Fill in missing narSize fields (from old stores). */
if (info.narSize == 0) { if (info.narSize == 0) {
printMsg(lvlError, format("updating size field on `%1%' to %2%") % *i % current.second); printMsg(lvlError, format("updating size field on `%1%' to %2%") % *i % current.second);
info.narSize = current.second; info.narSize = current.second;
update = true; update = true;
} }
if (update) updatePathInfo(info); if (update) updatePathInfo(info);
} }
} catch (Error & e) { } catch (Error & e) {
/* It's possible that the path got GC'ed, so ignore /* It's possible that the path got GC'ed, so ignore
errors on invalid paths. */ errors on invalid paths. */
@ -1543,7 +1585,7 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store,
PathSet & done, PathSet & validPaths) PathSet & done, PathSet & validPaths)
{ {
checkInterrupt(); checkInterrupt();
if (done.find(path) != done.end()) return; if (done.find(path) != done.end()) return;
done.insert(path); done.insert(path);
@ -1570,10 +1612,10 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store,
invalidatePath(path); invalidatePath(path);
} else } else
printMsg(lvlError, format("path `%1%' disappeared, but it still has valid referrers!") % path); printMsg(lvlError, format("path `%1%' disappeared, but it still has valid referrers!") % path);
return; return;
} }
validPaths.insert(path); validPaths.insert(path);
} }
@ -1583,9 +1625,9 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store,
PathSet LocalStore::queryValidPathsOld() PathSet LocalStore::queryValidPathsOld()
{ {
PathSet paths; PathSet paths;
Strings entries = readDirectory(nixDBPath + "/info"); Strings entries = readDirectory(settings.nixDBPath + "/info");
foreach (Strings::iterator, i, entries) foreach (Strings::iterator, i, entries)
if (i->at(0) != '.') paths.insert(nixStore + "/" + *i); if (i->at(0) != '.') paths.insert(settings.nixStore + "/" + *i);
return paths; return paths;
} }
@ -1597,7 +1639,7 @@ ValidPathInfo LocalStore::queryPathInfoOld(const Path & path)
/* Read the info file. */ /* Read the info file. */
string baseName = baseNameOf(path); string baseName = baseNameOf(path);
Path infoFile = (format("%1%/info/%2%") % nixDBPath % baseName).str(); Path infoFile = (format("%1%/info/%2%") % settings.nixDBPath % baseName).str();
if (!pathExists(infoFile)) if (!pathExists(infoFile))
throw Error(format("path `%1%' is not valid") % path); throw Error(format("path `%1%' is not valid") % path);
string info = readFile(infoFile); string info = readFile(infoFile);
@ -1639,14 +1681,14 @@ void LocalStore::upgradeStore6()
PathSet validPaths = queryValidPathsOld(); PathSet validPaths = queryValidPathsOld();
SQLiteTxn txn(db); SQLiteTxn txn(db);
foreach (PathSet::iterator, i, validPaths) { foreach (PathSet::iterator, i, validPaths) {
addValidPath(queryPathInfoOld(*i), false); addValidPath(queryPathInfoOld(*i), false);
std::cerr << "."; std::cerr << ".";
} }
std::cerr << "|"; std::cerr << "|";
foreach (PathSet::iterator, i, validPaths) { foreach (PathSet::iterator, i, validPaths) {
ValidPathInfo info = queryPathInfoOld(*i); ValidPathInfo info = queryPathInfoOld(*i);
unsigned long long referrer = queryValidPathId(*i); unsigned long long referrer = queryValidPathId(*i);


@ -45,7 +45,7 @@ struct OptimiseStats
struct RunningSubstituter struct RunningSubstituter
{ {
Pid pid; Pid pid;
AutoCloseFD to, from; AutoCloseFD to, from, error;
}; };
@ -75,19 +75,16 @@ struct SQLiteStmt
void bind64(long long value); void bind64(long long value);
void bind(); void bind();
}; };
class LocalStore : public StoreAPI class LocalStore : public StoreAPI
{ {
private: private:
bool substitutablePathsLoaded;
PathSet substitutablePaths;
typedef std::map<Path, RunningSubstituter> RunningSubstituters; typedef std::map<Path, RunningSubstituter> RunningSubstituters;
RunningSubstituters runningSubstituters; RunningSubstituters runningSubstituters;
Path linksDir; Path linksDir;
public: public:
/* Initialise the local store, upgrading the schema if /* Initialise the local store, upgrading the schema if
@ -95,13 +92,15 @@ public:
LocalStore(bool reserveSpace = true); LocalStore(bool reserveSpace = true);
~LocalStore(); ~LocalStore();
/* Implementations of abstract store API methods. */ /* Implementations of abstract store API methods. */
bool isValidPath(const Path & path); bool isValidPath(const Path & path);
PathSet queryValidPaths(); PathSet queryValidPaths(const PathSet & paths);
PathSet queryAllValidPaths();
ValidPathInfo queryPathInfo(const Path & path); ValidPathInfo queryPathInfo(const Path & path);
Hash queryPathHash(const Path & path); Hash queryPathHash(const Path & path);
@ -121,19 +120,17 @@ public:
PathSet queryDerivationOutputs(const Path & path); PathSet queryDerivationOutputs(const Path & path);
StringSet queryDerivationOutputNames(const Path & path); StringSet queryDerivationOutputNames(const Path & path);
Path queryPathFromHashPart(const string & hashPart);
PathSet querySubstitutablePaths();
bool hasSubstitutes(const Path & path);
bool querySubstitutablePathInfo(const Path & path, Path queryPathFromHashPart(const string & hashPart);
SubstitutablePathInfo & info);
PathSet querySubstitutablePaths(const PathSet & paths);
bool querySubstitutablePathInfo(const Path & substituter,
const Path & path, SubstitutablePathInfo & info); void querySubstitutablePathInfos(const Path & substituter,
PathSet & paths, SubstitutablePathInfos & infos);
void querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfos & infos);
Path addToStore(const Path & srcPath, Path addToStore(const Path & srcPath,
bool recursive = true, HashType hashAlgo = htSHA256, bool recursive = true, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter); PathFilter & filter = defaultPathFilter);
@ -152,7 +149,7 @@ public:
Sink & sink); Sink & sink);
Paths importPaths(bool requireSignature, Source & source); Paths importPaths(bool requireSignature, Source & source);
void buildPaths(const PathSet & paths); void buildPaths(const PathSet & paths);
void ensurePath(const Path & path); void ensurePath(const Path & path);
@ -160,7 +157,7 @@ public:
void addTempRoot(const Path & path); void addTempRoot(const Path & path);
void addIndirectRoot(const Path & path); void addIndirectRoot(const Path & path);
void syncWithGC(); void syncWithGC();
Roots findRoots(); Roots findRoots();
@ -173,7 +170,7 @@ public:
/* Optimise a single store path. */ /* Optimise a single store path. */
void optimisePath(const Path & path); void optimisePath(const Path & path);
/* Check the integrity of the Nix store. */ /* Check the integrity of the Nix store. */
void verifyStore(bool checkContents); void verifyStore(bool checkContents);
@ -232,18 +229,18 @@ private:
unsigned long long queryValidPathId(const Path & path); unsigned long long queryValidPathId(const Path & path);
unsigned long long addValidPath(const ValidPathInfo & info, bool checkOutputs = true); unsigned long long addValidPath(const ValidPathInfo & info, bool checkOutputs = true);
void addReference(unsigned long long referrer, unsigned long long reference); void addReference(unsigned long long referrer, unsigned long long reference);
void appendReferrer(const Path & from, const Path & to, bool lock); void appendReferrer(const Path & from, const Path & to, bool lock);
void rewriteReferrers(const Path & path, bool purge, PathSet referrers); void rewriteReferrers(const Path & path, bool purge, PathSet referrers);
void invalidatePath(const Path & path); void invalidatePath(const Path & path);
/* Delete a path from the Nix store. */ /* Delete a path from the Nix store. */
void invalidatePathChecked(const Path & path); void invalidatePathChecked(const Path & path);
void verifyPath(const Path & path, const PathSet & store, void verifyPath(const Path & path, const PathSet & store,
PathSet & done, PathSet & validPaths); PathSet & done, PathSet & validPaths);
@ -256,14 +253,14 @@ private:
struct GCState; struct GCState;
void deleteGarbage(GCState & state, const Path & path); void deleteGarbage(GCState & state, const Path & path);
bool tryToDelete(GCState & state, const Path & path); bool tryToDelete(GCState & state, const Path & path);
bool isActiveTempFile(const GCState & state, bool isActiveTempFile(const GCState & state,
const Path & path, const string & suffix); const Path & path, const string & suffix);
int openGCLock(LockType lockType); int openGCLock(LockType lockType);
void removeUnusedLinks(const GCState & state); void removeUnusedLinks(const GCState & state);
void startSubstituter(const Path & substituter, void startSubstituter(const Path & substituter,
@ -272,7 +269,7 @@ private:
Path createTempDirInStore(); Path createTempDirInStore();
Path importPath(bool requireSignature, Source & source); Path importPath(bool requireSignature, Source & source);
void checkDerivationOutputs(const Path & drvPath, const Derivation & drv); void checkDerivationOutputs(const Path & drvPath, const Derivation & drv);
void optimisePath_(OptimiseStats & stats, const Path & path); void optimisePath_(OptimiseStats & stats, const Path & path);
@ -293,9 +290,6 @@ void canonicalisePathMetaData(const Path & path, bool recurse);
MakeError(PathInUse, Error); MakeError(PathInUse, Error);
/* Whether we are in build users mode. */
bool haveBuildUsers();
/* Whether we are root. */ /* Whether we are root. */
bool amPrivileged(); bool amPrivileged();
@ -307,5 +301,5 @@ void getOwnership(const Path & path);
void deletePathWrapped(const Path & path, unsigned long long & bytesFreed); void deletePathWrapped(const Path & path, unsigned long long & bytesFreed);
void deletePathWrapped(const Path & path); void deletePathWrapped(const Path & path);
} }


@ -52,68 +52,118 @@ void queryMissing(StoreAPI & store, const PathSet & targets,
unsigned long long & downloadSize, unsigned long long & narSize) unsigned long long & downloadSize, unsigned long long & narSize)
{ {
downloadSize = narSize = 0; downloadSize = narSize = 0;
PathSet todo(targets.begin(), targets.end()), done; PathSet todo(targets.begin(), targets.end()), done;
while (!todo.empty()) { /* Getting substitute info has high latency when using the binary
Path p = *(todo.begin()); cache substituter. Thus it's essential to do substitute
todo.erase(p); queries in parallel as much as possible. To accomplish this
if (done.find(p) != done.end()) continue; we do the following:
done.insert(p);
if (isDerivation(p)) { - For all paths still to be processed (todo), we add all
if (!store.isValidPath(p)) { paths for which we need info to the set query. For an
unknown.insert(p); unbuilt derivation this is the output paths; otherwise, it's
continue; the path itself.
- We get info about all paths in query in parallel.
- We process the results and add new items to todo if
necessary. E.g. if a path is substitutable, then we need to
get info on its references.
- Repeat until todo is empty.
*/
while (!todo.empty()) {
PathSet query, todoDrv, todoNonDrv;
foreach (PathSet::iterator, i, todo) {
if (done.find(*i) != done.end()) continue;
done.insert(*i);
if (isDerivation(*i)) {
if (!store.isValidPath(*i)) {
// FIXME: we could try to substitute p.
unknown.insert(*i);
continue;
}
Derivation drv = derivationFromPath(store, *i);
PathSet invalid;
foreach (DerivationOutputs::iterator, j, drv.outputs)
if (!store.isValidPath(j->second.path)) invalid.insert(j->second.path);
if (invalid.empty()) continue;
todoDrv.insert(*i);
if (settings.useSubstitutes) query.insert(invalid.begin(), invalid.end());
} }
Derivation drv = derivationFromPath(store, p);
else {
if (store.isValidPath(*i)) continue;
query.insert(*i);
todoNonDrv.insert(*i);
}
}
todo.clear();
SubstitutablePathInfos infos;
store.querySubstitutablePathInfos(query, infos);
foreach (PathSet::iterator, i, todoDrv) {
// FIXME: cache this
Derivation drv = derivationFromPath(store, *i);
bool mustBuild = false; bool mustBuild = false;
foreach (DerivationOutputs::iterator, i, drv.outputs) if (settings.useSubstitutes) {
if (!store.isValidPath(i->second.path) && foreach (DerivationOutputs::iterator, j, drv.outputs)
!(queryBoolSetting("build-use-substitutes", true) && store.hasSubstitutes(i->second.path))) if (!store.isValidPath(j->second.path) &&
mustBuild = true; infos.find(j->second.path) == infos.end())
mustBuild = true;
} else
mustBuild = true;
if (mustBuild) { if (mustBuild) {
willBuild.insert(p); willBuild.insert(*i);
todo.insert(drv.inputSrcs.begin(), drv.inputSrcs.end()); todo.insert(drv.inputSrcs.begin(), drv.inputSrcs.end());
foreach (DerivationInputs::iterator, i, drv.inputDrvs) foreach (DerivationInputs::iterator, i, drv.inputDrvs)
todo.insert(i->first); todo.insert(i->first);
} else } else
foreach (DerivationOutputs::iterator, i, drv.outputs) foreach (DerivationOutputs::iterator, i, drv.outputs)
todo.insert(i->second.path); todoNonDrv.insert(i->second.path);
} }
else { foreach (PathSet::iterator, i, todoNonDrv) {
if (store.isValidPath(p)) continue; done.insert(*i);
SubstitutablePathInfo info; SubstitutablePathInfos::iterator info = infos.find(*i);
if (store.querySubstitutablePathInfo(p, info)) { if (info != infos.end()) {
willSubstitute.insert(p); willSubstitute.insert(*i);
downloadSize += info.downloadSize; downloadSize += info->second.downloadSize;
narSize += info.narSize; narSize += info->second.narSize;
todo.insert(info.references.begin(), info.references.end()); todo.insert(info->second.references.begin(), info->second.references.end());
} else } else
unknown.insert(p); unknown.insert(*i);
} }
} }
} }
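Aside: the loop above batches substitute queries per iteration instead of issuing one per path, which is what makes the binary-cache substituter usable despite its latency. A stripped-down sketch of that batching idea with a stubbed store interface; the types and method names here are placeholders, not the Nix API.

    // Stand-alone sketch of the batching idea described in the comment above:
    // gather every path whose substitute info is needed, issue one bulk query
    // per iteration, then classify and feed newly discovered references back in.
    #include <map>
    #include <set>
    #include <string>

    struct Info { std::set<std::string> references; long long downloadSize; long long narSize; };

    struct StubStore {
        bool isValid(const std::string &) const { return false; }                 // placeholder
        std::map<std::string, Info> queryBulk(const std::set<std::string> &) const {
            return std::map<std::string, Info>();                                 // placeholder
        }
    };

    void classify(const StubStore & store, std::set<std::string> todo,
        std::set<std::string> & willSubstitute, std::set<std::string> & unknown)
    {
        std::set<std::string> done;
        while (!todo.empty()) {
            std::set<std::string> query;
            for (std::set<std::string>::const_iterator i = todo.begin(); i != todo.end(); ++i)
                if (done.insert(*i).second && !store.isValid(*i)) query.insert(*i);
            todo.clear();

            std::map<std::string, Info> infos = store.queryBulk(query);           // one round trip
            for (std::set<std::string>::const_iterator i = query.begin(); i != query.end(); ++i) {
                std::map<std::string, Info>::const_iterator j = infos.find(*i);
                if (j == infos.end()) { unknown.insert(*i); continue; }
                willSubstitute.insert(*i);
                // references may themselves need info in the next iteration
                todo.insert(j->second.references.begin(), j->second.references.end());
            }
        }
    }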
static void dfsVisit(StoreAPI & store, const PathSet & paths, static void dfsVisit(StoreAPI & store, const PathSet & paths,
const Path & path, PathSet & visited, Paths & sorted, const Path & path, PathSet & visited, Paths & sorted,
PathSet & parents) PathSet & parents)
{ {
if (parents.find(path) != parents.end()) if (parents.find(path) != parents.end())
throw BuildError(format("cycle detected in the references of `%1%'") % path); throw BuildError(format("cycle detected in the references of `%1%'") % path);
if (visited.find(path) != visited.end()) return; if (visited.find(path) != visited.end()) return;
visited.insert(path); visited.insert(path);
parents.insert(path); parents.insert(path);
PathSet references; PathSet references;
if (store.isValidPath(path)) if (store.isValidPath(path))
store.queryReferences(path, references); store.queryReferences(path, references);
foreach (PathSet::iterator, i, references) foreach (PathSet::iterator, i, references)
/* Don't traverse into paths that don't exist. That can /* Don't traverse into paths that don't exist. That can
happen due to substitutes for non-existent paths. */ happen due to substitutes for non-existent paths. */


@ -19,7 +19,7 @@ static void makeWritable(const Path & path)
{ {
struct stat st; struct stat st;
if (lstat(path.c_str(), &st)) if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path `%1%'") % path); throw SysError(format("getting attributes of path `%1%'") % path);
if (S_ISDIR(st.st_mode) || S_ISREG(st.st_mode)) makeMutable(path); if (S_ISDIR(st.st_mode) || S_ISREG(st.st_mode)) makeMutable(path);
if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1) if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError(format("changing writability of `%1%'") % path); throw SysError(format("changing writability of `%1%'") % path);
@ -57,22 +57,22 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path)
struct stat st; struct stat st;
if (lstat(path.c_str(), &st)) if (lstat(path.c_str(), &st))
throw SysError(format("getting attributes of path `%1%'") % path); throw SysError(format("getting attributes of path `%1%'") % path);
if (S_ISDIR(st.st_mode)) { if (S_ISDIR(st.st_mode)) {
Strings names = readDirectory(path); Strings names = readDirectory(path);
foreach (Strings::iterator, i, names) foreach (Strings::iterator, i, names)
optimisePath_(stats, path + "/" + *i); optimisePath_(stats, path + "/" + *i);
return; return;
} }
/* We can hard link regular files and maybe symlinks. */ /* We can hard link regular files and maybe symlinks. */
if (!S_ISREG(st.st_mode) if (!S_ISREG(st.st_mode)
#if CAN_LINK_SYMLINK #if CAN_LINK_SYMLINK
&& !S_ISLNK(st.st_mode) && !S_ISLNK(st.st_mode)
#endif #endif
) return; ) return;
/* Sometimes SNAFUs can cause files in the Nix store to be /* Sometimes SNAFUs can cause files in the Nix store to be
modified, in particular when running programs as root under modified, in particular when running programs as root under
NixOS (example: $fontconfig/var/cache being modified). Skip NixOS (example: $fontconfig/var/cache being modified). Skip
@ -113,14 +113,14 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path)
current file with a hard link to that file. */ current file with a hard link to that file. */
struct stat stLink; struct stat stLink;
if (lstat(linkPath.c_str(), &stLink)) if (lstat(linkPath.c_str(), &stLink))
throw SysError(format("getting attributes of path `%1%'") % linkPath); throw SysError(format("getting attributes of path `%1%'") % linkPath);
stats.sameContents++; stats.sameContents++;
if (st.st_ino == stLink.st_ino) { if (st.st_ino == stLink.st_ino) {
printMsg(lvlDebug, format("`%1%' is already linked to `%2%'") % path % linkPath); printMsg(lvlDebug, format("`%1%' is already linked to `%2%'") % path % linkPath);
return; return;
} }
printMsg(lvlTalkative, format("linking `%1%' to `%2%'") % path % linkPath); printMsg(lvlTalkative, format("linking `%1%' to `%2%'") % path % linkPath);
/* Make the containing directory writable, but only if it's not /* Make the containing directory writable, but only if it's not
@ -128,7 +128,7 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path)
permissions). */ permissions). */
bool mustToggle = !isStorePath(path); bool mustToggle = !isStorePath(path);
if (mustToggle) makeWritable(dirOf(path)); if (mustToggle) makeWritable(dirOf(path));
/* When we're done, make the directory read-only again and reset /* When we're done, make the directory read-only again and reset
its timestamp back to 0. */ its timestamp back to 0. */
MakeReadOnly makeReadOnly(mustToggle ? dirOf(path) : ""); MakeReadOnly makeReadOnly(mustToggle ? dirOf(path) : "");
@ -149,7 +149,7 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path)
MakeImmutable mk1(linkPath); MakeImmutable mk1(linkPath);
Path tempLink = (format("%1%/.tmp-link-%2%-%3%") Path tempLink = (format("%1%/.tmp-link-%2%-%3%")
% nixStore % getpid() % rand()).str(); % settings.nixStore % getpid() % rand()).str();
if (link(linkPath.c_str(), tempLink.c_str()) == -1) { if (link(linkPath.c_str(), tempLink.c_str()) == -1) {
if (errno == EMLINK) { if (errno == EMLINK) {
@ -194,7 +194,7 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path)
void LocalStore::optimiseStore(OptimiseStats & stats) void LocalStore::optimiseStore(OptimiseStats & stats)
{ {
PathSet paths = queryValidPaths(); PathSet paths = queryAllValidPaths();
foreach (PathSet::iterator, i, paths) { foreach (PathSet::iterator, i, paths) {
addTempRoot(*i); addTempRoot(*i);
@ -207,10 +207,8 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
void LocalStore::optimisePath(const Path & path) void LocalStore::optimisePath(const Path & path)
{ {
if (queryBoolSetting("auto-optimise-store", true)) { OptimiseStats stats;
OptimiseStats stats; if (settings.autoOptimiseStore) optimisePath_(stats, path);
optimisePath_(stats, path);
}
} }
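Aside: the deduplication step above replaces a file with a hard link by first linking to a temporary name and then renaming over the original, so the store path is never missing. A minimal POSIX sketch of just that step; the temporary naming scheme and error handling are illustrative.

    // Minimal POSIX sketch of the "link to a temporary name, then rename over
    // the original" step used above to deduplicate identical files.
    #include <unistd.h>
    #include <cstdio>
    #include <stdexcept>
    #include <string>

    static void relinkTo(const std::string & linkPath, const std::string & path)
    {
        std::string tempLink = path + ".tmp-link";   // placeholder naming scheme
        if (link(linkPath.c_str(), tempLink.c_str()) == -1)
            throw std::runtime_error("creating temporary hard link to " + linkPath);
        // Atomically replace the original file with the hard link.
        if (rename(tempLink.c_str(), path.c_str()) == -1) {
            unlink(tempLink.c_str());
            throw std::runtime_error("replacing " + path);
        }
    }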


@ -60,7 +60,7 @@ void RemoteStore::openConnection(bool reserveSpace)
else else
throw Error(format("invalid setting for NIX_REMOTE, `%1%'") throw Error(format("invalid setting for NIX_REMOTE, `%1%'")
% remoteMode); % remoteMode);
from.fd = fdSocket; from.fd = fdSocket;
to.fd = fdSocket; to.fd = fdSocket;
@ -100,18 +100,18 @@ void RemoteStore::forkSlave()
/* Start the worker. */ /* Start the worker. */
Path worker = getEnv("NIX_WORKER"); Path worker = getEnv("NIX_WORKER");
if (worker == "") if (worker == "")
worker = nixBinDir + "/nix-worker"; worker = settings.nixBinDir + "/nix-worker";
child = fork(); child = fork();
switch (child) { switch (child) {
case -1: case -1:
throw SysError("unable to fork"); throw SysError("unable to fork");
case 0: case 0:
try { /* child */ try { /* child */
if (dup2(fdChild, STDOUT_FILENO) == -1) if (dup2(fdChild, STDOUT_FILENO) == -1)
throw SysError("dupping write side"); throw SysError("dupping write side");
@ -124,7 +124,7 @@ void RemoteStore::forkSlave()
execlp(worker.c_str(), worker.c_str(), "--slave", NULL); execlp(worker.c_str(), worker.c_str(), "--slave", NULL);
throw SysError(format("executing `%1%'") % worker); throw SysError(format("executing `%1%'") % worker);
} catch (std::exception & e) { } catch (std::exception & e) {
std::cerr << format("child error: %1%\n") % e.what(); std::cerr << format("child error: %1%\n") % e.what();
} }
@ -142,7 +142,7 @@ void RemoteStore::connectToDaemon()
if (fdSocket == -1) if (fdSocket == -1)
throw SysError("cannot create Unix domain socket"); throw SysError("cannot create Unix domain socket");
string socketPath = nixStateDir + DEFAULT_SOCKET_PATH; string socketPath = settings.nixStateDir + DEFAULT_SOCKET_PATH;
/* Urgh, sockaddr_un allows path names of only 108 characters. So /* Urgh, sockaddr_un allows path names of only 108 characters. So
chdir to the socket directory so that we can pass a relative chdir to the socket directory so that we can pass a relative
@ -150,16 +150,16 @@ void RemoteStore::connectToDaemon()
applications... */ applications... */
AutoCloseFD fdPrevDir = open(".", O_RDONLY); AutoCloseFD fdPrevDir = open(".", O_RDONLY);
if (fdPrevDir == -1) throw SysError("couldn't open current directory"); if (fdPrevDir == -1) throw SysError("couldn't open current directory");
chdir(dirOf(socketPath).c_str()); chdir(dirOf(socketPath).c_str());
Path socketPathRel = "./" + baseNameOf(socketPath); Path socketPathRel = "./" + baseNameOf(socketPath);
struct sockaddr_un addr; struct sockaddr_un addr;
addr.sun_family = AF_UNIX; addr.sun_family = AF_UNIX;
if (socketPathRel.size() >= sizeof(addr.sun_path)) if (socketPathRel.size() >= sizeof(addr.sun_path))
throw Error(format("socket path `%1%' is too long") % socketPathRel); throw Error(format("socket path `%1%' is too long") % socketPathRel);
using namespace std; using namespace std;
strcpy(addr.sun_path, socketPathRel.c_str()); strcpy(addr.sun_path, socketPathRel.c_str());
if (connect(fdSocket, (struct sockaddr *) &addr, sizeof(addr)) == -1) if (connect(fdSocket, (struct sockaddr *) &addr, sizeof(addr)) == -1)
throw SysError(format("cannot connect to daemon at `%1%'") % socketPath); throw SysError(format("cannot connect to daemon at `%1%'") % socketPath);
@ -184,24 +184,34 @@ RemoteStore::~RemoteStore()
void RemoteStore::setOptions() void RemoteStore::setOptions()
{ {
writeInt(wopSetOptions, to); writeInt(wopSetOptions, to);
writeInt(keepFailed, to);
writeInt(keepGoing, to); writeInt(settings.keepFailed, to);
writeInt(tryFallback, to); writeInt(settings.keepGoing, to);
writeInt(settings.tryFallback, to);
writeInt(verbosity, to); writeInt(verbosity, to);
writeInt(maxBuildJobs, to); writeInt(settings.maxBuildJobs, to);
writeInt(maxSilentTime, to); writeInt(settings.maxSilentTime, to);
if (GET_PROTOCOL_MINOR(daemonVersion) >= 2) if (GET_PROTOCOL_MINOR(daemonVersion) >= 2)
writeInt(useBuildHook, to); writeInt(settings.useBuildHook, to);
if (GET_PROTOCOL_MINOR(daemonVersion) >= 4) { if (GET_PROTOCOL_MINOR(daemonVersion) >= 4) {
writeInt(buildVerbosity, to); writeInt(settings.buildVerbosity, to);
writeInt(logType, to); writeInt(logType, to);
writeInt(printBuildTrace, to); writeInt(settings.printBuildTrace, to);
} }
if (GET_PROTOCOL_MINOR(daemonVersion) >= 6) if (GET_PROTOCOL_MINOR(daemonVersion) >= 6)
writeInt(buildCores, to); writeInt(settings.buildCores, to);
if (GET_PROTOCOL_MINOR(daemonVersion) >= 10) if (GET_PROTOCOL_MINOR(daemonVersion) >= 10)
writeInt(queryBoolSetting("build-use-substitutes", true), to); writeInt(settings.useSubstitutes, to);
if (GET_PROTOCOL_MINOR(daemonVersion) >= 12) {
Settings::SettingsMap overrides = settings.getOverrides();
writeInt(overrides.size(), to);
foreach (Settings::SettingsMap::iterator, i, overrides) {
writeString(i->first, to);
writeString(i->second, to);
}
}
processStderr(); processStderr();
} }
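Aside: the options above are written under protocol-version guards, and the new override map is only sent to daemons speaking protocol minor 12 or later, as a count followed by name/value pairs. A sketch of that version-gated serialisation with stub writeInt/writeString helpers standing in for the Nix wire primitives.

    // Sketch of the version-gated serialisation used above for the override map.
    #include <map>
    #include <ostream>
    #include <string>

    static void writeInt(unsigned long n, std::ostream & to) { to << n << '\n'; }
    static void writeString(const std::string & s, std::ostream & to) { to << s << '\n'; }

    static void sendOverrides(std::ostream & to, unsigned int protocolMinor,
        const std::map<std::string, std::string> & overrides)
    {
        if (protocolMinor < 12) return;          // older daemons don't expect overrides
        writeInt(overrides.size(), to);
        for (std::map<std::string, std::string>::const_iterator i = overrides.begin();
             i != overrides.end(); ++i) {
            writeString(i->first, to);           // option name
            writeString(i->second, to);          // option value as a string
        }
    }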
@ -217,42 +227,96 @@ bool RemoteStore::isValidPath(const Path & path)
} }
PathSet RemoteStore::queryValidPaths() PathSet RemoteStore::queryValidPaths(const PathSet & paths)
{ {
openConnection(); openConnection();
writeInt(wopQueryValidPaths, to); if (GET_PROTOCOL_MINOR(daemonVersion) < 12) {
PathSet res;
foreach (PathSet::const_iterator, i, paths)
if (isValidPath(*i)) res.insert(*i);
return res;
} else {
writeInt(wopQueryValidPaths, to);
writeStrings(paths, to);
processStderr();
return readStorePaths<PathSet>(from);
}
}
PathSet RemoteStore::queryAllValidPaths()
{
openConnection();
writeInt(wopQueryAllValidPaths, to);
processStderr(); processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(from);
} }
bool RemoteStore::hasSubstitutes(const Path & path) PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths)
{ {
openConnection(); openConnection();
writeInt(wopHasSubstitutes, to); if (GET_PROTOCOL_MINOR(daemonVersion) < 12) {
writeString(path, to); PathSet res;
processStderr(); foreach (PathSet::const_iterator, i, paths) {
unsigned int reply = readInt(from); writeInt(wopHasSubstitutes, to);
return reply != 0; writeString(*i, to);
processStderr();
if (readInt(from)) res.insert(*i);
}
return res;
} else {
writeInt(wopQuerySubstitutablePaths, to);
writeStrings(paths, to);
processStderr();
return readStorePaths<PathSet>(from);
}
} }
bool RemoteStore::querySubstitutablePathInfo(const Path & path, void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfo & info) SubstitutablePathInfos & infos)
{ {
if (paths.empty()) return;
openConnection(); openConnection();
if (GET_PROTOCOL_MINOR(daemonVersion) < 3) return false;
writeInt(wopQuerySubstitutablePathInfo, to); if (GET_PROTOCOL_MINOR(daemonVersion) < 3) return;
writeString(path, to);
processStderr(); if (GET_PROTOCOL_MINOR(daemonVersion) < 12) {
unsigned int reply = readInt(from);
if (reply == 0) return false; foreach (PathSet::const_iterator, i, paths) {
info.deriver = readString(from); SubstitutablePathInfo info;
if (info.deriver != "") assertStorePath(info.deriver); writeInt(wopQuerySubstitutablePathInfo, to);
info.references = readStorePaths<PathSet>(from); writeString(*i, to);
info.downloadSize = readLongLong(from); processStderr();
info.narSize = GET_PROTOCOL_MINOR(daemonVersion) >= 7 ? readLongLong(from) : 0; unsigned int reply = readInt(from);
return true; if (reply == 0) continue;
info.deriver = readString(from);
if (info.deriver != "") assertStorePath(info.deriver);
info.references = readStorePaths<PathSet>(from);
info.downloadSize = readLongLong(from);
info.narSize = GET_PROTOCOL_MINOR(daemonVersion) >= 7 ? readLongLong(from) : 0;
infos[*i] = info;
}
} else {
writeInt(wopQuerySubstitutablePathInfos, to);
writeStrings(paths, to);
processStderr();
unsigned int count = readInt(from);
for (unsigned int n = 0; n < count; n++) {
Path path = readStorePath(from);
SubstitutablePathInfo & info(infos[path]);
info.deriver = readString(from);
if (info.deriver != "") assertStorePath(info.deriver);
info.references = readStorePaths<PathSet>(from);
info.downloadSize = readLongLong(from);
info.narSize = readLongLong(from);
}
}
} }
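A small usage sketch of the new batched interface, assuming it is compiled inside the Nix tree (store-api.hh and util.hh provide the types and the foreach macro):

    // Sketch: report how much would have to be downloaded for a set of
    // store paths, using the new batched query.
    #include <iostream>
    #include "store-api.hh"
    #include "util.hh"

    using namespace nix;

    static void printDownloadSize(StoreAPI & store, const PathSet & paths)
    {
        SubstitutablePathInfos infos;
        store.querySubstitutablePathInfos(paths, infos);

        unsigned long long total = 0;
        foreach (SubstitutablePathInfos::iterator, i, infos)
            total += i->second.downloadSize;

        // Paths missing from `infos' have no substitute at all.
        std::cout << infos.size() << " of " << paths.size()
                  << " paths are substitutable, " << total
                  << " bytes to download" << std::endl;
    }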
@ -357,7 +421,7 @@ Path RemoteStore::addToStore(const Path & _srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter) bool recursive, HashType hashAlgo, PathFilter & filter)
{ {
openConnection(); openConnection();
Path srcPath(absPath(_srcPath)); Path srcPath(absPath(_srcPath));
writeInt(wopAddToStore, to); writeInt(wopAddToStore, to);
@ -380,7 +444,7 @@ Path RemoteStore::addTextToStore(const string & name, const string & s,
writeString(name, to); writeString(name, to);
writeString(s, to); writeString(s, to);
writeStrings(references, to); writeStrings(references, to);
processStderr(); processStderr();
return readStorePath(from); return readStorePath(from);
} }
@ -477,7 +541,7 @@ Roots RemoteStore::findRoots()
void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
{ {
openConnection(false); openConnection(false);
writeInt(wopCollectGarbage, to); writeInt(wopCollectGarbage, to);
writeInt(options.action, to); writeInt(options.action, to);
writeStrings(options.pathsToDelete, to); writeStrings(options.pathsToDelete, to);
@ -489,9 +553,9 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
writeInt(0, to); writeInt(0, to);
writeInt(0, to); writeInt(0, to);
} }
processStderr(); processStderr();
results.paths = readStrings<PathSet>(from); results.paths = readStrings<PathSet>(from);
results.bytesFreed = readLongLong(from); results.bytesFreed = readLongLong(from);
readLongLong(from); // obsolete readLongLong(from); // obsolete
View File
@ -26,7 +26,9 @@ public:
bool isValidPath(const Path & path); bool isValidPath(const Path & path);
PathSet queryValidPaths(); PathSet queryValidPaths(const PathSet & paths);
PathSet queryAllValidPaths();
ValidPathInfo queryPathInfo(const Path & path); ValidPathInfo queryPathInfo(const Path & path);
@ -44,10 +46,10 @@ public:
Path queryPathFromHashPart(const string & hashPart); Path queryPathFromHashPart(const string & hashPart);
bool hasSubstitutes(const Path & path); PathSet querySubstitutablePaths(const PathSet & paths);
bool querySubstitutablePathInfo(const Path & path, void querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfo & info); SubstitutablePathInfos & infos);
Path addToStore(const Path & srcPath, Path addToStore(const Path & srcPath,
bool recursive = true, HashType hashAlgo = htSHA256, bool recursive = true, HashType hashAlgo = htSHA256,
View File
@ -19,16 +19,16 @@ GCOptions::GCOptions()
bool isInStore(const Path & path) bool isInStore(const Path & path)
{ {
return path[0] == '/' return path[0] == '/'
&& string(path, 0, nixStore.size()) == nixStore && string(path, 0, settings.nixStore.size()) == settings.nixStore
&& path.size() >= nixStore.size() + 2 && path.size() >= settings.nixStore.size() + 2
&& path[nixStore.size()] == '/'; && path[settings.nixStore.size()] == '/';
} }
bool isStorePath(const Path & path) bool isStorePath(const Path & path)
{ {
return isInStore(path) return isInStore(path)
&& path.find('/', nixStore.size() + 1) == Path::npos; && path.find('/', settings.nixStore.size() + 1) == Path::npos;
} }
@ -43,7 +43,7 @@ Path toStorePath(const Path & path)
{ {
if (!isInStore(path)) if (!isInStore(path))
throw Error(format("path `%1%' is not in the Nix store") % path); throw Error(format("path `%1%' is not in the Nix store") % path);
Path::size_type slash = path.find('/', nixStore.size() + 1); Path::size_type slash = path.find('/', settings.nixStore.size() + 1);
if (slash == Path::npos) if (slash == Path::npos)
return path; return path;
else else
@ -74,7 +74,7 @@ Path followLinksToStorePath(const Path & path)
string storePathToName(const Path & path) string storePathToName(const Path & path)
{ {
assertStorePath(path); assertStorePath(path);
return string(path, nixStore.size() + 34); return string(path, settings.nixStore.size() + 34);
} }
@ -173,11 +173,11 @@ Path makeStorePath(const string & type,
{ {
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
string s = type + ":sha256:" + printHash(hash) + ":" string s = type + ":sha256:" + printHash(hash) + ":"
+ nixStore + ":" + name; + settings.nixStore + ":" + name;
checkStoreName(name); checkStoreName(name);
return nixStore + "/" return settings.nixStore + "/"
+ printHash32(compressHash(hashString(htSHA256, s), 20)) + printHash32(compressHash(hashString(htSHA256, s), 20))
+ "-" + name; + "-" + name;
} }
View File
@ -80,6 +80,8 @@ struct SubstitutablePathInfo
unsigned long long narSize; /* 0 = unknown */ unsigned long long narSize; /* 0 = unknown */
}; };
typedef std::map<Path, SubstitutablePathInfo> SubstitutablePathInfos;
struct ValidPathInfo struct ValidPathInfo
{ {
@ -102,20 +104,23 @@ public:
virtual ~StoreAPI() { } virtual ~StoreAPI() { }
/* Checks whether a path is valid. */ /* Check whether a path is valid. */
virtual bool isValidPath(const Path & path) = 0; virtual bool isValidPath(const Path & path) = 0;
/* Query the set of valid paths. */ /* Query which of the given paths is valid. */
virtual PathSet queryValidPaths() = 0; virtual PathSet queryValidPaths(const PathSet & paths) = 0;
/* Query the set of all valid paths. */
virtual PathSet queryAllValidPaths() = 0;
/* Query information about a valid path. */ /* Query information about a valid path. */
virtual ValidPathInfo queryPathInfo(const Path & path) = 0; virtual ValidPathInfo queryPathInfo(const Path & path) = 0;
/* Queries the hash of a valid path. */ /* Query the hash of a valid path. */
virtual Hash queryPathHash(const Path & path) = 0; virtual Hash queryPathHash(const Path & path) = 0;
/* Queries the set of outgoing FS references for a store path. /* Query the set of outgoing FS references for a store path. The
The result is not cleared. */ result is not cleared. */
virtual void queryReferences(const Path & path, virtual void queryReferences(const Path & path,
PathSet & references) = 0; PathSet & references) = 0;
@ -138,13 +143,14 @@ public:
path, or "" if the path doesn't exist. */ path, or "" if the path doesn't exist. */
virtual Path queryPathFromHashPart(const string & hashPart) = 0; virtual Path queryPathFromHashPart(const string & hashPart) = 0;
/* Query whether a path has substitutes. */ /* Query which of the given paths have substitutes. */
virtual bool hasSubstitutes(const Path & path) = 0; virtual PathSet querySubstitutablePaths(const PathSet & paths) = 0;
/* Query the references, deriver and download size of a /* Query substitute info (i.e. references, derivers and download
substitutable path. */ sizes) of a set of paths. If a path does not have substitute
virtual bool querySubstitutablePathInfo(const Path & path, info, it's omitted from the resulting infos map. */
SubstitutablePathInfo & info) = 0; virtual void querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfos & infos) = 0;
/* Copy the contents of a path to the store and register the /* Copy the contents of a path to the store and register the
validity the resulting path. The resulting path is returned. validity the resulting path. The resulting path is returned.
View File
@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f #define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x10b #define PROTOCOL_VERSION 0x10c
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@ -32,13 +32,16 @@ typedef enum {
wopCollectGarbage = 20, wopCollectGarbage = 20,
wopQuerySubstitutablePathInfo = 21, wopQuerySubstitutablePathInfo = 21,
wopQueryDerivationOutputs = 22, wopQueryDerivationOutputs = 22,
wopQueryValidPaths = 23, wopQueryAllValidPaths = 23,
wopQueryFailedPaths = 24, wopQueryFailedPaths = 24,
wopClearFailedPaths = 25, wopClearFailedPaths = 25,
wopQueryPathInfo = 26, wopQueryPathInfo = 26,
wopImportPaths = 27, wopImportPaths = 27,
wopQueryDerivationOutputNames = 28, wopQueryDerivationOutputNames = 28,
wopQueryPathFromHashPart = 29, wopQueryPathFromHashPart = 29,
wopQuerySubstitutablePathInfos = 30,
wopQueryValidPaths = 31,
wopQuerySubstitutablePaths = 32,
} WorkerOp; } WorkerOp;
@ -60,5 +63,5 @@ typedef enum {
Path readStorePath(Source & from); Path readStorePath(Source & from);
template<class T> T readStorePaths(Source & from); template<class T> T readStorePaths(Source & from);
} }
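The version checks sprinkled through this commit (GET_PROTOCOL_MINOR(...) >= 12) correspond exactly to this bump from 0x10b to 0x10c. A tiny sanity check of the macro arithmetic:

    // 0x10c has major 0x100 and minor 0x0c (= 12), so a daemon speaking
    // 0x10c passes every ">= 12" feature test; 0x10b (minor 11) does not.
    #include <cassert>

    #define PROTOCOL_VERSION 0x10c
    #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
    #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

    int main()
    {
        assert(GET_PROTOCOL_MAJOR(PROTOCOL_VERSION) == 0x100);
        assert(GET_PROTOCOL_MINOR(PROTOCOL_VERSION) == 12);
        assert(GET_PROTOCOL_MINOR(0x10b) == 11);   // previous protocol: too old
        return 0;
    }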
View File
@ -253,7 +253,7 @@ string readLine(int fd)
if (errno != EINTR) if (errno != EINTR)
throw SysError("reading a line"); throw SysError("reading a line");
} else if (rd == 0) } else if (rd == 0)
throw Error("unexpected EOF reading a line"); throw EndOfFile("unexpected EOF reading a line");
else { else {
if (ch == '\n') return s; if (ch == '\n') return s;
s += ch; s += ch;
@ -1010,6 +1010,13 @@ string concatStringsSep(const string & sep, const Strings & ss)
} }
string chomp(const string & s)
{
size_t i = s.find_last_not_of(" \n\r\t");
return i == string::npos ? "" : string(s, 0, i + 1);
}
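A quick illustration of the new helper's behaviour (trailing whitespace only; leading whitespace is preserved):

    // Sketch: expected behaviour of chomp(); the input strings are examples.
    #include <cassert>
    #include <string>

    std::string chomp(const std::string & s)
    {
        size_t i = s.find_last_not_of(" \n\r\t");
        return i == std::string::npos ? "" : std::string(s, 0, i + 1);
    }

    int main()
    {
        assert(chomp("WantMassQuery: 1\n") == "WantMassQuery: 1");
        assert(chomp("  indented  \t\r\n") == "  indented");   // leading spaces kept
        assert(chomp(" \n\t") == "");
        return 0;
    }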
string statusToString(int status) string statusToString(int status)
{ {
if (!WIFEXITED(status) || WEXITSTATUS(status) != 0) { if (!WIFEXITED(status) || WEXITSTATUS(status) != 0) {
View File
@ -294,6 +294,10 @@ Strings tokenizeString(const string & s, const string & separators = " \t\n\r");
string concatStringsSep(const string & sep, const Strings & ss); string concatStringsSep(const string & sep, const Strings & ss);
/* Remove trailing whitespace from a string. */
string chomp(const string & s);
/* Convert the exit status of a child as returned by wait() into an /* Convert the exit status of a child as returned by wait() into an
error string. */ error string. */
string statusToString(int status); string statusToString(int status);
View File
@ -55,7 +55,6 @@ struct Globals
EvalState state; EvalState state;
bool dryRun; bool dryRun;
bool preserveInstalled; bool preserveInstalled;
bool keepDerivations;
string forceName; string forceName;
bool prebuiltOnly; bool prebuiltOnly;
}; };
@ -113,6 +112,11 @@ static void getAllExprs(EvalState & state,
StringSet namesSorted(names.begin(), names.end()); StringSet namesSorted(names.begin(), names.end());
foreach (StringSet::iterator, i, namesSorted) { foreach (StringSet::iterator, i, namesSorted) {
/* Ignore the manifest.nix used by profiles. This is
necessary to prevent it from showing up in channels (which
are implemented using profiles). */
if (*i == "manifest.nix") continue;
Path path2 = path + "/" + *i; Path path2 = path + "/" + *i;
struct stat st; struct stat st;
@ -211,9 +215,12 @@ static int comparePriorities(EvalState & state,
static bool isPrebuilt(EvalState & state, const DrvInfo & elem) static bool isPrebuilt(EvalState & state, const DrvInfo & elem)
{ {
assert(false);
#if 0
return return
store->isValidPath(elem.queryOutPath(state)) || store->isValidPath(elem.queryOutPath(state)) ||
store->hasSubstitutes(elem.queryOutPath(state)); store->hasSubstitutes(elem.queryOutPath(state));
#endif
} }
@ -263,8 +270,8 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems,
if (k != newest.end()) { if (k != newest.end()) {
d = j->first.system == k->second.first.system ? 0 : d = j->first.system == k->second.first.system ? 0 :
j->first.system == thisSystem ? 1 : j->first.system == settings.thisSystem ? 1 :
k->second.first.system == thisSystem ? -1 : 0; k->second.first.system == settings.thisSystem ? -1 : 0;
if (d == 0) if (d == 0)
d = comparePriorities(state, j->first, k->second.first); d = comparePriorities(state, j->first, k->second.first);
if (d == 0) if (d == 0)
@ -495,7 +502,7 @@ static void installDerivations(Globals & globals,
if (globals.dryRun) return; if (globals.dryRun) return;
if (createUserEnv(globals.state, allElems, if (createUserEnv(globals.state, allElems,
profile, globals.keepDerivations, lockToken)) break; profile, settings.envKeepDerivations, lockToken)) break;
} }
} }
@ -602,7 +609,7 @@ static void upgradeDerivations(Globals & globals,
if (globals.dryRun) return; if (globals.dryRun) return;
if (createUserEnv(globals.state, newElems, if (createUserEnv(globals.state, newElems,
globals.profile, globals.keepDerivations, lockToken)) break; globals.profile, settings.envKeepDerivations, lockToken)) break;
} }
} }
@ -669,7 +676,7 @@ static void opSetFlag(Globals & globals,
/* Write the new user environment. */ /* Write the new user environment. */
if (createUserEnv(globals.state, installedElems, if (createUserEnv(globals.state, installedElems,
globals.profile, globals.keepDerivations, lockToken)) break; globals.profile, settings.envKeepDerivations, lockToken)) break;
} }
} }
@ -737,7 +744,7 @@ static void uninstallDerivations(Globals & globals, Strings & selectors,
if (globals.dryRun) return; if (globals.dryRun) return;
if (createUserEnv(globals.state, newElems, if (createUserEnv(globals.state, newElems,
profile, globals.keepDerivations, lockToken)) break; profile, settings.envKeepDerivations, lockToken)) break;
} }
} }
@ -866,7 +873,7 @@ static void opQuery(Globals & globals,
enum { sInstalled, sAvailable } source = sInstalled; enum { sInstalled, sAvailable } source = sInstalled;
readOnlyMode = true; /* makes evaluation a bit faster */ settings.readOnlyMode = true; /* makes evaluation a bit faster */
for (Strings::iterator i = args.begin(); i != args.end(); ) { for (Strings::iterator i = args.begin(); i != args.end(); ) {
string arg = *i++; string arg = *i++;
@ -929,6 +936,22 @@ static void opQuery(Globals & globals,
installed.insert(i->queryOutPath(globals.state)); installed.insert(i->queryOutPath(globals.state));
} }
/* Query which paths have substitutes. */
PathSet validPaths, substitutablePaths;
if (printStatus) {
PathSet paths;
foreach (vector<DrvInfo>::iterator, i, elems2)
try {
paths.insert(i->queryOutPath(globals.state));
} catch (AssertionError & e) {
printMsg(lvlTalkative, format("skipping derivation named `%1%' which gives an assertion failure") % i->name);
i->setFailed();
}
validPaths = store->queryValidPaths(paths);
substitutablePaths = store->querySubstitutablePaths(paths);
}
/* Print the desired columns, or XML output. */ /* Print the desired columns, or XML output. */
Table table; Table table;
@ -938,6 +961,8 @@ static void opQuery(Globals & globals,
foreach (vector<DrvInfo>::iterator, i, elems2) { foreach (vector<DrvInfo>::iterator, i, elems2) {
try { try {
if (i->hasFailed()) continue;
startNest(nest, lvlDebug, format("outputting query result `%1%'") % i->attrPath); startNest(nest, lvlDebug, format("outputting query result `%1%'") % i->attrPath);
if (globals.prebuiltOnly && !isPrebuilt(globals.state, *i)) continue; if (globals.prebuiltOnly && !isPrebuilt(globals.state, *i)) continue;
@ -949,9 +974,10 @@ static void opQuery(Globals & globals,
XMLAttrs attrs; XMLAttrs attrs;
if (printStatus) { if (printStatus) {
bool hasSubs = store->hasSubstitutes(i->queryOutPath(globals.state)); Path outPath = i->queryOutPath(globals.state);
bool isInstalled = installed.find(i->queryOutPath(globals.state)) != installed.end(); bool hasSubs = substitutablePaths.find(outPath) != substitutablePaths.end();
bool isValid = store->isValidPath(i->queryOutPath(globals.state)); bool isInstalled = installed.find(outPath) != installed.end();
bool isValid = validPaths.find(outPath) != validPaths.end();
if (xmlOutput) { if (xmlOutput) {
attrs["installed"] = isInstalled ? "1" : "0"; attrs["installed"] = isInstalled ? "1" : "0";
attrs["valid"] = isValid ? "1" : "0"; attrs["valid"] = isValid ? "1" : "0";
@ -1240,9 +1266,6 @@ void run(Strings args)
globals.preserveInstalled = false; globals.preserveInstalled = false;
globals.prebuiltOnly = false; globals.prebuiltOnly = false;
globals.keepDerivations =
queryBoolSetting("env-keep-derivations", false);
for (Strings::iterator i = args.begin(); i != args.end(); ) { for (Strings::iterator i = args.begin(); i != args.end(); ) {
string arg = *i++; string arg = *i++;
@ -1309,7 +1332,7 @@ void run(Strings args)
Path profileLink = getHomeDir() + "/.nix-profile"; Path profileLink = getHomeDir() + "/.nix-profile";
globals.profile = pathExists(profileLink) globals.profile = pathExists(profileLink)
? absPath(readLink(profileLink), dirOf(profileLink)) ? absPath(readLink(profileLink), dirOf(profileLink))
: canonPath(nixStateDir + "/profiles/default"); : canonPath(settings.nixStateDir + "/profiles/default");
} }
store = openStore(); store = openStore();
View File
@ -96,11 +96,11 @@ void run(Strings args)
if (arg == "-") if (arg == "-")
readStdin = true; readStdin = true;
else if (arg == "--eval-only") { else if (arg == "--eval-only") {
readOnlyMode = true; settings.readOnlyMode = true;
evalOnly = true; evalOnly = true;
} }
else if (arg == "--parse-only") { else if (arg == "--parse-only") {
readOnlyMode = true; settings.readOnlyMode = true;
parseOnly = evalOnly = true; parseOnly = evalOnly = true;
} }
else if (arg == "--find-file") else if (arg == "--find-file")
View File
@ -47,7 +47,7 @@ LocalStore & ensureLocalStore()
static Path useDeriver(Path path) static Path useDeriver(Path path)
{ {
if (!isDerivation(path)) { if (!isDerivation(path)) {
path = store->queryDeriver(path); path = store->queryDeriver(path);
if (path == "") if (path == "")
@ -93,18 +93,18 @@ static PathSet realisePath(const Path & path)
static void opRealise(Strings opFlags, Strings opArgs) static void opRealise(Strings opFlags, Strings opArgs)
{ {
bool dryRun = false; bool dryRun = false;
foreach (Strings::iterator, i, opFlags) foreach (Strings::iterator, i, opFlags)
if (*i == "--dry-run") dryRun = true; if (*i == "--dry-run") dryRun = true;
else throw UsageError(format("unknown flag `%1%'") % *i); else throw UsageError(format("unknown flag `%1%'") % *i);
foreach (Strings::iterator, i, opArgs) foreach (Strings::iterator, i, opArgs)
*i = followLinksToStorePath(*i); *i = followLinksToStorePath(*i);
printMissing(*store, PathSet(opArgs.begin(), opArgs.end())); printMissing(*store, PathSet(opArgs.begin(), opArgs.end()));
if (dryRun) return; if (dryRun) return;
/* Build all paths at the same time to exploit parallelism. */ /* Build all paths at the same time to exploit parallelism. */
PathSet paths(opArgs.begin(), opArgs.end()); PathSet paths(opArgs.begin(), opArgs.end());
store->buildPaths(paths); store->buildPaths(paths);
@ -132,7 +132,7 @@ static void opAdd(Strings opFlags, Strings opArgs)
static void opAddFixed(Strings opFlags, Strings opArgs) static void opAddFixed(Strings opFlags, Strings opArgs)
{ {
bool recursive = false; bool recursive = false;
for (Strings::iterator i = opFlags.begin(); for (Strings::iterator i = opFlags.begin();
i != opFlags.end(); ++i) i != opFlags.end(); ++i)
if (*i == "--recursive") recursive = true; if (*i == "--recursive") recursive = true;
@ -140,7 +140,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
if (opArgs.empty()) if (opArgs.empty())
throw UsageError("first argument must be hash algorithm"); throw UsageError("first argument must be hash algorithm");
HashType hashAlgo = parseHashType(opArgs.front()); HashType hashAlgo = parseHashType(opArgs.front());
opArgs.pop_front(); opArgs.pop_front();
@ -153,7 +153,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
static void opPrintFixedPath(Strings opFlags, Strings opArgs) static void opPrintFixedPath(Strings opFlags, Strings opArgs)
{ {
bool recursive = false; bool recursive = false;
for (Strings::iterator i = opFlags.begin(); for (Strings::iterator i = opFlags.begin();
i != opFlags.end(); ++i) i != opFlags.end(); ++i)
if (*i == "--recursive") recursive = true; if (*i == "--recursive") recursive = true;
@ -161,7 +161,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs)
if (opArgs.size() != 3) if (opArgs.size() != 3)
throw UsageError(format("`--print-fixed-path' requires three arguments")); throw UsageError(format("`--print-fixed-path' requires three arguments"));
Strings::iterator i = opArgs.begin(); Strings::iterator i = opArgs.begin();
HashType hashAlgo = parseHashType(*i++); HashType hashAlgo = parseHashType(*i++);
string hash = *i++; string hash = *i++;
@ -209,12 +209,12 @@ static void printTree(const Path & path,
PathSet references; PathSet references;
store->queryReferences(path, references); store->queryReferences(path, references);
#if 0 #if 0
for (PathSet::iterator i = drv.inputSrcs.begin(); for (PathSet::iterator i = drv.inputSrcs.begin();
i != drv.inputSrcs.end(); ++i) i != drv.inputSrcs.end(); ++i)
cout << format("%1%%2%\n") % (tailPad + treeConn) % *i; cout << format("%1%%2%\n") % (tailPad + treeConn) % *i;
#endif #endif
/* Topologically sort under the relation A < B iff A \in /* Topologically sort under the relation A < B iff A \in
closure(B). That is, if derivation A is an (possibly indirect) closure(B). That is, if derivation A is an (possibly indirect)
@ -270,7 +270,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
else throw UsageError(format("unknown flag `%1%'") % *i); else throw UsageError(format("unknown flag `%1%'") % *i);
switch (query) { switch (query) {
case qOutputs: { case qOutputs: {
foreach (Strings::iterator, i, opArgs) { foreach (Strings::iterator, i, opArgs) {
*i = followLinksToStorePath(*i); *i = followLinksToStorePath(*i);
@ -297,7 +297,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
} }
} }
Paths sorted = topoSortPaths(*store, paths); Paths sorted = topoSortPaths(*store, paths);
for (Paths::reverse_iterator i = sorted.rbegin(); for (Paths::reverse_iterator i = sorted.rbegin();
i != sorted.rend(); ++i) i != sorted.rend(); ++i)
cout << format("%s\n") % *i; cout << format("%s\n") % *i;
break; break;
@ -332,7 +332,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
if (query == qHash) { if (query == qHash) {
assert(info.hash.type == htSHA256); assert(info.hash.type == htSHA256);
cout << format("sha256:%1%\n") % printHash32(info.hash); cout << format("sha256:%1%\n") % printHash32(info.hash);
} else if (query == qSize) } else if (query == qSize)
cout << format("%1%\n") % info.narSize; cout << format("%1%\n") % info.narSize;
} }
} }
@ -344,7 +344,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
printTree(followLinksToStorePath(*i), "", "", done); printTree(followLinksToStorePath(*i), "", "", done);
break; break;
} }
case qGraph: { case qGraph: {
PathSet roots; PathSet roots;
foreach (Strings::iterator, i, opArgs) { foreach (Strings::iterator, i, opArgs) {
@ -370,7 +370,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
cout << format("%1%\n") % followLinksToStorePath(*i); cout << format("%1%\n") % followLinksToStorePath(*i);
break; break;
} }
case qRoots: { case qRoots: {
PathSet referrers; PathSet referrers;
foreach (Strings::iterator, i, opArgs) { foreach (Strings::iterator, i, opArgs) {
@ -384,7 +384,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
cout << format("%1%\n") % i->first; cout << format("%1%\n") % i->first;
break; break;
} }
default: default:
abort(); abort();
} }
@ -430,9 +430,9 @@ static void opReadLog(Strings opFlags, Strings opArgs)
foreach (Strings::iterator, i, opArgs) { foreach (Strings::iterator, i, opArgs) {
Path path = useDeriver(followLinksToStorePath(*i)); Path path = useDeriver(followLinksToStorePath(*i));
Path logPath = (format("%1%/%2%/%3%") % Path logPath = (format("%1%/%2%/%3%") %
nixLogDir % drvsLogDir % baseNameOf(path)).str(); settings.nixLogDir % drvsLogDir % baseNameOf(path)).str();
Path logBz2Path = logPath + ".bz2"; Path logBz2Path = logPath + ".bz2";
if (pathExists(logPath)) { if (pathExists(logPath)) {
@ -458,7 +458,7 @@ static void opReadLog(Strings opFlags, Strings opArgs)
} while (err != BZ_STREAM_END); } while (err != BZ_STREAM_END);
BZ2_bzReadClose(&err, bz); BZ2_bzReadClose(&err, bz);
} }
else throw Error(format("build log of derivation `%1%' is not available") % path); else throw Error(format("build log of derivation `%1%' is not available") % path);
} }
} }
@ -469,7 +469,7 @@ static void opDumpDB(Strings opFlags, Strings opArgs)
if (!opFlags.empty()) throw UsageError("unknown flag"); if (!opFlags.empty()) throw UsageError("unknown flag");
if (!opArgs.empty()) if (!opArgs.empty())
throw UsageError("no arguments expected"); throw UsageError("no arguments expected");
PathSet validPaths = store->queryValidPaths(); PathSet validPaths = store->queryAllValidPaths();
foreach (PathSet::iterator, i, validPaths) foreach (PathSet::iterator, i, validPaths)
cout << store->makeValidityRegistration(singleton<PathSet>(*i), true, true); cout << store->makeValidityRegistration(singleton<PathSet>(*i), true, true);
} }
@ -478,7 +478,7 @@ static void opDumpDB(Strings opFlags, Strings opArgs)
static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
{ {
ValidPathInfos infos; ValidPathInfos infos;
while (1) { while (1) {
ValidPathInfo info = decodeValidPathInfo(cin, hashGiven); ValidPathInfo info = decodeValidPathInfo(cin, hashGiven);
if (info.path == "") break; if (info.path == "") break;
@ -512,7 +512,7 @@ static void opRegisterValidity(Strings opFlags, Strings opArgs)
{ {
bool reregister = false; // !!! maybe this should be the default bool reregister = false; // !!! maybe this should be the default
bool hashGiven = false; bool hashGiven = false;
for (Strings::iterator i = opFlags.begin(); for (Strings::iterator i = opFlags.begin();
i != opFlags.end(); ++i) i != opFlags.end(); ++i)
if (*i == "--reregister") reregister = true; if (*i == "--reregister") reregister = true;
@ -528,7 +528,7 @@ static void opRegisterValidity(Strings opFlags, Strings opArgs)
static void opCheckValidity(Strings opFlags, Strings opArgs) static void opCheckValidity(Strings opFlags, Strings opArgs)
{ {
bool printInvalid = false; bool printInvalid = false;
for (Strings::iterator i = opFlags.begin(); for (Strings::iterator i = opFlags.begin();
i != opFlags.end(); ++i) i != opFlags.end(); ++i)
if (*i == "--print-invalid") printInvalid = true; if (*i == "--print-invalid") printInvalid = true;
@ -554,13 +554,13 @@ static string showBytes(unsigned long long bytes)
} }
struct PrintFreed struct PrintFreed
{ {
bool show; bool show;
const GCResults & results; const GCResults & results;
PrintFreed(bool show, const GCResults & results) PrintFreed(bool show, const GCResults & results)
: show(show), results(results) { } : show(show), results(results) { }
~PrintFreed() ~PrintFreed()
{ {
if (show) if (show)
cout << format("%1% store paths deleted, %2% freed\n") cout << format("%1% store paths deleted, %2% freed\n")
@ -575,9 +575,9 @@ static void opGC(Strings opFlags, Strings opArgs)
bool printRoots = false; bool printRoots = false;
GCOptions options; GCOptions options;
options.action = GCOptions::gcDeleteDead; options.action = GCOptions::gcDeleteDead;
GCResults results; GCResults results;
/* Do what? */ /* Do what? */
foreach (Strings::iterator, i, opFlags) foreach (Strings::iterator, i, opFlags)
if (*i == "--print-roots") printRoots = true; if (*i == "--print-roots") printRoots = true;
@ -616,14 +616,14 @@ static void opDelete(Strings opFlags, Strings opArgs)
{ {
GCOptions options; GCOptions options;
options.action = GCOptions::gcDeleteSpecific; options.action = GCOptions::gcDeleteSpecific;
foreach (Strings::iterator, i, opFlags) foreach (Strings::iterator, i, opFlags)
if (*i == "--ignore-liveness") options.ignoreLiveness = true; if (*i == "--ignore-liveness") options.ignoreLiveness = true;
else throw UsageError(format("unknown flag `%1%'") % *i); else throw UsageError(format("unknown flag `%1%'") % *i);
foreach (Strings::iterator, i, opArgs) foreach (Strings::iterator, i, opArgs)
options.pathsToDelete.insert(followLinksToStorePath(*i)); options.pathsToDelete.insert(followLinksToStorePath(*i));
GCResults results; GCResults results;
PrintFreed freed(true, results); PrintFreed freed(true, results);
store->collectGarbage(options, results); store->collectGarbage(options, results);
@ -674,9 +674,9 @@ static void opImport(Strings opFlags, Strings opArgs)
foreach (Strings::iterator, i, opFlags) foreach (Strings::iterator, i, opFlags)
if (*i == "--require-signature") requireSignature = true; if (*i == "--require-signature") requireSignature = true;
else throw UsageError(format("unknown flag `%1%'") % *i); else throw UsageError(format("unknown flag `%1%'") % *i);
if (!opArgs.empty()) throw UsageError("no arguments expected"); if (!opArgs.empty()) throw UsageError("no arguments expected");
FdSource source(STDIN_FILENO); FdSource source(STDIN_FILENO);
Paths paths = store->importPaths(requireSignature, source); Paths paths = store->importPaths(requireSignature, source);
@ -703,12 +703,12 @@ static void opVerify(Strings opFlags, Strings opArgs)
throw UsageError("no arguments expected"); throw UsageError("no arguments expected");
bool checkContents = false; bool checkContents = false;
for (Strings::iterator i = opFlags.begin(); for (Strings::iterator i = opFlags.begin();
i != opFlags.end(); ++i) i != opFlags.end(); ++i)
if (*i == "--check-contents") checkContents = true; if (*i == "--check-contents") checkContents = true;
else throw UsageError(format("unknown flag `%1%'") % *i); else throw UsageError(format("unknown flag `%1%'") % *i);
ensureLocalStore().verifyStore(checkContents); ensureLocalStore().verifyStore(checkContents);
} }
@ -847,7 +847,7 @@ void run(Strings args)
} }
else if (arg == "--indirect") else if (arg == "--indirect")
indirectRoot = true; indirectRoot = true;
else if (arg[0] == '-') { else if (arg[0] == '-') {
opFlags.push_back(arg); opFlags.push_back(arg);
if (arg == "--max-freed" || arg == "--max-links" || arg == "--max-atime") { /* !!! hack */ if (arg == "--max-freed" || arg == "--max-links" || arg == "--max-atime") { /* !!! hack */
if (i != args.end()) opFlags.push_back(*i++); if (i != args.end()) opFlags.push_back(*i++);
View File
@ -95,7 +95,7 @@ static bool isFarSideClosed(int socket)
throw Error("EOF expected (protocol error?)"); throw Error("EOF expected (protocol error?)");
else if (rd == -1 && errno != ECONNRESET) else if (rd == -1 && errno != ECONNRESET)
throw SysError("expected connection reset or EOF"); throw SysError("expected connection reset or EOF");
return true; return true;
} }
@ -185,7 +185,7 @@ static void stopWork(bool success = true, const string & msg = "", unsigned int
we're either sending or receiving from the client, so we'll be we're either sending or receiving from the client, so we'll be
notified of client death anyway. */ notified of client death anyway. */
setSigPollAction(false); setSigPollAction(false);
canSendStderr = false; canSendStderr = false;
if (success) if (success)
@ -220,7 +220,7 @@ struct TunnelSource : BufferedSource
so we have to disable the SIGPOLL handler. */ so we have to disable the SIGPOLL handler. */
setSigPollAction(false); setSigPollAction(false);
canSendStderr = false; canSendStderr = false;
writeInt(STDERR_READ, to); writeInt(STDERR_READ, to);
writeInt(len, to); writeInt(len, to);
to.flush(); to.flush();
@ -279,7 +279,7 @@ static void performOp(unsigned int clientVersion,
{ {
switch (op) { switch (op) {
#if 0 #if 0
case wopQuit: { case wopQuit: {
/* Close the database. */ /* Close the database. */
store.reset((StoreAPI *) 0); store.reset((StoreAPI *) 0);
@ -297,12 +297,30 @@ static void performOp(unsigned int clientVersion,
break; break;
} }
case wopQueryValidPaths: {
PathSet paths = readStorePaths<PathSet>(from);
startWork();
PathSet res = store->queryValidPaths(paths);
stopWork();
writeStrings(res, to);
break;
}
case wopHasSubstitutes: { case wopHasSubstitutes: {
Path path = readStorePath(from); Path path = readStorePath(from);
startWork(); startWork();
bool result = store->hasSubstitutes(path); PathSet res = store->querySubstitutablePaths(singleton<PathSet>(path));
stopWork(); stopWork();
writeInt(result, to); writeInt(res.find(path) != res.end(), to);
break;
}
case wopQuerySubstitutablePaths: {
PathSet paths = readStorePaths<PathSet>(from);
startWork();
PathSet res = store->querySubstitutablePaths(paths);
stopWork();
writeStrings(res, to);
break; break;
} }
@ -373,7 +391,7 @@ static void performOp(unsigned int clientVersion,
SavingSourceAdapter savedNAR(from); SavingSourceAdapter savedNAR(from);
RetrieveRegularNARSink savedRegular; RetrieveRegularNARSink savedRegular;
if (recursive) { if (recursive) {
/* Get the entire NAR dump from the client and save it to /* Get the entire NAR dump from the client and save it to
a string so that we can pass it to a string so that we can pass it to
@ -382,13 +400,13 @@ static void performOp(unsigned int clientVersion,
parseDump(sink, savedNAR); parseDump(sink, savedNAR);
} else } else
parseDump(savedRegular, from); parseDump(savedRegular, from);
startWork(); startWork();
if (!savedRegular.regular) throw Error("regular file expected"); if (!savedRegular.regular) throw Error("regular file expected");
Path path = dynamic_cast<LocalStore *>(store.get()) Path path = dynamic_cast<LocalStore *>(store.get())
->addToStoreFromDump(recursive ? savedNAR.s : savedRegular.s, baseName, recursive, hashAlgo); ->addToStoreFromDump(recursive ? savedNAR.s : savedRegular.s, baseName, recursive, hashAlgo);
stopWork(); stopWork();
writeString(path, to); writeString(path, to);
break; break;
} }
@ -494,41 +512,45 @@ static void performOp(unsigned int clientVersion,
} }
GCResults results; GCResults results;
startWork(); startWork();
if (options.ignoreLiveness) if (options.ignoreLiveness)
throw Error("you are not allowed to ignore liveness"); throw Error("you are not allowed to ignore liveness");
store->collectGarbage(options, results); store->collectGarbage(options, results);
stopWork(); stopWork();
writeStrings(results.paths, to); writeStrings(results.paths, to);
writeLongLong(results.bytesFreed, to); writeLongLong(results.bytesFreed, to);
writeLongLong(0, to); // obsolete writeLongLong(0, to); // obsolete
break; break;
} }
case wopSetOptions: { case wopSetOptions: {
keepFailed = readInt(from) != 0; settings.keepFailed = readInt(from) != 0;
keepGoing = readInt(from) != 0; settings.keepGoing = readInt(from) != 0;
tryFallback = readInt(from) != 0; settings.tryFallback = readInt(from) != 0;
verbosity = (Verbosity) readInt(from); verbosity = (Verbosity) readInt(from);
maxBuildJobs = readInt(from); settings.maxBuildJobs = readInt(from);
maxSilentTime = readInt(from); settings.maxSilentTime = readInt(from);
if (GET_PROTOCOL_MINOR(clientVersion) >= 2) if (GET_PROTOCOL_MINOR(clientVersion) >= 2)
useBuildHook = readInt(from) != 0; settings.useBuildHook = readInt(from) != 0;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4) { if (GET_PROTOCOL_MINOR(clientVersion) >= 4) {
buildVerbosity = (Verbosity) readInt(from); settings.buildVerbosity = (Verbosity) readInt(from);
logType = (LogType) readInt(from); logType = (LogType) readInt(from);
printBuildTrace = readInt(from) != 0; settings.printBuildTrace = readInt(from) != 0;
} }
if (GET_PROTOCOL_MINOR(clientVersion) >= 6) if (GET_PROTOCOL_MINOR(clientVersion) >= 6)
buildCores = readInt(from); settings.buildCores = readInt(from);
if (GET_PROTOCOL_MINOR(clientVersion) >= 10) { if (GET_PROTOCOL_MINOR(clientVersion) >= 10)
int x = readInt(from); settings.useSubstitutes = readInt(from) != 0;
Strings ss; if (GET_PROTOCOL_MINOR(clientVersion) >= 12) {
ss.push_back(x == 0 ? "false" : "true"); unsigned int n = readInt(from);
overrideSetting("build-use-substitutes", ss); for (unsigned int i = 0; i < n; i++) {
string name = readString(from);
string value = readString(from);
settings.set("untrusted-" + name, value);
}
} }
startWork(); startWork();
stopWork(); stopWork();
@ -538,23 +560,43 @@ static void performOp(unsigned int clientVersion,
case wopQuerySubstitutablePathInfo: { case wopQuerySubstitutablePathInfo: {
Path path = absPath(readString(from)); Path path = absPath(readString(from));
startWork(); startWork();
SubstitutablePathInfo info; SubstitutablePathInfos infos;
bool res = store->querySubstitutablePathInfo(path, info); store->querySubstitutablePathInfos(singleton<PathSet>(path), infos);
stopWork(); stopWork();
writeInt(res ? 1 : 0, to); SubstitutablePathInfos::iterator i = infos.find(path);
if (res) { if (i == infos.end())
writeString(info.deriver, to); writeInt(0, to);
writeStrings(info.references, to); else {
writeLongLong(info.downloadSize, to); writeInt(1, to);
writeString(i->second.deriver, to);
writeStrings(i->second.references, to);
writeLongLong(i->second.downloadSize, to);
if (GET_PROTOCOL_MINOR(clientVersion) >= 7) if (GET_PROTOCOL_MINOR(clientVersion) >= 7)
writeLongLong(info.narSize, to); writeLongLong(i->second.narSize, to);
} }
break; break;
} }
case wopQueryValidPaths: { case wopQuerySubstitutablePathInfos: {
PathSet paths = readStorePaths<PathSet>(from);
startWork(); startWork();
PathSet paths = store->queryValidPaths(); SubstitutablePathInfos infos;
store->querySubstitutablePathInfos(paths, infos);
stopWork();
writeInt(infos.size(), to);
foreach (SubstitutablePathInfos::iterator, i, infos) {
writeString(i->first, to);
writeString(i->second.deriver, to);
writeStrings(i->second.references, to);
writeLongLong(i->second.downloadSize, to);
writeLongLong(i->second.narSize, to);
}
break;
}
case wopQueryAllValidPaths: {
startWork();
PathSet paths = store->queryAllValidPaths();
stopWork(); stopWork();
writeStrings(paths, to); writeStrings(paths, to);
break; break;
@ -599,7 +641,7 @@ static void performOp(unsigned int clientVersion,
static void processConnection() static void processConnection()
{ {
canSendStderr = false; canSendStderr = false;
myPid = getpid(); myPid = getpid();
writeToStderr = tunnelStderr; writeToStderr = tunnelStderr;
#ifdef HAVE_HUP_NOTIFICATION #ifdef HAVE_HUP_NOTIFICATION
@ -643,7 +685,7 @@ static void processConnection()
stopWork(); stopWork();
to.flush(); to.flush();
} catch (Error & e) { } catch (Error & e) {
stopWork(false, e.msg()); stopWork(false, e.msg());
to.flush(); to.flush();
@ -652,7 +694,7 @@ static void processConnection()
/* Process client requests. */ /* Process client requests. */
unsigned int opCount = 0; unsigned int opCount = 0;
while (true) { while (true) {
WorkerOp op; WorkerOp op;
try { try {
@ -724,13 +766,13 @@ static void daemonLoop()
/* Otherwise, create and bind to a Unix domain socket. */ /* Otherwise, create and bind to a Unix domain socket. */
else { else {
/* Create and bind to a Unix domain socket. */ /* Create and bind to a Unix domain socket. */
fdSocket = socket(PF_UNIX, SOCK_STREAM, 0); fdSocket = socket(PF_UNIX, SOCK_STREAM, 0);
if (fdSocket == -1) if (fdSocket == -1)
throw SysError("cannot create Unix domain socket"); throw SysError("cannot create Unix domain socket");
string socketPath = nixStateDir + DEFAULT_SOCKET_PATH; string socketPath = settings.nixStateDir + DEFAULT_SOCKET_PATH;
createDirs(dirOf(socketPath)); createDirs(dirOf(socketPath));
@ -739,7 +781,7 @@ static void daemonLoop()
relative path name. */ relative path name. */
chdir(dirOf(socketPath).c_str()); chdir(dirOf(socketPath).c_str());
Path socketPathRel = "./" + baseNameOf(socketPath); Path socketPathRel = "./" + baseNameOf(socketPath);
struct sockaddr_un addr; struct sockaddr_un addr;
addr.sun_family = AF_UNIX; addr.sun_family = AF_UNIX;
if (socketPathRel.size() >= sizeof(addr.sun_path)) if (socketPathRel.size() >= sizeof(addr.sun_path))
@ -764,7 +806,7 @@ static void daemonLoop()
} }
closeOnExec(fdSocket); closeOnExec(fdSocket);
/* Loop accepting connections. */ /* Loop accepting connections. */
while (1) { while (1) {
@ -772,7 +814,7 @@ static void daemonLoop()
/* Important: the server process *cannot* open the SQLite /* Important: the server process *cannot* open the SQLite
database, because it doesn't like forks very much. */ database, because it doesn't like forks very much. */
assert(!store); assert(!store);
/* Accept a connection. */ /* Accept a connection. */
struct sockaddr_un remoteAddr; struct sockaddr_un remoteAddr;
socklen_t remoteAddrLen = sizeof(remoteAddr); socklen_t remoteAddrLen = sizeof(remoteAddr);
@ -781,14 +823,14 @@ static void daemonLoop()
(struct sockaddr *) &remoteAddr, &remoteAddrLen); (struct sockaddr *) &remoteAddr, &remoteAddrLen);
checkInterrupt(); checkInterrupt();
if (remote == -1) { if (remote == -1) {
if (errno == EINTR) if (errno == EINTR)
continue; continue;
else else
throw SysError("accepting connection"); throw SysError("accepting connection");
} }
closeOnExec(remote); closeOnExec(remote);
/* Get the identity of the caller, if possible. */ /* Get the identity of the caller, if possible. */
uid_t clientUid = -1; uid_t clientUid = -1;
pid_t clientPid = -1; pid_t clientPid = -1;
@ -803,13 +845,13 @@ static void daemonLoop()
#endif #endif
printMsg(lvlInfo, format("accepted connection from pid %1%, uid %2%") % clientPid % clientUid); printMsg(lvlInfo, format("accepted connection from pid %1%, uid %2%") % clientPid % clientUid);
/* Fork a child to handle the connection. */ /* Fork a child to handle the connection. */
pid_t child; pid_t child;
child = fork(); child = fork();
switch (child) { switch (child) {
case -1: case -1:
throw SysError("unable to fork"); throw SysError("unable to fork");
@ -828,16 +870,12 @@ static void daemonLoop()
string processName = int2String(clientPid); string processName = int2String(clientPid);
strncpy(argvSaved[1], processName.c_str(), strlen(argvSaved[1])); strncpy(argvSaved[1], processName.c_str(), strlen(argvSaved[1]));
} }
/* Since the daemon can be long-running, the
settings may have changed. So force a reload. */
reloadSettings();
/* Handle the connection. */ /* Handle the connection. */
from.fd = remote; from.fd = remote;
to.fd = remote; to.fd = remote;
processConnection(); processConnection();
} catch (std::exception & e) { } catch (std::exception & e) {
std::cerr << format("child error: %1%\n") % e.what(); std::cerr << format("child error: %1%\n") % e.what();
} }
@ -857,7 +895,7 @@ void run(Strings args)
{ {
bool slave = false; bool slave = false;
bool daemon = false; bool daemon = false;
for (Strings::iterator i = args.begin(); i != args.end(); ) { for (Strings::iterator i = args.begin(); i != args.end(); ) {
string arg = *i++; string arg = *i++;
if (arg == "--slave") slave = true; if (arg == "--slave") slave = true;
View File
@ -16,11 +16,13 @@
-e "s^@shell\@^$(bash)^g" \ -e "s^@shell\@^$(bash)^g" \
-e "s^@curl\@^$(curl)^g" \ -e "s^@curl\@^$(curl)^g" \
-e "s^@bzip2\@^$(bzip2)^g" \ -e "s^@bzip2\@^$(bzip2)^g" \
-e "s^@xz\@^$(xz)^g" \
-e "s^@perl\@^$(perl)^g" \ -e "s^@perl\@^$(perl)^g" \
-e "s^@perlFlags\@^$(perlFlags)^g" \ -e "s^@perlFlags\@^$(perlFlags)^g" \
-e "s^@coreutils\@^$(coreutils)^g" \ -e "s^@coreutils\@^$(coreutils)^g" \
-e "s^@sed\@^$(sed)^g" \ -e "s^@sed\@^$(sed)^g" \
-e "s^@tar\@^$(tar)^g" \ -e "s^@tar\@^$(tar)^g" \
-e "s^@tarFlags\@^$(tarFlags)^g" \
-e "s^@gzip\@^$(gzip)^g" \ -e "s^@gzip\@^$(gzip)^g" \
-e "s^@pv\@^$(pv)^g" \ -e "s^@pv\@^$(pv)^g" \
-e "s^@tr\@^$(tr)^g" \ -e "s^@tr\@^$(tr)^g" \
View File
@ -9,7 +9,8 @@ TESTS = init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \
gc-runtime.sh install-package.sh check-refs.sh filter-source.sh \ gc-runtime.sh install-package.sh check-refs.sh filter-source.sh \
remote-store.sh export.sh export-graph.sh negative-caching.sh \ remote-store.sh export.sh export-graph.sh negative-caching.sh \
binary-patching.sh timeout.sh secure-drv-outputs.sh nix-channel.sh \ binary-patching.sh timeout.sh secure-drv-outputs.sh nix-channel.sh \
multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \
binary-cache.sh
XFAIL_TESTS = XFAIL_TESTS =
35
tests/binary-cache.sh Normal file
View File
@ -0,0 +1,35 @@
source common.sh
clearStore
# Create the binary cache.
cacheDir=$TEST_ROOT/binary-cache
rm -rf $cacheDir
outPath=$(nix-build dependencies.nix --no-out-link)
nix-push --dest $cacheDir $outPath
# By default, a binary cache doesn't support "nix-env -qas", but does
# support installation.
clearStore
rm -f $NIX_STATE_DIR/binary-cache*
nix-env --option binary-caches "file://$cacheDir" -f dependencies.nix -qas \* | grep -- "---"
nix-store --option binary-caches "file://$cacheDir" -r $outPath
# But with the right configuration, "nix-env -qas" should also work.
clearStore
rm -f $NIX_STATE_DIR/binary-cache*
echo "WantMassQuery: 1" >> $cacheDir/nix-cache-info
nix-env --option binary-caches "file://$cacheDir" -f dependencies.nix -qas \* | grep -- "--S"
nix-store --option binary-caches "file://$cacheDir" -r $outPath
nix-store --check-validity $outPath
nix-store -qR $outPath | grep input-2
View File
@ -7,14 +7,17 @@ mkdir -p $TEST_ROOT/cache2 $TEST_ROOT/patches
RESULT=$TEST_ROOT/result RESULT=$TEST_ROOT/result
# Build version 1 and 2 of the "foo" package. # Build version 1 and 2 of the "foo" package.
nix-push --copy $TEST_ROOT/cache2 $TEST_ROOT/manifest1 \ nix-push --dest $TEST_ROOT/cache2 --manifest --bzip2 \
$(nix-build -o $RESULT binary-patching.nix --arg version 1) $(nix-build -o $RESULT binary-patching.nix --arg version 1)
mv $TEST_ROOT/cache2/MANIFEST $TEST_ROOT/manifest1
out2=$(nix-build -o $RESULT binary-patching.nix --arg version 2) out2=$(nix-build -o $RESULT binary-patching.nix --arg version 2)
nix-push --copy $TEST_ROOT/cache2 $TEST_ROOT/manifest2 $out2 nix-push --dest $TEST_ROOT/cache2 --manifest --bzip2 $out2
mv $TEST_ROOT/cache2/MANIFEST $TEST_ROOT/manifest2
out3=$(nix-build -o $RESULT binary-patching.nix --arg version 3) out3=$(nix-build -o $RESULT binary-patching.nix --arg version 3)
nix-push --copy $TEST_ROOT/cache2 $TEST_ROOT/manifest3 $out3 nix-push --dest $TEST_ROOT/cache2 --manifest --bzip2 $out3
mv $TEST_ROOT/cache2/MANIFEST $TEST_ROOT/manifest3
rm $RESULT rm $RESULT
View File
@ -16,6 +16,7 @@ export NIX_DB_DIR=$TEST_ROOT/db
export NIX_CONF_DIR=$TEST_ROOT/etc export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests
export SHARED=$TEST_ROOT/shared export SHARED=$TEST_ROOT/shared
export NIX_REMOTE=$NIX_REMOTE_
export PATH=@bindir@:$PATH export PATH=@bindir@:$PATH
@ -79,3 +80,5 @@ fail() {
echo "$1" echo "$1"
exit 1 exit 1
} }
set -x
View File
@ -9,7 +9,7 @@ clearStore
clearProfiles clearProfiles
cat > $TEST_ROOT/foo.nixpkg <<EOF cat > $TEST_ROOT/foo.nixpkg <<EOF
NIXPKG1 file://$TEST_ROOT/manifest simple $system $drvPath $outPath NIXPKG1 file://$TEST_ROOT/cache/MANIFEST simple $system $drvPath $outPath
EOF EOF
nix-install-package --non-interactive -p $profiles/test $TEST_ROOT/foo.nixpkg nix-install-package --non-interactive -p $profiles/test $TEST_ROOT/foo.nixpkg
View File
@ -19,7 +19,7 @@ nix-channel --remove xyzzy
# Create a channel. # Create a channel.
rm -rf $TEST_ROOT/foo rm -rf $TEST_ROOT/foo
mkdir -p $TEST_ROOT/foo mkdir -p $TEST_ROOT/foo
nix-push --copy $TEST_ROOT/foo $TEST_ROOT/foo/MANIFEST $(nix-store -r $(nix-instantiate dependencies.nix)) nix-push --dest $TEST_ROOT/foo --manifest --bzip2 $(nix-store -r $(nix-instantiate dependencies.nix))
rm -rf $TEST_ROOT/nixexprs rm -rf $TEST_ROOT/nixexprs
mkdir -p $TEST_ROOT/nixexprs mkdir -p $TEST_ROOT/nixexprs
cp config.nix dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/ cp config.nix dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/
View File
@ -2,7 +2,7 @@ source common.sh
pullCache () { pullCache () {
echo "pulling cache..." echo "pulling cache..."
nix-pull file://$TEST_ROOT/manifest nix-pull file://$TEST_ROOT/cache/MANIFEST
} }
clearStore clearStore
View File
@ -1,5 +1,7 @@
source common.sh source common.sh
clearStore
drvPath=$(nix-instantiate dependencies.nix) drvPath=$(nix-instantiate dependencies.nix)
outPath=$(nix-store -r $drvPath) outPath=$(nix-store -r $drvPath)
@ -7,4 +9,4 @@ echo "pushing $drvPath"
mkdir -p $TEST_ROOT/cache mkdir -p $TEST_ROOT/cache
nix-push --copy $TEST_ROOT/cache $TEST_ROOT/manifest $drvPath nix-push --dest $TEST_ROOT/cache --manifest $drvPath --bzip2
View File
@ -10,6 +10,7 @@ touch $reference
echo "making registration..." echo "making registration..."
set +x
for ((n = 0; n < $max; n++)); do for ((n = 0; n < $max; n++)); do
storePath=$NIX_STORE_DIR/$n storePath=$NIX_STORE_DIR/$n
echo -n > $storePath echo -n > $storePath
@ -19,6 +20,7 @@ for ((n = 0; n < $max; n++)); do
fi fi
echo $storePath; echo; echo 2; echo $reference; echo $ref2 echo $storePath; echo; echo 2; echo $reference; echo $ref2
done > $TEST_ROOT/reg_info done > $TEST_ROOT/reg_info
set -x
echo "registering..." echo "registering..."
View File
@ -3,7 +3,7 @@ source common.sh
echo '*** testing slave mode ***' echo '*** testing slave mode ***'
clearStore clearStore
clearManifests clearManifests
NIX_REMOTE=slave $SHELL ./user-envs.sh NIX_REMOTE_=slave $SHELL ./user-envs.sh
echo '*** testing daemon mode ***' echo '*** testing daemon mode ***'
clearStore clearStore
View File
@ -2,22 +2,25 @@
echo substituter args: $* >&2 echo substituter args: $* >&2
if test $1 = "--query"; then if test $1 = "--query"; then
while read cmd; do while read cmd args; do
echo FOO $cmd >&2 echo "CMD = $cmd, ARGS = $args" >&2
if test "$cmd" = "have"; then if test "$cmd" = "have"; then
read path for path in $args; do
if grep -q "$path" $TEST_ROOT/sub-paths; then read path
echo 1 if grep -q "$path" $TEST_ROOT/sub-paths; then
else echo $path
echo 0 fi
fi done
echo
elif test "$cmd" = "info"; then elif test "$cmd" = "info"; then
read path for path in $args; do
echo 1 echo $path
echo "" # deriver echo "" # deriver
echo 0 # nr of refs echo 0 # nr of refs
echo $((1 * 1024 * 1024)) # download size echo $((1 * 1024 * 1024)) # download size
echo $((2 * 1024 * 1024)) # nar size echo $((2 * 1024 * 1024)) # nar size
done
echo
else else
echo "bad command $cmd" echo "bad command $cmd"
exit 1 exit 1
@ -26,6 +29,7 @@ if test $1 = "--query"; then
elif test $1 = "--substitute"; then elif test $1 = "--substitute"; then
mkdir $2 mkdir $2
echo "Hallo Wereld" > $2/hello echo "Hallo Wereld" > $2/hello
echo # no expected hash
else else
echo "unknown substituter operation" echo "unknown substituter operation"
exit 1 exit 1
View File
@ -2,21 +2,23 @@
echo substituter2 args: $* >&2 echo substituter2 args: $* >&2
if test $1 = "--query"; then if test $1 = "--query"; then
while read cmd; do while read cmd args; do
if test "$cmd" = "have"; then if test "$cmd" = have; then
read path for path in $args; do
if grep -q "$path" $TEST_ROOT/sub-paths; then if grep -q "$path" $TEST_ROOT/sub-paths; then
echo 1 echo $path
else fi
echo 0 done
fi echo
elif test "$cmd" = "info"; then elif test "$cmd" = info; then
read path for path in $args; do
echo 1 echo $path
echo "" # deriver echo "" # deriver
echo 0 # nr of refs echo 0 # nr of refs
echo 0 # download size echo 0 # download size
echo 0 # nar size echo 0 # nar size
done
echo
else else
echo "bad command $cmd" echo "bad command $cmd"
exit 1 exit 1