* Store the size of a store path in the database (to be precise, the
  size of the NAR serialisation of the path, i.e., `nix-store --dump
  PATH').  This is useful for Hydra.
Eelco Dolstra 2010-11-16 17:11:46 +00:00
parent fb9368b5a0
commit a3883cbd28
16 changed files with 144 additions and 91 deletions
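
The central API change is that hashing a store path now also yields the size of its NAR serialisation. A minimal usage sketch of the new interface (the store path below is made up; hashPath, htSHA256, Hash and HashResult are the names introduced or changed in the hunks that follow):

    // hashPath() now returns a HashResult, i.e. a std::pair<Hash, unsigned
    // long long> holding the SHA-256 NAR hash and the NAR size in bytes --
    // the same number of bytes that `nix-store --dump PATH' would emit.
    HashResult res = hashPath(htSHA256, "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example");
    Hash narHash = res.first;
    unsigned long long narSize = res.second;   // ends up in ValidPaths.narSize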

View File

@@ -47,7 +47,7 @@ build-sqlite:
 else
 build-sqlite: $(SQLITE)
 	(cd $(SQLITE) && \
-	  CC="$(CC)" ./configure --disable-static --prefix=$(pkglibdir)/dummy --libdir=${pkglibdir} $(SUB_CONFIGURE_FLAGS) && \
+	  CC="$(CC)" CFLAGS="-DSQLITE_ENABLE_COLUMN_METADATA=1" ./configure --disable-static --prefix=$(pkglibdir)/dummy --libdir=${pkglibdir} $(SUB_CONFIGURE_FLAGS) && \
 	  $(MAKE) )
 	touch build-sqlite

View File

@@ -1547,7 +1547,7 @@ void DerivationGoal::startBuilder()
             /* Write closure info to `fileName'. */
             writeFile(tmpDir + "/" + fileName,
-                makeValidityRegistration(paths, false, false));
+                worker.store.makeValidityRegistration(paths, false, false));
         }
@@ -1870,7 +1870,7 @@ PathSet parseReferenceSpecifiers(const Derivation & drv, string attr)
 void DerivationGoal::computeClosure()
 {
     map<Path, PathSet> allReferences;
-    map<Path, Hash> contentHashes;
+    map<Path, HashResult> contentHashes;
     /* When using a build hook, the build hook can register the output
        as valid (by doing `nix-store --import').  If so we don't have
@@ -1927,7 +1927,7 @@ void DerivationGoal::computeClosure()
             if (ht == htUnknown)
                 throw BuildError(format("unknown hash algorithm `%1%'") % algo);
             Hash h = parseHash(ht, i->second.hash);
-            Hash h2 = recursive ? hashPath(ht, path) : hashFile(ht, path);
+            Hash h2 = recursive ? hashPath(ht, path).first : hashFile(ht, path);
             if (h != h2)
                 throw BuildError(
                     format("output path `%1%' should have %2% hash `%3%', instead has `%4%'")
@@ -1941,7 +1941,7 @@ void DerivationGoal::computeClosure()
            contained in it.  Compute the SHA-256 NAR hash at the same
            time.  The hash is stored in the database so that we can
            verify later on whether nobody has messed with the store. */
-        Hash hash;
+        HashResult hash;
         PathSet references = scanForReferences(path, allPaths, hash);
         contentHashes[path] = hash;
@@ -1970,14 +1970,18 @@ void DerivationGoal::computeClosure()
     }
     /* Register each output path as valid, and register the sets of
-       paths referenced by each of them.  !!! this should be
-       atomic so that either all paths are registered as valid, or
-       none are. */
-    foreach (DerivationOutputs::iterator, i, drv.outputs)
-        worker.store.registerValidPath(i->second.path,
-            contentHashes[i->second.path],
-            allReferences[i->second.path],
-            drvPath);
+       paths referenced by each of them. */
+    ValidPathInfos infos;
+    foreach (DerivationOutputs::iterator, i, drv.outputs) {
+        ValidPathInfo info;
+        info.path = i->second.path;
+        info.hash = contentHashes[i->second.path].first;
+        info.narSize = contentHashes[i->second.path].second;
+        info.references = allReferences[i->second.path];
+        info.deriver = drvPath;
+        infos.push_back(info);
+    }
+    worker.store.registerValidPaths(infos);
     /* It is now safe to delete the lock files, since all future
        lockers will see that the output paths are valid; they will not
@@ -2385,10 +2389,15 @@ void SubstitutionGoal::finished()
     canonicalisePathMetaData(storePath);
-    Hash contentHash = hashPath(htSHA256, storePath);
-    worker.store.registerValidPath(storePath, contentHash,
-        info.references, info.deriver);
+    HashResult hash = hashPath(htSHA256, storePath);
+
+    ValidPathInfo info2;
+    info2.path = storePath;
+    info2.hash = hash.first;
+    info2.narSize = hash.second;
+    info2.references = info.references;
+    info2.deriver = info.deriver;
+    worker.store.registerValidPath(info2);
     outputLock->setDeletion(true);

View File

@@ -327,9 +327,16 @@ void LocalStore::openDB(bool create)
             throw SQLiteError(db, "initialising database schema");
     }
+    /* Backwards compatibility with old (pre-release) databases.  Can
+       remove this eventually. */
+    if (sqlite3_table_column_metadata(db, 0, "ValidPaths", "narSize", 0, 0, 0, 0, 0) != SQLITE_OK) {
+        if (sqlite3_exec(db, "alter table ValidPaths add column narSize integer" , 0, 0, 0) != SQLITE_OK)
+            throw SQLiteError(db, "adding column narSize");
+    }
+
     /* Prepare SQL statements. */
     stmtRegisterValidPath.create(db,
-        "insert into ValidPaths (path, hash, registrationTime, deriver) values (?, ?, ?, ?);");
+        "insert into ValidPaths (path, hash, registrationTime, deriver, narSize) values (?, ?, ?, ?, ?);");
     stmtAddReference.create(db,
         "insert or replace into Refs (referrer, reference) values (?, ?);");
     stmtQueryPathInfo.create(db,
@@ -431,19 +438,6 @@ void canonicalisePathMetaData(const Path & path)
 }
-void LocalStore::registerValidPath(const Path & path,
-    const Hash & hash, const PathSet & references,
-    const Path & deriver)
-{
-    ValidPathInfo info;
-    info.path = path;
-    info.hash = hash;
-    info.references = references;
-    info.deriver = deriver;
-    registerValidPath(info);
-}
 unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
 {
     SQLiteStmtUse use(stmtRegisterValidPath);
@@ -454,6 +448,10 @@ unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
         stmtRegisterValidPath.bind(info.deriver);
     else
         stmtRegisterValidPath.bind(); // null
+    if (info.narSize != 0)
+        stmtRegisterValidPath.bind(info.narSize);
+    else
+        stmtRegisterValidPath.bind(); // null
     if (sqlite3_step(stmtRegisterValidPath) != SQLITE_DONE)
         throw SQLiteError(db, format("registering valid path `%1%' in database") % info.path);
     unsigned long long id = sqlite3_last_insert_rowid(db);
@@ -920,10 +918,18 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
                the path in the database.  We may just have computed it
                above (if called with recursive == true and hashAlgo ==
                sha256); otherwise, compute it here. */
-            registerValidPath(dstPath,
-                (recursive && hashAlgo == htSHA256) ? h :
-                (recursive ? hashString(htSHA256, dump) : hashPath(htSHA256, dstPath)),
-                PathSet(), "");
+            HashResult hash;
+            if (recursive) {
+                hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
+                hash.second = dump.size();
+            } else
+                hash = hashPath(htSHA256, dstPath);
+
+            ValidPathInfo info;
+            info.path = dstPath;
+            info.hash = hash.first;
+            info.narSize = hash.second;
+            registerValidPath(info);
         }
         outputLock.setDeletion(true);
@@ -970,9 +976,15 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
             writeFile(dstPath, s);
             canonicalisePathMetaData(dstPath);
-            registerValidPath(dstPath,
-                hashPath(htSHA256, dstPath), references, "");
+            HashResult hash = hashPath(htSHA256, dstPath);
+
+            ValidPathInfo info;
+            info.path = dstPath;
+            info.hash = hash.first;
+            info.narSize = hash.second;
+            info.references = references;
+            registerValidPath(info);
         }
         outputLock.setDeletion(true);
@@ -998,7 +1010,7 @@ struct HashAndWriteSink : Sink
     Hash currentHash()
     {
         HashSink hashSinkClone(hashSink);
-        return hashSinkClone.finish();
+        return hashSinkClone.finish().first;
     }
 };
@@ -1136,7 +1148,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
     Path deriver = readString(hashAndReadSource);
     if (deriver != "") assertStorePath(deriver);
-    Hash hash = hashAndReadSource.hashSink.finish();
+    Hash hash = hashAndReadSource.hashSink.finish().first;
     hashAndReadSource.hashing = false;
     bool haveSignature = readInt(hashAndReadSource) == 1;
@@ -1200,9 +1212,15 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
             /* !!! if we were clever, we could prevent the hashPath()
                here. */
-            if (deriver != "" && !isValidPath(deriver)) deriver = "";
-            registerValidPath(dstPath,
-                hashPath(htSHA256, dstPath), references, deriver);
+            HashResult hash = hashPath(htSHA256, dstPath);
+
+            ValidPathInfo info;
+            info.path = dstPath;
+            info.hash = hash.first;
+            info.narSize = hash.second;
+            info.references = references;
+            info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
+            registerValidPath(info);
         }
         outputLock.setDeletion(true);
@@ -1263,12 +1281,14 @@ void LocalStore::verifyStore(bool checkContents)
                 /* Check the content hash (optionally - slow). */
                 printMsg(lvlTalkative, format("checking contents of `%1%'") % *i);
-                Hash current = hashPath(info.hash.type, *i);
+                Hash current = hashPath(info.hash.type, *i).first;
                 if (current != info.hash) {
                     printMsg(lvlError, format("path `%1%' was modified! "
                                               "expected hash `%2%', got `%3%'")
                         % *i % printHash(info.hash) % printHash(current));
                 }
+
+                /* !!! Check info.narSize */
             }
         }
     }

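A note on the openDB() hunk above: sqlite3_table_column_metadata() is only compiled into SQLite when SQLITE_ENABLE_COLUMN_METADATA is defined, which is exactly what the CFLAGS change in the Makefile hunk at the top of this commit provides. A sketch of the probe-and-alter pattern as a standalone helper (ensureColumn is hypothetical and not part of this commit; SQLiteError is the exception type already used in local-store.cc):

    /* Probe for `column' in `table'; if it is missing, run `alterSql' to add
       it.  Requires SQLite built with SQLITE_ENABLE_COLUMN_METADATA. */
    static void ensureColumn(sqlite3 * db, const char * table,
        const char * column, const char * alterSql)
    {
        if (sqlite3_table_column_metadata(db, 0, table, column,
                0, 0, 0, 0, 0) != SQLITE_OK)
            if (sqlite3_exec(db, alterSql, 0, 0, 0) != SQLITE_OK)
                throw SQLiteError(db, "altering table");
    }

    // e.g. ensureColumn(db, "ValidPaths", "narSize",
    //          "alter table ValidPaths add column narSize integer");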
View File

@@ -97,6 +97,8 @@ public:
     PathSet queryValidPaths();
+    ValidPathInfo queryPathInfo(const Path & path);
+
     Hash queryPathHash(const Path & path);
     void queryReferences(const Path & path, PathSet & references);
@@ -173,8 +175,7 @@ public:
        execution of the derivation (or something equivalent).  Also
        register the hash of the file system contents of the path.  The
        hash must be a SHA-256 hash. */
-    void registerValidPath(const Path & path,
-        const Hash & hash, const PathSet & references, const Path & deriver);
+    void registerValidPath(const ValidPathInfo & info);
     void registerValidPaths(const ValidPathInfos & infos);
@@ -224,10 +225,6 @@ private:
     void addReference(unsigned long long referrer, unsigned long long reference);
-    void registerValidPath(const ValidPathInfo & info);
-    ValidPathInfo queryPathInfo(const Path & path);
-
     void appendReferrer(const Path & from, const Path & to, bool lock);
     void rewriteReferrers(const Path & path, bool purge, PathSet referrers);

View File

@@ -68,7 +68,7 @@ static void hashAndLink(bool dryRun, HashToPath & hashToPath,
        the contents of the symlink (i.e. the result of
        readlink()), not the contents of the target (which may not
        even exist). */
-    Hash hash = hashPath(htSHA256, path);
+    Hash hash = hashPath(htSHA256, path).first;
     stats.totalFiles++;
     printMsg(lvlDebug, format("`%1%' has hash `%2%'") % path % printHash(hash));

View File

@@ -81,7 +81,7 @@ void RefScanSink::operator () (const unsigned char * data, unsigned int len)
 PathSet scanForReferences(const string & path,
-    const PathSet & refs, Hash & hash)
+    const PathSet & refs, HashResult & hash)
 {
     RefScanSink sink;
     std::map<string, Path> backMap;

View File

@@ -7,7 +7,7 @@
 namespace nix {
 PathSet scanForReferences(const Path & path, const PathSet & refs,
-    Hash & hash);
+    HashResult & hash);
 }

View File

@@ -247,6 +247,12 @@ bool RemoteStore::querySubstitutablePathInfo(const Path & path,
 }
+ValidPathInfo RemoteStore::queryPathInfo(const Path & path)
+{
+    throw Error("not implemented");
+}
+
 Hash RemoteStore::queryPathHash(const Path & path)
 {
     openConnection();

View File

@@ -29,6 +29,8 @@ public:
     PathSet queryValidPaths();
+    ValidPathInfo queryPathInfo(const Path & path);
+
     Hash queryPathHash(const Path & path);
     void queryReferences(const Path & path, PathSet & references);

View File

@@ -3,7 +3,8 @@ create table if not exists ValidPaths (
     path text unique not null,
     hash text not null,
     registrationTime integer not null,
-    deriver text
+    deriver text,
+    narSize integer
 );
 create table if not exists Refs (

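With the narSize column in place, a consumer such as Hydra can read path sizes (and aggregates over them) straight from the database instead of re-serialising store paths. An illustrative query sketched with the raw SQLite C API (an open db handle is assumed; paths registered before this change have a NULL narSize, which sum() simply skips):

    sqlite3_stmt * stmt;
    if (sqlite3_prepare_v2(db, "select sum(narSize) from ValidPaths;",
            -1, &stmt, 0) == SQLITE_OK) {
        if (sqlite3_step(stmt) == SQLITE_ROW)
            printMsg(lvlInfo, format("total NAR size: %1% bytes")
                % sqlite3_column_int64(stmt, 0));
        sqlite3_finalize(stmt);
    }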
View File

@@ -190,7 +190,7 @@ std::pair<Path, Hash> computeStorePathForPath(const Path & srcPath,
     bool recursive, HashType hashAlgo, PathFilter & filter)
 {
     HashType ht(hashAlgo);
-    Hash h = recursive ? hashPath(ht, srcPath, filter) : hashFile(ht, srcPath);
+    Hash h = recursive ? hashPath(ht, srcPath, filter).first : hashFile(ht, srcPath);
     string name = baseNameOf(srcPath);
     Path dstPath = makeFixedOutputPath(recursive, hashAlgo, h, name);
     return std::pair<Path, Hash>(dstPath, h);
@@ -216,7 +216,7 @@ Path computeStorePathForText(const string & name, const string & s,
 /* Return a string accepted by decodeValidPathInfo() that
    registers the specified paths as valid.  Note: it's the
    responsibility of the caller to provide a closure. */
-string makeValidityRegistration(const PathSet & paths,
+string StoreAPI::makeValidityRegistration(const PathSet & paths,
     bool showDerivers, bool showHash)
 {
     string s = "";
@@ -224,18 +224,19 @@ string makeValidityRegistration(const PathSet & paths,
     foreach (PathSet::iterator, i, paths) {
         s += *i + "\n";
-        if (showHash)
-            s += printHash(store->queryPathHash(*i)) + "\n";
+        ValidPathInfo info = queryPathInfo(*i);
-        Path deriver = showDerivers ? store->queryDeriver(*i) : "";
+        if (showHash) {
+            s += printHash(info.hash) + "\n";
+            s += (format("%1%\n") % info.narSize).str();
+        }
+
+        Path deriver = showDerivers ? info.deriver : "";
         s += deriver + "\n";
-        PathSet references;
-        store->queryReferences(*i, references);
-        s += (format("%1%\n") % references.size()).str();
+        s += (format("%1%\n") % info.references.size()).str();
-        foreach (PathSet::iterator, j, references)
+        foreach (PathSet::iterator, j, info.references)
             s += *j + "\n";
     }
@@ -252,6 +253,8 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
         string s;
         getline(str, s);
         info.hash = parseHash(htSHA256, s);
+        getline(str, s);
+        if (!string2Int(s, info.narSize)) throw Error("number expected");
     }
     getline(str, info.deriver);
     string s; int n;

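For reference, the textual registration format produced by makeValidityRegistration() and parsed by decodeValidPathInfo() (with hashGiven set) now carries one extra line per path. A schematic example, with made-up paths and size:

    /nix/store/...-example            (store path)
    <SHA-256 NAR hash, base-16>       (printHash(info.hash))
    4096                              (NAR size in bytes -- the new line)
    /nix/store/...-example.drv        (deriver, possibly empty)
    1                                 (number of references)
    /nix/store/...-some-dependency    (one line per reference)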
View File

@@ -90,6 +90,21 @@ struct SubstitutablePathInfo
 };
+struct ValidPathInfo
+{
+    Path path;
+    Path deriver;
+    Hash hash;
+    PathSet references;
+    time_t registrationTime;
+    unsigned long long narSize; // 0 = unknown
+    unsigned long long id; // internal use only
+    ValidPathInfo() : registrationTime(0), narSize(0) { }
+};
+
+typedef list<ValidPathInfo> ValidPathInfos;
+
 class StoreAPI
 {
 public:
@@ -102,6 +117,9 @@ public:
     /* Query the set of valid paths. */
     virtual PathSet queryValidPaths() = 0;
+    /* Query information about a valid path. */
+    virtual ValidPathInfo queryPathInfo(const Path & path) = 0;
+
     /* Queries the hash of a valid path. */
     virtual Hash queryPathHash(const Path & path) = 0;
@@ -214,6 +232,12 @@ public:
     /* Clear the "failed" status of the given paths.  The special
        value `*' causes all failed paths to be cleared. */
     virtual void clearFailedPaths(const PathSet & paths) = 0;
+
+    /* Return a string representing information about the path that
+       can be loaded into the database using `nix-store --load-db' or
+       `nix-store --register-validity'. */
+    string makeValidityRegistration(const PathSet & paths,
+        bool showDerivers, bool showHash);
 };
@@ -307,22 +331,6 @@ boost::shared_ptr<StoreAPI> openStore();
 string showPaths(const PathSet & paths);
-string makeValidityRegistration(const PathSet & paths,
-    bool showDerivers, bool showHash);
-
-struct ValidPathInfo
-{
-    Path path;
-    Path deriver;
-    Hash hash;
-    PathSet references;
-    time_t registrationTime;
-    unsigned long long id; // internal use only
-    ValidPathInfo() : registrationTime(0) { }
-};
-
-typedef list<ValidPathInfo> ValidPathInfos;
-
 ValidPathInfo decodeValidPathInfo(std::istream & str,
     bool hashGiven = false);

View File

@@ -286,12 +286,14 @@ Hash hashFile(HashType ht, const Path & path)
 HashSink::HashSink(HashType ht) : ht(ht)
 {
     ctx = new Ctx;
+    bytes = 0;
     start(ht, *ctx);
 }
 HashSink::HashSink(const HashSink & h)
 {
     ht = h.ht;
+    bytes = h.bytes;
     ctx = new Ctx;
     *ctx = *h.ctx;
 }
@@ -304,18 +306,20 @@ HashSink::~HashSink()
 void HashSink::operator ()
     (const unsigned char * data, unsigned int len)
 {
+    bytes += len;
     update(ht, *ctx, data, len);
 }
-Hash HashSink::finish()
+HashResult HashSink::finish()
 {
     Hash hash(ht);
     nix::finish(ht, *ctx, hash.hash);
-    return hash;
+    return HashResult(hash, bytes);
 }
-Hash hashPath(HashType ht, const Path & path, PathFilter & filter)
+HashResult hashPath(
+    HashType ht, const Path & path, PathFilter & filter)
 {
     HashSink sink(ht);
     dumpPath(path, sink, filter);

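The same size information is available when hashing streamed data: HashSink now counts the bytes fed into it, so a single pass over a NAR stream yields both hash and size. A small usage sketch (the path variable is hypothetical; dumpPath() is the existing NAR serialiser from libutil):

    HashSink sink(htSHA256);
    dumpPath(path, sink);              // serialise `path' as a NAR into the sink
    HashResult res = sink.finish();    // res.first = hash, res.second = bytes written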
View File

@@ -40,7 +40,6 @@ struct Hash
     /* For sorting. */
     bool operator < (const Hash & h) const;
 };
@@ -72,7 +71,8 @@ Hash hashFile(HashType ht, const Path & path);
    (essentially) hashString(ht, dumpPath(path)). */
 struct PathFilter;
 extern PathFilter defaultPathFilter;
-Hash hashPath(HashType ht, const Path & path,
+typedef std::pair<Hash, unsigned long long> HashResult;
+HashResult hashPath(HashType ht, const Path & path,
     PathFilter & filter = defaultPathFilter);
@@ -93,13 +93,14 @@ class HashSink : public Sink
 private:
     HashType ht;
     Ctx * ctx;
+    unsigned long long bytes;
 public:
     HashSink(HashType ht);
     HashSink(const HashSink & h);
     ~HashSink();
     virtual void operator () (const unsigned char * data, unsigned int len);
-    Hash finish();
+    HashResult finish();
 };

View File

@@ -44,7 +44,7 @@ void run(Strings args)
     if (op == opHash) {
         for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) {
-            Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i);
+            Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i).first;
             if (truncate && h.hashSize > 20) h = compressHash(h, 20);
             std::cout << format("%1%\n") %
                 (base32 ? printHash32(h) : printHash(h));

View File

@@ -393,9 +393,8 @@ static void opDumpDB(Strings opFlags, Strings opArgs)
     if (!opArgs.empty())
         throw UsageError("no arguments expected");
     PathSet validPaths = store->queryValidPaths();
-    foreach (PathSet::iterator, i, validPaths) {
-        cout << makeValidityRegistration(singleton<PathSet>(*i), true, true);
-    }
+    foreach (PathSet::iterator, i, validPaths)
+        cout << store->makeValidityRegistration(singleton<PathSet>(*i), true, true);
 }
@@ -410,8 +409,11 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
             /* !!! races */
             if (canonicalise)
                 canonicalisePathMetaData(info.path);
-            if (!hashGiven)
-                info.hash = hashPath(htSHA256, info.path);
+            if (!hashGiven) {
+                HashResult hash = hashPath(htSHA256, info.path);
+                info.hash = hash.first;
+                info.narSize = hash.second;
+            }
             infos.push_back(info);
         }
     }