Feature: installed file verification (#12841)

This feature generates a verification manifest for each installed
package and provides a command, "spack verify", which can be used to
compare the current file checksums/permissions with those calculated
at installed time.

Verification includes

* Checksums of files
* File permissions
* Modification time
* File size

Packages installed before this PR will be skipped during verification.
To verify such a package you must reinstall it.

The spack verify command has three modes.

* With the -a,--all option it will check every installed package.
* With the -f,--files option, it will check some specific files,
  determine which package they belong to, and confirm that they have
  not been changed.
* With the -s,--specs option or by default, it will check some
  specific packages to confirm that no files have changed.
This commit is contained in:
Greg Becker 2019-10-15 14:24:52 -07:00 committed by Peter Scheibel
parent 5ea0eed287
commit 94e80933f0
13 changed files with 791 additions and 54 deletions

View File

@ -277,6 +277,40 @@ the tarballs in question to it (see :ref:`mirrors`):
$ spack install galahad
-----------------------
Verifying installations
-----------------------
The ``spack verify`` command can be used to verify the validity of
Spack-installed packages any time after installation.
At installation time, Spack creates a manifest of every file in the
installation prefix. For links, Spack tracks the mode, ownership, and
destination. For directories, Spack tracks the mode, and
ownership. For files, Spack tracks the mode, ownership, modification
time, hash, and size. The Spack verify command will check, for every
file in each package, whether any of those attributes have changed. It
will also check for newly added files or deleted files from the
installation prefix. Spack can either check all installed packages
using the ``-a,--all`` option, or accept specs listed on the command
line to verify.
The ``spack verify`` command can also verify for individual files that
they haven't been altered since installation time. If the given file
is not in a Spack installation prefix, Spack will report that it is
not owned by any package. To check individual files instead of specs,
use the ``-f,--files`` option.
Spack installation manifests are part of the tarball signed by Spack
for binary package distribution. When installed from a binary package,
Spack uses the packaged installation manifest instead of creating one
at install time.
The ``spack verify`` command also accepts the ``-l,--local`` option to
check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.
-------------------------
Seeing installed packages
-------------------------

View File

@ -653,7 +653,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
tty.debug('TEMPORARY DIRECTORY DELETED [{0}]'.format(tmp_dir))
def hash_directory(directory):
def hash_directory(directory, ignore=[]):
"""Hashes recursively the content of a directory.
Args:
@ -670,11 +670,12 @@ def hash_directory(directory):
for root, dirs, files in os.walk(directory):
for name in sorted(files):
filename = os.path.join(root, name)
# TODO: if caching big files becomes an issue, convert this to
# TODO: read in chunks. Currently it's used only for testing
# TODO: purposes.
with open(filename, 'rb') as f:
md5_hash.update(f.read())
if filename not in ignore:
# TODO: if caching big files becomes an issue, convert this to
# TODO: read in chunks. Currently it's used only for testing
# TODO: purposes.
with open(filename, 'rb') as f:
md5_hash.update(f.read())
return md5_hash.hexdigest()

View File

@ -585,8 +585,13 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
except Exception as e:
shutil.rmtree(spec.prefix)
tty.die(e)
# Delay creating spec.prefix until verification is complete
# and any relocation has been done.
else:
manifest_file = os.path.join(spec.prefix,
spack.store.layout.metadata_dir,
spack.store.layout.manifest_file_name)
if not os.path.exists(manifest_file):
spec_id = spec.format('{name}/{hash:7}')
tty.warn('No manifest file in tarball for spec %s' % spec_id)
finally:
shutil.rmtree(tmpdir)

View File

@ -174,16 +174,20 @@ def elide_list(line_list, max_num=10):
return line_list
def disambiguate_spec(spec, env):
def disambiguate_spec(spec, env, local=False):
"""Given a spec, figure out which installed package it refers to.
Arguments:
spec (spack.spec.Spec): a spec to disambiguate
env (spack.environment.Environment): a spack environment,
if one is active, or None if no environment is active
local (boolean, default False): do not search chained spack instances
"""
hashes = env.all_hashes() if env else None
matching_specs = spack.store.db.query(spec, hashes=hashes)
if local:
matching_specs = spack.store.db.query_local(spec, hashes=hashes)
else:
matching_specs = spack.store.db.query(spec, hashes=hashes)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)

View File

@ -0,0 +1,95 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import argparse
import llnl.util.tty as tty
import spack.store
import spack.verify
import spack.environment as ev
description = "Check that all spack packages are on disk as installed"
section = "admin"
level = "long"
def setup_parser(subparser):
    """Define the command-line interface for ``spack verify``."""
    # Stash the parser so verify() can print help on invalid usage.
    setup_parser.parser = subparser
    subparser.add_argument('-l', '--local', action='store_true',
                           help="Verify only locally installed packages")
    # Fix typo in user-facing help text: "Ouptut" -> "Output".
    subparser.add_argument('-j', '--json', action='store_true',
                           help="Output json-formatted errors")
    subparser.add_argument('-a', '--all', action='store_true',
                           help="Verify all packages")
    subparser.add_argument('files_or_specs', nargs=argparse.REMAINDER,
                           help="Files or specs to verify")

    # -s and -f choose how the positional arguments are interpreted;
    # they are mutually exclusive and default to spec mode.
    type = subparser.add_mutually_exclusive_group()
    type.add_argument(
        '-s', '--specs',
        action='store_const', const='specs', dest='type', default='specs',
        help='Treat entries as specs (default)')
    type.add_argument(
        '-f', '--files',
        action='store_const', const='files', dest='type', default='specs',
        help="Treat entries as absolute filenames. Cannot be used with '-a'")
def verify(parser, args):
    """Entry point for ``spack verify``.

    In file mode (``-f``), each positional argument is checked against
    the manifest of the package that owns it; errors are printed but do
    not affect the return code. In spec mode (default), each matching
    installed spec is verified and the first failing spec causes a
    return code of 1. Returns 1 on invalid usage.
    """
    local = args.local

    if args.type == 'files':
        # -a makes no sense together with explicit filenames.
        if args.all:
            setup_parser.parser.print_help()
            return 1

        for file in args.files_or_specs:
            results = spack.verify.check_file_manifest(file)
            if results.has_errors():
                if args.json:
                    print(results.json_string())
                else:
                    print(results)

        return 0
    else:
        spec_args = spack.cmd.parse_specs(args.files_or_specs)

        if args.all:
            query = spack.store.db.query_local if local \
                else spack.store.db.query

            # construct spec list; reuse the specs already parsed above
            # instead of parsing the arguments a second time
            if spec_args:
                specs = []
                for spec in spec_args:
                    specs += query(spec, installed=True)
            else:
                specs = query(installed=True)
        elif args.files_or_specs:
            # construct disambiguated spec list
            env = ev.get_env(args, 'verify')
            specs = list(map(lambda x: spack.cmd.disambiguate_spec(
                x, env, local=local), spec_args))
        else:
            setup_parser.parser.print_help()
            return 1

        for spec in specs:
            # BUG FIX: the original debug message had a bare '%s' with no
            # argument; interpolate the spec identifier.
            tty.debug("Verifying package %s"
                      % spec.format('{name}/{hash:7}'))
            results = spack.verify.check_spec_manifest(spec)
            if results.has_errors():
                if args.json:
                    print(results.json_string())
                else:
                    tty.msg("In package %s" % spec.format('{name}/{hash:7}'))
                    print(results)
                return 1
            else:
                tty.debug(results)

View File

@ -194,6 +194,7 @@ def __init__(self, root, **kwargs):
self.spec_file_name = 'spec.yaml'
self.extension_file_name = 'extensions.yaml'
self.packages_dir = 'repos' # archive of package.py files
self.manifest_file_name = 'install_manifest.json'
@property
def hidden_file_paths(self):
@ -430,6 +431,11 @@ def _extension_map(self, spec):
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
if not extensions:
# Remove the empty extensions file
os.remove(path)
return
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
mkdirp(dirname)

View File

@ -188,47 +188,41 @@ def __init__(self, root, layout, **kwargs):
# Super class gets projections from the kwargs
# YAML specific to get projections from YAML file
projections_path = os.path.join(self._root, _projections_path)
self.projections_path = os.path.join(self._root, _projections_path)
if not self.projections:
if os.path.exists(projections_path):
# Read projections file from view
with open(projections_path, 'r') as f:
projections_data = s_yaml.load(f)
spack.config.validate(projections_data,
spack.schema.projections.schema)
self.projections = projections_data['projections']
else:
# Write projections file to new view
# Not strictly necessary as the empty file is the empty
# projection but it makes sense for consistency
try:
mkdirp(os.path.dirname(projections_path))
with open(projections_path, 'w') as f:
f.write(s_yaml.dump({'projections': self.projections}))
except OSError as e:
if self.projections:
raise e
elif not os.path.exists(projections_path):
# Read projections file from view
self.projections = self.read_projections()
elif not os.path.exists(self.projections_path):
# Write projections file to new view
mkdirp(os.path.dirname(projections_path))
with open(projections_path, 'w') as f:
f.write(s_yaml.dump({'projections': self.projections}))
self.write_projections()
else:
# Ensure projections are the same from each source
# Read projections file from view
with open(projections_path, 'r') as f:
projections_data = s_yaml.load(f)
spack.config.validate(projections_data,
spack.schema.projections.schema)
if self.projections != projections_data['projections']:
msg = 'View at %s has projections file' % self._root
msg += ' which does not match projections passed manually.'
raise ConflictingProjectionsError(msg)
if self.projections != self.read_projections():
msg = 'View at %s has projections file' % self._root
msg += ' which does not match projections passed manually.'
raise ConflictingProjectionsError(msg)
self.extensions_layout = YamlViewExtensionsLayout(self, layout)
self._croot = colorize_root(self._root) + " "
def write_projections(self):
    """Persist this view's projections to its projections file.

    Nothing is written when the projections are empty: the absent
    file already denotes the empty projection.
    """
    if not self.projections:
        return
    mkdirp(os.path.dirname(self.projections_path))
    with open(self.projections_path, 'w') as f:
        f.write(s_yaml.dump({'projections': self.projections}))
def read_projections(self):
    """Load and schema-validate projections from this view's
    projections file.

    Returns the stored projections dict, or an empty dict when no
    projections file exists.
    """
    if not os.path.exists(self.projections_path):
        return {}
    with open(self.projections_path, 'r') as f:
        projections_data = s_yaml.load(f)
        spack.config.validate(projections_data,
                              spack.schema.projections.schema)
        return projections_data['projections']
def add_specs(self, *specs, **kwargs):
assert all((s.concrete for s in specs))
specs = set(specs)

View File

@ -36,8 +36,14 @@ def all_hook_modules():
mod_name = __name__ + '.' + name
path = os.path.join(spack.paths.hooks_path, name) + ".py"
mod = simp.load_source(mod_name, path)
modules.append(mod)
if name == 'write_install_manifest':
last_mod = mod
else:
modules.append(mod)
# put `write_install_manifest` as the last hook to run
modules.append(last_mod)
return modules

View File

@ -0,0 +1,11 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.verify
def post_install(spec):
    """Install hook: record a verification manifest for ``spec``.

    External packages are not managed by Spack, so no manifest is
    written for them.
    """
    if spec.external:
        return
    spack.verify.write_manifest(spec)

View File

@ -197,22 +197,26 @@ def test_install_overwrite(
install('libdwarf')
manifest = os.path.join(spec.prefix, spack.store.layout.metadata_dir,
spack.store.layout.manifest_file_name)
assert os.path.exists(spec.prefix)
expected_md5 = fs.hash_directory(spec.prefix)
expected_md5 = fs.hash_directory(spec.prefix, ignore=[manifest])
# Modify the first installation to be sure the content is not the same
# as the one after we reinstalled
with open(os.path.join(spec.prefix, 'only_in_old'), 'w') as f:
f.write('This content is here to differentiate installations.')
bad_md5 = fs.hash_directory(spec.prefix)
bad_md5 = fs.hash_directory(spec.prefix, ignore=[manifest])
assert bad_md5 != expected_md5
install('--overwrite', '-y', 'libdwarf')
assert os.path.exists(spec.prefix)
assert fs.hash_directory(spec.prefix) == expected_md5
assert fs.hash_directory(spec.prefix) != bad_md5
assert fs.hash_directory(spec.prefix, ignore=[manifest]) == expected_md5
assert fs.hash_directory(spec.prefix, ignore=[manifest]) != bad_md5
def test_install_overwrite_not_installed(
@ -242,11 +246,20 @@ def test_install_overwrite_multiple(
install('cmake')
ld_manifest = os.path.join(libdwarf.prefix,
spack.store.layout.metadata_dir,
spack.store.layout.manifest_file_name)
assert os.path.exists(libdwarf.prefix)
expected_libdwarf_md5 = fs.hash_directory(libdwarf.prefix)
expected_libdwarf_md5 = fs.hash_directory(libdwarf.prefix,
ignore=[ld_manifest])
cm_manifest = os.path.join(cmake.prefix,
spack.store.layout.metadata_dir,
spack.store.layout.manifest_file_name)
assert os.path.exists(cmake.prefix)
expected_cmake_md5 = fs.hash_directory(cmake.prefix)
expected_cmake_md5 = fs.hash_directory(cmake.prefix, ignore=[cm_manifest])
# Modify the first installation to be sure the content is not the same
# as the one after we reinstalled
@ -255,8 +268,8 @@ def test_install_overwrite_multiple(
with open(os.path.join(cmake.prefix, 'only_in_old'), 'w') as f:
f.write('This content is here to differentiate installations.')
bad_libdwarf_md5 = fs.hash_directory(libdwarf.prefix)
bad_cmake_md5 = fs.hash_directory(cmake.prefix)
bad_libdwarf_md5 = fs.hash_directory(libdwarf.prefix, ignore=[ld_manifest])
bad_cmake_md5 = fs.hash_directory(cmake.prefix, ignore=[cm_manifest])
assert bad_libdwarf_md5 != expected_libdwarf_md5
assert bad_cmake_md5 != expected_cmake_md5
@ -264,10 +277,13 @@ def test_install_overwrite_multiple(
install('--overwrite', '-y', 'libdwarf', 'cmake')
assert os.path.exists(libdwarf.prefix)
assert os.path.exists(cmake.prefix)
assert fs.hash_directory(libdwarf.prefix) == expected_libdwarf_md5
assert fs.hash_directory(cmake.prefix) == expected_cmake_md5
assert fs.hash_directory(libdwarf.prefix) != bad_libdwarf_md5
assert fs.hash_directory(cmake.prefix) != bad_cmake_md5
ld_hash = fs.hash_directory(libdwarf.prefix, ignore=[ld_manifest])
cm_hash = fs.hash_directory(cmake.prefix, ignore=[cm_manifest])
assert ld_hash == expected_libdwarf_md5
assert cm_hash == expected_cmake_md5
assert ld_hash != bad_libdwarf_md5
assert cm_hash != bad_cmake_md5
@pytest.mark.usefixtures(

View File

@ -0,0 +1,89 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tests for the `spack verify` command"""
import os
import llnl.util.filesystem as fs
import spack.util.spack_json as sjson
import spack.verify
import spack.spec
import spack.store
from spack.main import SpackCommand
verify = SpackCommand('verify')
install = SpackCommand('install')
def test_single_file_verify_cmd(tmpdir):
    # Test the verify command interface to verifying a single file.
    filedir = os.path.join(str(tmpdir), 'a', 'b', 'c', 'd')
    filepath = os.path.join(filedir, 'file')
    metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)

    fs.mkdirp(filedir)
    fs.mkdirp(metadir)

    with open(filepath, 'w') as f:
        f.write("I'm a file")

    data = spack.verify.create_manifest_entry(filepath)

    # Hand-write a manifest covering only this file so the command
    # discovers it via the fake metadata directory.
    manifest_file = os.path.join(metadir,
                                 spack.store.layout.manifest_file_name)

    with open(manifest_file, 'w') as f:
        sjson.dump({filepath: data}, f)

    # An unmodified file produces no output from `spack verify -f`.
    results = verify('-f', filepath, fail_on_error=False)
    print(results)
    assert not results

    # Rewrite the contents (and reset the timestamp) to trigger errors.
    os.utime(filepath, (0, 0))
    with open(filepath, 'w') as f:
        f.write("I changed.")

    results = verify('-f', filepath, fail_on_error=False)

    # 'mtime' is only expected when the rewrite landed on a different
    # stat timestamp than the one recorded in the manifest.
    expected = ['hash']
    mtime = os.stat(filepath).st_mtime
    if mtime != data['time']:
        expected.append('mtime')

    assert results
    assert filepath in results
    assert all(x in results for x in expected)

    # -j emits the same errors as machine-readable json.
    results = verify('-fj', filepath, fail_on_error=False)
    res = sjson.load(results)
    assert len(res) == 1

    errors = res.pop(filepath)
    assert sorted(errors) == sorted(expected)
def test_single_spec_verify_cmd(tmpdir, mock_packages, mock_archive,
                                mock_fetch, config, install_mockery):
    # Test the verify command interface to verify a single spec
    install('libelf')
    s = spack.spec.Spec('libelf').concretized()
    prefix = s.prefix
    hash = s.dag_hash()

    # A freshly installed package verifies cleanly (no output).
    results = verify('/%s' % hash, fail_on_error=False)
    assert not results

    # Adding a file to the prefix must be reported as 'added'.
    new_file = os.path.join(prefix, 'new_file_for_verify_test')
    with open(new_file, 'w') as f:
        f.write('New file')

    results = verify('/%s' % hash, fail_on_error=False)
    assert new_file in results
    assert 'added' in results

    # -j emits the same error as machine-readable json.
    results = verify('-j', '/%s' % hash, fail_on_error=False)
    res = sjson.load(results)
    assert len(res) == 1
    assert res[new_file] == ['added']

View File

@ -0,0 +1,232 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Tests for the `spack.verify` module"""
import os
import shutil
import llnl.util.filesystem as fs
import spack.util.spack_json as sjson
import spack.verify
import spack.spec
import spack.store
def test_link_manifest_entry(tmpdir):
    # Test that symlinks are properly checked against the manifest.
    # Test that the appropriate errors are generated when the check fails.
    file = str(tmpdir.join('file'))
    open(file, 'a').close()
    link = str(tmpdir.join('link'))
    os.symlink(file, link)

    data = spack.verify.create_manifest_entry(link)
    assert data['type'] == 'link'
    assert data['dest'] == file
    assert all(x in data for x in ('mode', 'owner', 'group'))

    # An unmodified link verifies cleanly.
    results = spack.verify.check_entry(link, data)
    assert not results.has_errors()

    # A manifest type mismatch is reported as a single 'type' error.
    data['type'] = 'garbage'

    results = spack.verify.check_entry(link, data)
    assert results.has_errors()
    assert link in results.errors
    assert results.errors[link] == ['type']

    data['type'] = 'link'

    # Retargeting the symlink to another file is reported as 'link'.
    file2 = str(tmpdir.join('file2'))
    open(file2, 'a').close()
    os.remove(link)
    os.symlink(file2, link)

    results = spack.verify.check_entry(link, data)
    assert results.has_errors()
    assert link in results.errors
    assert results.errors[link] == ['link']
def test_dir_manifest_entry(tmpdir):
    # Test that directories are properly checked against the manifest.
    # Test that the appropriate errors are generated when the check fails.
    dirent = str(tmpdir.join('dir'))
    fs.mkdirp(dirent)

    data = spack.verify.create_manifest_entry(dirent)
    assert data['type'] == 'dir'
    assert all(x in data for x in ('mode', 'owner', 'group'))

    # An unmodified directory verifies cleanly.
    results = spack.verify.check_entry(dirent, data)
    assert not results.has_errors()

    # A manifest type mismatch is reported as a single 'type' error.
    data['type'] = 'garbage'

    results = spack.verify.check_entry(dirent, data)
    assert results.has_errors()
    assert dirent in results.errors
    assert results.errors[dirent] == ['type']
def test_file_manifest_entry(tmpdir):
    # Test that files are properly checked against the manifest.
    # Test that the appropriate errors are generated when the check fails.
    orig_str = 'This is a file'
    new_str = 'The file has changed'

    file = str(tmpdir.join('dir'))
    with open(file, 'w') as f:
        f.write(orig_str)

    data = spack.verify.create_manifest_entry(file)
    assert data['type'] == 'file'
    assert data['size'] == len(orig_str)
    assert all(x in data for x in ('mode', 'owner', 'group'))

    # An unmodified file verifies cleanly.
    results = spack.verify.check_entry(file, data)
    assert not results.has_errors()

    # A manifest type mismatch is reported as a single 'type' error.
    data['type'] = 'garbage'

    results = spack.verify.check_entry(file, data)
    assert results.has_errors()
    assert file in results.errors
    assert results.errors[file] == ['type']

    data['type'] = 'file'

    # Rewriting the contents changes size and hash; 'mtime' is only
    # expected when the rewrite landed on a different stat timestamp.
    with open(file, 'w') as f:
        f.write(new_str)

    results = spack.verify.check_entry(file, data)

    expected = ['size', 'hash']
    mtime = os.stat(file).st_mtime
    if mtime != data['time']:
        expected.append('mtime')

    assert results.has_errors()
    assert file in results.errors
    assert sorted(results.errors[file]) == sorted(expected)
def test_check_chmod_manifest_entry(tmpdir):
    # Permission changes made after manifest creation must be reported
    # as a 'mode' error for that path.
    path = str(tmpdir.join('dir'))
    with open(path, 'w') as f:
        f.write('This is a file')

    data = spack.verify.create_manifest_entry(path)

    # Flip the lowest mode bit so on-disk permissions disagree with
    # what the manifest recorded.
    os.chmod(path, data['mode'] - 1)

    results = spack.verify.check_entry(path, data)
    assert results.has_errors()
    assert path in results.errors
    assert results.errors[path] == ['mode']
def test_check_prefix_manifest(tmpdir):
    # Test the verification of an entire prefix and its contents
    prefix_path = tmpdir.join('prefix')
    prefix = str(prefix_path)

    spec = spack.spec.Spec('libelf')
    spec._mark_concrete()
    spec.prefix = prefix

    # A prefix with no manifest at all is reported on the prefix itself.
    results = spack.verify.check_spec_manifest(spec)
    assert results.has_errors()
    assert prefix in results.errors
    assert results.errors[prefix] == ['manifest missing']

    metadata_dir = str(prefix_path.join('.spack'))
    bin_dir = str(prefix_path.join('bin'))
    other_dir = str(prefix_path.join('other'))

    for d in (metadata_dir, bin_dir, other_dir):
        fs.mkdirp(d)

    file = os.path.join(other_dir, 'file')
    with open(file, 'w') as f:
        f.write("I'm a little file short and stout")

    link = os.path.join(bin_dir, 'run')
    os.symlink(file, link)

    # Writing a manifest over the populated prefix makes it verify
    # cleanly.
    spack.verify.write_manifest(spec)
    results = spack.verify.check_spec_manifest(spec)
    assert not results.has_errors()

    # Removing a manifested entry and adding an unmanifested one must
    # be reported as 'deleted' and 'added' respectively -- and nothing
    # else.
    os.remove(link)
    malware = os.path.join(metadata_dir, 'hiddenmalware')
    with open(malware, 'w') as f:
        f.write("Foul evil deeds")

    results = spack.verify.check_spec_manifest(spec)
    assert results.has_errors()
    assert all(x in results.errors for x in (malware, link))
    assert len(results.errors) == 2

    assert results.errors[link] == ['deleted']
    assert results.errors[malware] == ['added']

    # Unparseable manifest contents are reported as corruption.
    manifest_file = os.path.join(spec.prefix,
                                 spack.store.layout.metadata_dir,
                                 spack.store.layout.manifest_file_name)
    with open(manifest_file, 'w') as f:
        f.write("{This) string is not proper json")

    results = spack.verify.check_spec_manifest(spec)
    assert results.has_errors()
    assert results.errors[spec.prefix] == ['manifest corrupted']
def test_single_file_verification(tmpdir):
    # Test the API to verify a single file, including finding the package
    # to which it belongs
    filedir = os.path.join(str(tmpdir), 'a', 'b', 'c', 'd')
    filepath = os.path.join(filedir, 'file')
    metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)

    fs.mkdirp(filedir)
    fs.mkdirp(metadir)

    with open(filepath, 'w') as f:
        f.write("I'm a file")

    data = spack.verify.create_manifest_entry(filepath)

    # Hand-write a manifest covering only this file; check_file_manifest
    # finds it by walking up to the fake metadata directory.
    manifest_file = os.path.join(metadir,
                                 spack.store.layout.manifest_file_name)

    with open(manifest_file, 'w') as f:
        sjson.dump({filepath: data}, f)

    # An unmodified file verifies cleanly.
    results = spack.verify.check_file_manifest(filepath)
    assert not results.has_errors()

    # Rewrite the contents (and reset the timestamp) to trigger errors.
    os.utime(filepath, (0, 0))
    with open(filepath, 'w') as f:
        f.write("I changed.")

    results = spack.verify.check_file_manifest(filepath)

    # 'mtime' is only expected when the rewrite landed on a different
    # stat timestamp than the one recorded in the manifest.
    expected = ['hash']
    mtime = os.stat(filepath).st_mtime
    if mtime != data['time']:
        expected.append('mtime')

    assert results.has_errors()
    assert filepath in results.errors
    assert sorted(results.errors[filepath]) == sorted(expected)

    # With the metadata directory gone, no package owns the file.
    shutil.rmtree(metadir)
    results = spack.verify.check_file_manifest(filepath)
    assert results.has_errors()
    assert results.errors[filepath] == ['not owned by any package']

244
lib/spack/spack/verify.py Normal file
View File

@ -0,0 +1,244 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import hashlib
import base64
import sys
import llnl.util.tty as tty
import spack.util.spack_json as sjson
import spack.util.file_permissions as fp
import spack.store
import spack.filesystem_view
def compute_hash(path):
    """Return the base32-encoded sha1 digest of the file at ``path``."""
    with open(path, 'rb') as f:
        contents = f.read()
    digest = base64.b32encode(hashlib.sha1(contents).digest())
    # b32encode returns bytes; callers expect str on python 3.
    if sys.version_info[0] >= 3:
        digest = digest.decode()
    return digest
def create_manifest_entry(path):
    """Build the manifest record for one filesystem entry.

    Every entry records mode, owner, and group. Links additionally
    record their destination; regular files record content hash,
    modification time, and size.
    """
    stat = os.stat(path)

    # NOTE(review): os.stat follows symlinks, so for links the mode and
    # ownership recorded here are those of the target -- confirm intended.
    data = {
        'mode': stat.st_mode,
        'owner': stat.st_uid,
        'group': stat.st_gid,
    }

    if os.path.islink(path):
        data['type'] = 'link'
        data['dest'] = os.readlink(path)
    elif os.path.isdir(path):
        data['type'] = 'dir'
    else:
        data['type'] = 'file'
        data['hash'] = compute_hash(path)
        data['time'] = stat.st_mtime
        data['size'] = stat.st_size

    return data
def write_manifest(spec):
    """Create the installation manifest for ``spec`` unless one exists.

    A manifest shipped inside a binary package takes precedence, so
    nothing is written when the file is already present. The manifest
    maps every path under the prefix (plus the prefix itself) to its
    recorded attributes.
    """
    manifest_file = os.path.join(spec.prefix,
                                 spack.store.layout.metadata_dir,
                                 spack.store.layout.manifest_file_name)

    if os.path.exists(manifest_file):
        return

    tty.debug("Writing manifest file: No manifest from binary")

    manifest = {}
    for root, dirs, files in os.walk(spec.prefix):
        for entry in list(dirs + files):
            path = os.path.join(root, entry)
            manifest[path] = create_manifest_entry(path)
    manifest[spec.prefix] = create_manifest_entry(spec.prefix)

    with open(manifest_file, 'w') as f:
        sjson.dump(manifest, f)

    # Manifest permissions must match the package's configured policy.
    fp.set_permissions_by_spec(manifest_file, spec)
def check_entry(path, data):
    """Compare the filesystem entry at ``path`` with its manifest
    record ``data`` and return a VerificationResults of discrepancies.

    An empty record means the path was absent from the manifest, i.e.
    it was added after installation.
    """
    results = VerificationResults()

    if not data:
        # Not present in the manifest at all.
        results.add_error(path, 'added')
        return results

    stat = os.stat(path)

    # Attributes recorded for every entry type.
    for field, actual, recorded in (('mode', stat.st_mode, data['mode']),
                                    ('owner', stat.st_uid, data['owner']),
                                    ('group', stat.st_gid, data['group'])):
        if actual != recorded:
            results.add_error(path, field)

    if os.path.islink(path):
        # Links: recorded as such, and still pointing where they did.
        if data['type'] != 'link':
            results.add_error(path, 'type')
        if os.readlink(path) != data.get('dest', ''):
            results.add_error(path, 'link')
    elif os.path.isdir(path):
        # Directories: only the type itself to confirm.
        if data['type'] != 'dir':
            results.add_error(path, 'type')
    else:
        # Regular files: size, mtime, type, and content hash.
        if stat.st_size != data['size']:
            results.add_error(path, 'size')
        if stat.st_mtime != data['time']:
            results.add_error(path, 'mtime')
        if data['type'] != 'file':
            results.add_error(path, 'type')
        if compute_hash(path) != data.get('hash', ''):
            results.add_error(path, 'hash')

    return results
def check_file_manifest(file):
    """Verify a single file against the manifest of the package that
    owns it.

    Walks up the directory tree from ``file`` looking for a directory
    that contains a Spack metadata dir. Reports an error when no owning
    package is found, when the manifest is missing or unreadable, or
    when the file fails its manifest checks.
    """
    dirname = os.path.dirname(file)

    results = VerificationResults()
    while spack.store.layout.metadata_dir not in os.listdir(dirname):
        parent = os.path.dirname(dirname)
        # BUG FIX: stop when dirname stops changing rather than
        # comparing against os.path.sep -- the latter never matches for
        # relative paths or Windows drive roots, causing an infinite
        # loop (or a crash on os.listdir('')).
        if parent == dirname:
            results.add_error(file, 'not owned by any package')
            return results
        dirname = parent

    manifest_file = os.path.join(dirname,
                                 spack.store.layout.metadata_dir,
                                 spack.store.layout.manifest_file_name)

    if not os.path.exists(manifest_file):
        results.add_error(file, "manifest missing")
        return results

    try:
        with open(manifest_file, 'r') as f:
            manifest = sjson.load(f)
    except Exception:
        results.add_error(file, "manifest corrupted")
        return results

    if file in manifest:
        results += check_entry(file, manifest[file])
    else:
        results.add_error(file, 'not owned by any package')
    return results
def check_spec_manifest(spec):
    """Verify every entry in the installation prefix of ``spec``
    against its installation manifest.

    Reports 'manifest missing' or 'manifest corrupted' on the prefix
    when the manifest cannot be read, 'added'/'deleted' for entries
    present in only one of {filesystem, manifest}, and per-attribute
    errors for entries that differ. Entries belonging to active
    extensions are skipped.
    """
    prefix = spec.prefix

    results = VerificationResults()
    manifest_file = os.path.join(prefix,
                                 spack.store.layout.metadata_dir,
                                 spack.store.layout.manifest_file_name)

    if not os.path.exists(manifest_file):
        results.add_error(prefix, "manifest missing")
        return results

    try:
        with open(manifest_file, 'r') as f:
            manifest = sjson.load(f)
    except Exception:
        results.add_error(prefix, "manifest corrupted")
        return results

    # Get extensions active in spec
    view = spack.filesystem_view.YamlFilesystemView(prefix,
                                                    spack.store.layout)
    active_exts = view.extensions_layout.extension_map(spec).values()

    ext_file = ''
    if active_exts:
        # No point checking contents of this file as it is the only source of
        # truth for that information.
        ext_file = view.extensions_layout.extension_file_path(spec)

    def is_extension_artifact(p):
        # True when p is a link into an active extension's prefix, or an
        # unmanifested directory containing only such artifacts.
        if os.path.islink(p):
            if any(os.readlink(p).startswith(e.prefix)
                   for e in active_exts):
                # This file is linked in by an extension. Belongs to extension
                return True
        elif os.path.isdir(p) and p not in manifest:
            if all(is_extension_artifact(os.path.join(p, f))
                   for f in os.listdir(p)):
                return True
        return False

    for root, dirs, files in os.walk(prefix):
        for entry in list(dirs + files):
            path = os.path.join(root, entry)

            # Do not check links from prefix to active extension
            # TODO: make this stricter for non-linux systems that use symlink
            # permissions
            # Do not check directories that only exist for extensions
            if is_extension_artifact(path):
                continue

            # Do not check manifest file. Can't store your own hash
            # Nothing to check for ext_file
            if path == manifest_file or path == ext_file:
                continue

            # Entries popped here have been checked; anything left in
            # manifest afterwards has been deleted from disk.
            data = manifest.pop(path, {})
            results += check_entry(path, data)

    results += check_entry(prefix, manifest.pop(prefix, {}))

    for path in manifest:
        results.add_error(path, 'deleted')

    return results
class VerificationResults(object):
    """Accumulates per-path verification errors.

    ``errors`` maps a filesystem path to the list of attribute names
    (e.g. 'hash', 'mode') that failed verification for that path.
    """

    def __init__(self):
        self.errors = {}

    def add_error(self, path, field):
        # Append to this path's error list, creating it on first use.
        self.errors.setdefault(path, []).append(field)

    def __add__(self, vr):
        # NOTE: merges in place and returns self, so ``results += vr``
        # accumulates into the left-hand object (original semantics).
        for path, fields in vr.errors.items():
            self.errors.setdefault(path, []).extend(fields)
        return self

    def has_errors(self):
        return bool(self.errors)

    def json_string(self):
        return sjson.dump(self.errors)

    def __str__(self):
        if not self.errors:
            return 'No Errors'
        lines = []
        for path, fields in self.errors.items():
            lines.append('%s verification failed with error(s):' % path)
            lines.extend('    %s' % error for error in fields)
        return '\n'.join(lines) + '\n'