env: environments index specs by full DAG w/build deps
- ensure that Spec._build_hash attr is defined
- add logic to compute _build_hash when it is not already computed (e.g. for specs prior to this PR)
- add test to ensure that different instances of a build dep are preserved
- test conversion of old env lockfile format to new format
- tests: avoid view creation, DAG display in tests using MockPackage
- add regression test for more-general bug also fixed by this PR
- update lockfile version since the way we are maintaining hashes has changed
- write out backup for version-1 lockfiles and test that

A short usage sketch of the two hash flavors follows the commit metadata below.
parent db7127c56d
commit 0715b512a1
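The following is a minimal sketch (not part of the diff) of how the two hash flavors are intended to be used after this change: Spec.dag_hash() still excludes build dependencies, while Spec.dag_hash(all_deps=True) covers the full DAG and is the key an Environment now uses to index its concrete specs. The package name 'zlib' and environment name 'test' are placeholders, and the snippet assumes a working Spack session with such an environment already concretized.

    import spack.environment as ev
    from spack.spec import Spec

    # Concretize a spec; both hashes are cached on the concrete spec
    # (_hash and _build_hash) the first time dag_hash() is called.
    spec = Spec('zlib').concretized()            # 'zlib' is a placeholder package
    dag_hash = spec.dag_hash()                   # excludes build deps (old v1 lockfile key)
    build_hash = spec.dag_hash(all_deps=True)    # includes build deps (new v2 lockfile key)

    # An environment written with lockfile_format_version = 2 indexes
    # concrete specs by the full-DAG (build) hash.
    env = ev.read('test')                        # 'test' is a placeholder environment
    for key, concrete in env.specs_by_hash.items():
        assert key == concrete.dag_hash(all_deps=True)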
@@ -72,7 +72,7 @@
 valid_environment_name_re = r'^\w[\w-]*$'
 
 #: version of the lockfile format. Must increase monotonically.
-lockfile_format_version = 1
+lockfile_format_version = 2
 
 #: legal first keys in the spack.yaml manifest file
 env_schema_keys = ('spack', 'env')
@@ -533,11 +533,17 @@ def __init__(self, path, init_file=None, with_view=None):
 
         if os.path.exists(self.lock_path):
             with open(self.lock_path) as f:
-                self._read_lockfile(f)
+                read_lock_version = self._read_lockfile(f)
             if default_manifest:
                 # No manifest, set user specs from lockfile
                 self._set_user_specs_from_lockfile()
 
+            if read_lock_version == 1:
+                tty.debug(
+                    "Storing backup of old lockfile {0} at {1}".format(
+                        self.lock_path, self._lock_backup_v1_path))
+                shutil.copy(self.lock_path, self._lock_backup_v1_path)
+
         if with_view is False:
             self.views = {}
         elif with_view is True:
@@ -638,6 +644,11 @@ def lock_path(self):
         """Path to spack.lock file in this environment."""
         return os.path.join(self.path, lockfile_name)
 
+    @property
+    def _lock_backup_v1_path(self):
+        """Path to backup of v1 lockfile before conversion to v2"""
+        return self.lock_path + '.backup.v1'
+
     @property
     def env_subdir_path(self):
         """Path to directory where the env stores repos, logs, views."""
@@ -809,7 +820,7 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
                 del self.concretized_order[i]
                 del self.specs_by_hash[dag_hash]
 
-    def concretize(self, force=False):
+    def concretize(self, force=False, _display=True):
         """Concretize user_specs in this environment.
 
         Only concretizes specs that haven't been concretized yet unless
@@ -850,12 +861,13 @@ def concretize(self, force=False):
             concrete = _concretize_from_constraints(uspec_constraints)
             self._add_concrete_spec(uspec, concrete)
 
-            # Display concretized spec to the user
-            sys.stdout.write(concrete.tree(
-                recurse_dependencies=True,
-                status_fn=spack.spec.Spec.install_status,
-                hashlen=7, hashes=True)
-            )
+            if _display:
+                # Display concretized spec to the user
+                sys.stdout.write(concrete.tree(
+                    recurse_dependencies=True,
+                    status_fn=spack.spec.Spec.install_status,
+                    hashlen=7, hashes=True)
+                )
 
     def install(self, user_spec, concrete_spec=None, **install_args):
         """Install a single spec into an environment.
@@ -872,7 +884,7 @@ def install(self, user_spec, concrete_spec=None, **install_args):
             # spec might be in the user_specs, but not installed.
             # TODO: Redo name-based comparison for old style envs
             spec = next(s for s in self.user_specs if s.satisfies(user_spec))
-            concrete = self.specs_by_hash.get(spec.dag_hash())
+            concrete = self.specs_by_hash.get(spec.dag_hash(all_deps=True))
             if not concrete:
                 concrete = spec.concretized()
                 self._add_concrete_spec(spec, concrete)
@@ -984,7 +996,7 @@ def _add_concrete_spec(self, spec, concrete, new=True):
         # update internal lists of specs
         self.concretized_user_specs.append(spec)
 
-        h = concrete.dag_hash()
+        h = concrete.dag_hash(all_deps=True)
        self.concretized_order.append(h)
        self.specs_by_hash[h] = concrete
 
@@ -1013,6 +1025,10 @@ def install_all(self, args=None):
 
     def all_specs_by_hash(self):
         """Map of hashes to spec for all specs in this environment."""
+        # Note this uses dag-hashes calculated without build deps as keys,
+        # whereas the environment tracks specs based on dag-hashes calculated
+        # with all dependencies. This function should not be used by an
+        # Environment object for management of its own data structures
         hashes = {}
         for h in self.concretized_order:
             specs = self.specs_by_hash[h].traverse(deptype=('link', 'run'))
@@ -1095,9 +1111,11 @@ def _to_lockfile_dict(self):
         concrete_specs = {}
         for spec in self.specs_by_hash.values():
             for s in spec.traverse():
-                dag_hash = s.dag_hash()
-                if dag_hash not in concrete_specs:
-                    concrete_specs[dag_hash] = s.to_node_dict(all_deps=True)
+                dag_hash_all = s.dag_hash(all_deps=True)
+                if dag_hash_all not in concrete_specs:
+                    spec_dict = s.to_node_dict(all_deps=True)
+                    spec_dict[s.name]['hash'] = s.dag_hash()
+                    concrete_specs[dag_hash_all] = spec_dict
 
         hash_spec_list = zip(
             self.concretized_order, self.concretized_user_specs)
@@ -1126,6 +1144,7 @@ def _read_lockfile(self, file_or_json):
         """Read a lockfile from a file or from a raw string."""
         lockfile_dict = sjson.load(file_or_json)
         self._read_lockfile_dict(lockfile_dict)
+        return lockfile_dict['_meta']['lockfile-version']
 
     def _read_lockfile_dict(self, d):
         """Read a lockfile dictionary into this environment."""
@@ -1146,8 +1165,25 @@ def _read_lockfile_dict(self, d):
                 specs_by_hash[dag_hash]._add_dependency(
                     specs_by_hash[dep_hash], deptypes)
 
-        self.specs_by_hash = dict(
-            (x, y) for x, y in specs_by_hash.items() if x in root_hashes)
+        # If we are reading an older lockfile format (which uses dag hashes
+        # that exclude build deps), we use this to convert the old
+        # concretized_order to the full hashes (preserving the order)
+        old_hash_to_new = {}
+        self.specs_by_hash = {}
+        for _, spec in specs_by_hash.items():
+            dag_hash = spec.dag_hash()
+            build_hash = spec.dag_hash(all_deps=True)
+            if dag_hash in root_hashes:
+                old_hash_to_new[dag_hash] = build_hash
+
+            if (dag_hash in root_hashes or build_hash in root_hashes):
+                self.specs_by_hash[build_hash] = spec
+
+        if old_hash_to_new:
+            # Replace any older hashes in concretized_order with hashes
+            # that include build deps
+            self.concretized_order = [
+                old_hash_to_new.get(h, h) for h in self.concretized_order]
 
     def write(self):
         """Writes an in-memory environment to its location on disk.
@@ -927,6 +927,7 @@ def __init__(self, spec_like=None,
         self.namespace = None
 
         self._hash = None
+        self._build_hash = None
         self._cmp_key_cache = None
         self._package = None
 
@@ -1285,22 +1286,35 @@ def prefix(self):
     def prefix(self, value):
         self._prefix = Prefix(value)
 
-    def dag_hash(self, length=None):
+    def dag_hash(self, length=None, all_deps=False):
         """Return a hash of the entire spec DAG, including connectivity."""
-        if self._hash:
-            return self._hash[:length]
-        else:
-            yaml_text = syaml.dump(
-                self.to_node_dict(), default_flow_style=True, width=maxint)
-            sha = hashlib.sha1(yaml_text.encode('utf-8'))
+        if not self.concrete:
+            h = self._dag_hash(all_deps=all_deps)
+            # An upper bound of None is equivalent to len(h). An upper bound of
+            # 0 produces the empty string
+            return h[:length]
 
-            b32_hash = base64.b32encode(sha.digest()).lower()
-            if sys.version_info[0] >= 3:
-                b32_hash = b32_hash.decode('utf-8')
+        if not self._hash:
+            self._hash = self._dag_hash(all_deps=False)
 
-            if self.concrete:
-                self._hash = b32_hash
-            return b32_hash[:length]
+        if not self._build_hash:
+            self._build_hash = self._dag_hash(all_deps=True)
+
+        h = self._build_hash if all_deps else self._hash
+        return h[:length]
+
+    def _dag_hash(self, all_deps=False):
+        yaml_text = syaml.dump(
+            self.to_node_dict(all_deps=all_deps),
+            default_flow_style=True,
+            width=maxint)
+        sha = hashlib.sha1(yaml_text.encode('utf-8'))
+
+        b32_hash = base64.b32encode(sha.digest()).lower()
+        if sys.version_info[0] >= 3:
+            b32_hash = b32_hash.decode('utf-8')
+
+        return b32_hash
 
     def dag_hash_bit_prefix(self, bits):
         """Get the first <bits> bits of the DAG hash as an integer type."""
@@ -1373,7 +1387,7 @@ def to_node_dict(self, hash_function=None, all_deps=False):
         deps = self.dependencies_dict(deptype=deptypes)
         if deps:
             if hash_function is None:
-                hash_function = lambda s: s.dag_hash()
+                hash_function = lambda s: s.dag_hash(all_deps=all_deps)
             d['dependencies'] = syaml_dict([
                 (name,
                  syaml_dict([
@@ -1393,6 +1407,8 @@ def to_dict(self, all_deps=False):
         for s in self.traverse(order='pre', deptype=deptypes):
             node = s.to_node_dict(all_deps=all_deps)
             node[s.name]['hash'] = s.dag_hash()
+            if all_deps:
+                node[s.name]['build_hash'] = s.dag_hash(all_deps=True)
             node_list.append(node)
 
         return syaml_dict([('spec', node_list)])
@@ -1412,6 +1428,7 @@ def from_node_dict(node):
         spec = Spec(name, full_hash=node.get('full_hash', None))
         spec.namespace = node.get('namespace', None)
         spec._hash = node.get('hash', None)
+        spec._build_hash = node.get('build_hash', None)
 
         if 'version' in node or 'versions' in node:
             spec.versions = VersionList.from_dict(node)
@@ -2812,11 +2829,13 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
 
         if caches:
             self._hash = other._hash
+            self._build_hash = other._build_hash
             self._cmp_key_cache = other._cmp_key_cache
             self._normal = other._normal
             self._full_hash = other._full_hash
         else:
             self._hash = None
+            self._build_hash = None
             self._cmp_key_cache = None
             self._normal = False
             self._full_hash = None
@@ -15,7 +15,10 @@
 from spack.cmd.env import _env_create
 from spack.spec import Spec
 from spack.main import SpackCommand
+
 from spack.spec_list import SpecListError
+from spack.test.conftest import MockPackage, MockPackageMultiRepo
+import spack.util.spack_json as sjson
 
 
 # everything here uses the mock_env_path
@@ -625,6 +628,175 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
     assert not test.user_specs
 
 
+def create_v1_lockfile_dict(roots, all_specs):
+    test_lockfile_dict = {
+        "_meta": {
+            "lockfile-version": 1,
+            "file-type": "spack-lockfile"
+        },
+        "roots": list(
+            {
+                "hash": s.dag_hash(),
+                "spec": s.name
+            } for s in roots
+        ),
+        # Version one lockfiles use the dag hash without build deps as keys,
+        # but they write out the full node dict (including build deps)
+        "concrete_specs": dict(
+            (s.dag_hash(), s.to_node_dict(all_deps=True)) for s in all_specs
+        )
+    }
+    return test_lockfile_dict
+
+
+@pytest.mark.usefixtures('config')
+def test_read_old_lock_and_write_new(tmpdir):
+    build_only = ('build',)
+
+    y = MockPackage('y', [], [])
+    x = MockPackage('x', [y], [build_only])
+
+    mock_repo = MockPackageMultiRepo([x, y])
+    with spack.repo.swap(mock_repo):
+        x = Spec('x')
+        x.concretize()
+
+        y = x['y']
+
+        test_lockfile_dict = create_v1_lockfile_dict([x], [x, y])
+
+        test_lockfile_path = str(tmpdir.join('test.lock'))
+        with open(test_lockfile_path, 'w') as f:
+            sjson.dump(test_lockfile_dict, stream=f)
+
+        _env_create('test', test_lockfile_path, with_view=False)
+
+        e = ev.read('test')
+        hashes = set(e._to_lockfile_dict()['concrete_specs'])
+        # When the lockfile is rewritten, it should adopt the new hash scheme
+        # which accounts for all dependencies, including build dependencies
+        assert hashes == set([
+            x.dag_hash(all_deps=True),
+            y.dag_hash(all_deps=True)])
+
+
+@pytest.mark.usefixtures('config')
+def test_read_old_lock_creates_backup(tmpdir):
+    """When reading a version-1 lockfile, make sure that a backup of that file
+    is created.
+    """
+    y = MockPackage('y', [], [])
+
+    mock_repo = MockPackageMultiRepo([y])
+    with spack.repo.swap(mock_repo):
+        y = Spec('y')
+        y.concretize()
+
+        test_lockfile_dict = create_v1_lockfile_dict([y], [y])
+
+        env_root = tmpdir.mkdir('test-root')
+        test_lockfile_path = str(env_root.join(ev.lockfile_name))
+        with open(test_lockfile_path, 'w') as f:
+            sjson.dump(test_lockfile_dict, stream=f)
+
+        e = ev.Environment(str(env_root))
+        assert os.path.exists(e._lock_backup_v1_path)
+        with open(e._lock_backup_v1_path, 'r') as backup_v1_file:
+            lockfile_dict_v1 = sjson.load(backup_v1_file)
+        # Make sure that the backup file follows the v1 hash scheme
+        assert y.dag_hash() in lockfile_dict_v1['concrete_specs']
+
+
+@pytest.mark.usefixtures('config')
+def test_indirect_build_dep():
+    """Simple case of X->Y->Z where Y is a build/link dep and Z is a
+    build-only dep. Make sure this concrete DAG is preserved when writing the
+    environment out and reading it back.
+    """
+    default = ('build', 'link')
+    build_only = ('build',)
+
+    z = MockPackage('z', [], [])
+    y = MockPackage('y', [z], [build_only])
+    x = MockPackage('x', [y], [default])
+
+    mock_repo = MockPackageMultiRepo([x, y, z])
+
+    def noop(*args):
+        pass
+    setattr(mock_repo, 'dump_provenance', noop)
+
+    with spack.repo.swap(mock_repo):
+        x_spec = Spec('x')
+        x_concretized = x_spec.concretized()
+
+        _env_create('test', with_view=False)
+        e = ev.read('test')
+        e.add(x_spec)
+        e.concretize(_display=False)
+        e.write()
+
+        e_read = ev.read('test')
+        x_env_hash, = e_read.concretized_order
+
+        x_env_spec = e_read.specs_by_hash[x_env_hash]
+        assert x_env_spec == x_concretized
+
+
+@pytest.mark.usefixtures('config')
+def test_store_different_build_deps():
+    r"""Ensure that an environment can store two instances of a build-only
+    Dependency:
+
+              x       y
+             /| (l)   | (b)
+        (b) |  y    z2
+             \| (b)        # noqa: W605
+              z1
+
+    """
+    default = ('build', 'link')
+    build_only = ('build',)
+
+    z = MockPackage('z', [], [])
+    y = MockPackage('y', [z], [build_only])
+    x = MockPackage('x', [y, z], [default, build_only])
+
+    mock_repo = MockPackageMultiRepo([x, y, z])
+
+    def noop(*args):
+        pass
+    setattr(mock_repo, 'dump_provenance', noop)
+
+    with spack.repo.swap(mock_repo):
+        y_spec = Spec('y ^z@3')
+        y_concretized = y_spec.concretized()
+
+        x_spec = Spec('x ^z@2')
+        x_concretized = x_spec.concretized()
+
+        # Even though x chose a different 'z', it should choose the same y
+        # according to the DAG hash (since build deps are excluded from
+        # comparison by default). Although the dag hashes are equal, the specs
+        # are not considered equal because they compare build deps.
+        assert x_concretized['y'].dag_hash() == y_concretized.dag_hash()
+
+        _env_create('test', with_view=False)
+        e = ev.read('test')
+        e.add(y_spec)
+        e.add(x_spec)
+        e.concretize(_display=False)
+        e.write()
+
+        e_read = ev.read('test')
+        y_env_hash, x_env_hash = e_read.concretized_order
+
+        y_read = e_read.specs_by_hash[y_env_hash]
+        x_read = e_read.specs_by_hash[x_env_hash]
+
+        assert x_read['z'] != y_read['z']
+
+
 def test_env_updates_view_install(
         tmpdir, mock_stage, mock_fetch, install_mockery):
     view_dir = tmpdir.mkdir('view')