Compare commits
3 Commits
develop ... bugfix/rou

Commit SHA1s: 307009f939, d7b9e73e12, 3b2563c561
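Taken together, the three commits thread a new boolean `direct` attribute through dependency-edge serialization: `to_node_dict` writes it into each edge's `parameters`, the specfile readers return it as a sixth element (hard-coding or defaulting to `True` for formats older than v5), and every `_add_dependency` call site forwards it. A self-contained sketch of that round trip, using invented stand-in names rather than Spack's real classes:

```python
# Stand-alone sketch of the writer/reader round trip; Edge and the two
# helpers below are hypothetical stand-ins, not Spack's real classes.
from typing import Any, Dict, List


class Edge:
    def __init__(self, deptypes: List[str], virtuals: List[str], direct: bool = True):
        self.deptypes = deptypes
        self.virtuals = virtuals
        self.direct = direct


def to_parameters(edge: Edge) -> Dict[str, Any]:
    # Writer side: always emit the new flag (as to_node_dict now does).
    return {"deptypes": edge.deptypes, "virtuals": edge.virtuals, "direct": edge.direct}


def from_parameters(params: Dict[str, Any]) -> Edge:
    # Reader side: files written before this change lack "direct", so fall
    # back to True (the same default SpecfileV5.extract_info_from_dep uses).
    return Edge(params["deptypes"], params["virtuals"], params.get("direct", True))


assert from_parameters(to_parameters(Edge(["link"], [], direct=False))).direct is False
assert from_parameters({"deptypes": ["link"], "virtuals": []}).direct is True
```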
@@ -778,7 +778,7 @@ def _assign_dependencies(
         spec_node_dict = spec_node_dict[spec.name]
         if "dependencies" in spec_node_dict:
             yaml_deps = spec_node_dict["dependencies"]
-            for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
+            for dname, dhash, dtypes, _, virtuals, direct in spec_reader.read_specfile_dep_specs(
                 yaml_deps
             ):
                 # It is important that we always check upstream installations in the same order,
@@ -797,7 +797,9 @@ def _assign_dependencies(
                     )
                     continue
 
-                spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
+                spec._add_dependency(
+                    child, depflag=dt.canonicalize(dtypes), virtuals=virtuals, direct=direct
+                )
 
 def _read_from_file(self, filename: pathlib.Path, *, reindex: bool = False) -> None:
     """Fill database from file, do not maintain old data.
@@ -543,7 +543,6 @@
         },
     }
 }
 
 """
 
 from .environment import (
@@ -2230,9 +2230,14 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
         # and add them to the spec, including build specs
         for lockfile_key, node_dict in json_specs_by_hash.items():
             name, data = reader.name_and_data(node_dict)
-            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
+            for _, dep_hash, deptypes, _, virtuals, direct in reader.dependencies_from_node_dict(
+                data
+            ):
                 specs_by_hash[lockfile_key]._add_dependency(
-                    specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
+                    specs_by_hash[dep_hash],
+                    depflag=dt.canonicalize(deptypes),
+                    virtuals=virtuals,
+                    direct=direct,
                 )
 
             if "build_spec" in node_dict:
@@ -2383,6 +2383,7 @@ def to_node_dict(self, hash=ht.dag_hash):
                 "parameters": {
                     "deptypes": dt.flag_to_tuple(dspec.depflag),
                     "virtuals": dspec.virtuals,
+                    "direct": dspec.direct,
                 },
             }
             for name, edges_for_name in sorted(deps.items())
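With the `to_node_dict` change above, each serialized edge's `parameters` block carries the flag alongside `deptypes` and `virtuals`. A hypothetical fragment of the resulting node dict, with the dependency name and hash invented for illustration:

```python
# Hypothetical serialized edge after this change; the dependency name and
# hash value are invented placeholders, not real output.
edge_entry = {
    "name": "zlib-ng",
    "hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",  # placeholder, not a real hash
    "parameters": {
        "deptypes": ("build", "link"),  # dt.flag_to_tuple(dspec.depflag)
        "virtuals": [],                 # dspec.virtuals
        "direct": True,                 # the new dspec.direct field
    },
}
```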
@@ -4890,7 +4891,7 @@ def _load(cls, data):
 
         # Pass 0: Determine hash type
         for node in nodes:
-            for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
+            for _, _, _, dhash_type, _, _ in cls.dependencies_from_node_dict(node):
                 any_deps = True
                 if dhash_type:
                     hash_type = dhash_type
@@ -4921,11 +4922,12 @@ def _load(cls, data):
         # Pass 2: Finish construction of all DAG edges (including build specs)
         for node_hash, node in hash_dict.items():
             node_spec = node["node_spec"]
-            for _, dhash, dtype, _, virtuals in cls.dependencies_from_node_dict(node):
+            for _, dhash, dtype, _, virtuals, direct in cls.dependencies_from_node_dict(node):
                 node_spec._add_dependency(
                     hash_dict[dhash]["node_spec"],
                     depflag=dt.canonicalize(dtype),
                     virtuals=virtuals,
+                    direct=direct,
                 )
             if "build_spec" in node.keys():
                 _, bhash, _ = cls.extract_build_spec_info_from_node_dict(node, hash_type=hash_type)
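The `_load` hunks above keep the existing multi-pass structure: an early pass inspects every node (now unpacking six elements per dependency), and a later pass wires edges by hash once all node specs exist. A reduced sketch of that node-then-edge strategy, with invented data shapes rather than Spack's real node format:

```python
# Reduced sketch of hash-keyed, two-phase DAG reconstruction; the dict
# shapes here are invented for illustration.
nodes = {
    "h1": {"name": "app", "deps": [("h2", True)]},  # (dep hash, direct flag)
    "h2": {"name": "lib", "deps": []},
}

# Phase 1: create every node before touching any edge.
specs = {h: {"name": n["name"], "edges": []} for h, n in nodes.items()}

# Phase 2: every hash now resolves, so edges can be attached in any order.
for h, n in nodes.items():
    for dep_hash, direct in n["deps"]:
        specs[h]["edges"].append((specs[dep_hash], direct))

assert specs["h1"]["edges"][0][0]["name"] == "lib"
```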
@@ -4966,9 +4968,9 @@ def load(cls, data):
         for node in nodes:
             # get dependency dict from the node.
             name, data = cls.name_and_data(node)
-            for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
+            for dname, _, dtypes, _, virtuals, direct in cls.dependencies_from_node_dict(data):
                 deps[name]._add_dependency(
-                    deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals
+                    deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals, direct=direct
                 )
 
         reconstruct_virtuals_on_edges(result)
@@ -5006,7 +5008,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
                     raise spack.error.SpecError("Couldn't parse dependency spec.")
             else:
                 raise spack.error.SpecError("Couldn't parse dependency types in spec.")
-            yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)
+            yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals), True
 
 
 class SpecfileV2(SpecfileReaderBase):
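The v1 format predates the flag entirely, so its reader yields a literal `True` as the new sixth element; every consumer of `read_specfile_dep_specs` now unpacks a 6-tuple. A minimal sketch of that widened contract, with a stand-in generator instead of the real reader:

```python
# Sketch of the widened 6-tuple contract; this generator is a stand-in
# for a v1-style reader, which cannot express non-direct edges.
def read_dep_specs(deps):
    for name, dep_hash in deps.items():
        yield name, dep_hash, ["link"], "dag_hash", [], True  # direct is always True


for dname, dhash, dtypes, hash_type, virtuals, direct in read_dep_specs({"zlib": "h0"}):
    assert direct is True
```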
@@ -5043,13 +5045,15 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
                 # new format: elements of dependency spec are keyed.
                 for h in ht.HASHES:
                     if h.name in elt:
-                        dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
+                        dep_hash, deptypes, hash_type, virtuals, direct = (
+                            cls.extract_info_from_dep(elt, h)
+                        )
                         break
                 else:  # We never determined a hash type...
                     raise spack.error.SpecError("Couldn't parse dependency spec.")
             else:
                 raise spack.error.SpecError("Couldn't parse dependency types in spec.")
-            result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
+            result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals), direct))
         return result
 
     @classmethod
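The `for h in ht.HASHES ... break / else:` pattern above relies on Python's for/else: the `else` suite runs only when the loop finishes without hitting `break`, i.e. when no known hash key matched. A small stand-alone demonstration with invented names, not Spack's real `ht.HASHES`:

```python
# Stand-alone demo of the for/else idiom; hash names and the edge dict
# are invented for illustration.
known_hashes = ["sha256", "md5"]
elt = {"type": ["link"]}  # edge dict with no recognized hash key

try:
    for h in known_hashes:
        if h in elt:
            dep_hash = elt[h]
            break
    else:  # loop never hit break: no hash type was determined
        raise ValueError("Couldn't parse dependency spec.")
except ValueError as err:
    print(err)  # -> Couldn't parse dependency spec.
```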
@@ -5057,7 +5061,8 @@ def extract_info_from_dep(cls, elt, hash):
         dep_hash, deptypes = elt[hash.name], elt["type"]
         hash_type = hash.name
         virtuals = []
-        return dep_hash, deptypes, hash_type, virtuals
+        direct = True
+        return dep_hash, deptypes, hash_type, virtuals, direct
 
     @classmethod
     def extract_build_spec_info_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
@@ -5078,7 +5083,8 @@ def extract_info_from_dep(cls, elt, hash):
         deptypes = elt["parameters"]["deptypes"]
         hash_type = hash.name
         virtuals = elt["parameters"]["virtuals"]
-        return dep_hash, deptypes, hash_type, virtuals
+        direct = True
+        return dep_hash, deptypes, hash_type, virtuals, direct
 
     @classmethod
     def load(cls, data):
@@ -5092,6 +5098,15 @@ class SpecfileV5(SpecfileV4):
     def legacy_compiler(cls, node):
         raise RuntimeError("The 'compiler' option is unexpected in specfiles at v5 or greater")
 
+    @classmethod
+    def extract_info_from_dep(cls, elt, hash):
+        dep_hash = elt[hash.name]
+        deptypes = elt["parameters"]["deptypes"]
+        hash_type = hash.name
+        virtuals = elt["parameters"]["virtuals"]
+        direct = elt["parameters"].get("direct", True)
+        return dep_hash, deptypes, hash_type, virtuals, direct
+
 
 #: Alias to the latest version of specfiles
 SpecfileLatest = SpecfileV5
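`SpecfileV5` overrides only `extract_info_from_dep`, following the pattern where each reader subclasses the previous version and `SpecfileLatest` aliases the newest. A reduced sketch of that versioning pattern; the class names mirror the idea but the bodies are stand-ins, not the real implementations:

```python
# Reduced sketch of the versioned-reader pattern; bodies are stand-ins.
class ReaderV4:
    @classmethod
    def extract_direct(cls, elt):
        return True  # v4 files never carry "direct"; every edge is direct


class ReaderV5(ReaderV4):
    @classmethod
    def extract_direct(cls, elt):
        # v5 may carry "direct"; keep the old default when it is absent.
        return elt["parameters"].get("direct", True)


ReaderLatest = ReaderV5  # alias, mirroring SpecfileLatest = SpecfileV5

assert ReaderLatest.extract_direct({"parameters": {}}) is True
assert ReaderLatest.extract_direct({"parameters": {"direct": False}}) is False
```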
@@ -494,6 +494,10 @@ def test_anchorify_2():
         "hdf5~~mpi++shared",
         "hdf5 cflags==-g foo==bar cxxflags==-O3",
         "hdf5 cflags=-g foo==bar cxxflags==-O3",
+        "hdf5%gcc",
+        "hdf5%cmake",
+        "hdf5^gcc",
+        "hdf5^cmake",
     ],
 )
 def test_pickle_roundtrip_for_abstract_specs(spec_str):
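The added parametrization cases extend the pickle round-trip test to spec strings using the `%` and `^` dependency sigils, the distinction the new `direct` flag records. A generic sketch of the round-trip check such a test performs, with plain tuples standing in for parsed `Spec` objects:

```python
# Generic pickle round-trip check; the tuples stand in for parsed Specs.
import pickle

for spec_like in [("hdf5", "%gcc"), ("hdf5", "^cmake")]:
    assert pickle.loads(pickle.dumps(spec_like)) == spec_like
```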