Compare commits

...

3 Commits

Author SHA1 Message Date
Gregory Becker
307009f939 revert bumping specfile version
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-05-06 13:06:41 +02:00
Gregory Becker
d7b9e73e12 style discrepancy between black@24 and black@25
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-05-06 13:06:41 +02:00
Gregory Becker
3b2563c561 spec.to_dict: round trip whether deps are direct
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-05-06 13:06:41 +02:00
5 changed files with 39 additions and 14 deletions

View File

@ -778,7 +778,7 @@ def _assign_dependencies(
spec_node_dict = spec_node_dict[spec.name] spec_node_dict = spec_node_dict[spec.name]
if "dependencies" in spec_node_dict: if "dependencies" in spec_node_dict:
yaml_deps = spec_node_dict["dependencies"] yaml_deps = spec_node_dict["dependencies"]
for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs( for dname, dhash, dtypes, _, virtuals, direct in spec_reader.read_specfile_dep_specs(
yaml_deps yaml_deps
): ):
# It is important that we always check upstream installations in the same order, # It is important that we always check upstream installations in the same order,
@ -797,7 +797,9 @@ def _assign_dependencies(
) )
continue continue
spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals) spec._add_dependency(
child, depflag=dt.canonicalize(dtypes), virtuals=virtuals, direct=direct
)
def _read_from_file(self, filename: pathlib.Path, *, reindex: bool = False) -> None: def _read_from_file(self, filename: pathlib.Path, *, reindex: bool = False) -> None:
"""Fill database from file, do not maintain old data. """Fill database from file, do not maintain old data.

View File

@ -543,7 +543,6 @@
}, },
} }
} }
""" """
from .environment import ( from .environment import (

View File

@ -2230,9 +2230,14 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
# and add them to the spec, including build specs # and add them to the spec, including build specs
for lockfile_key, node_dict in json_specs_by_hash.items(): for lockfile_key, node_dict in json_specs_by_hash.items():
name, data = reader.name_and_data(node_dict) name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data): for _, dep_hash, deptypes, _, virtuals, direct in reader.dependencies_from_node_dict(
data
):
specs_by_hash[lockfile_key]._add_dependency( specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals specs_by_hash[dep_hash],
depflag=dt.canonicalize(deptypes),
virtuals=virtuals,
direct=direct,
) )
if "build_spec" in node_dict: if "build_spec" in node_dict:

View File

@ -2383,6 +2383,7 @@ def to_node_dict(self, hash=ht.dag_hash):
"parameters": { "parameters": {
"deptypes": dt.flag_to_tuple(dspec.depflag), "deptypes": dt.flag_to_tuple(dspec.depflag),
"virtuals": dspec.virtuals, "virtuals": dspec.virtuals,
"direct": dspec.direct,
}, },
} }
for name, edges_for_name in sorted(deps.items()) for name, edges_for_name in sorted(deps.items())
@ -4890,7 +4891,7 @@ def _load(cls, data):
# Pass 0: Determine hash type # Pass 0: Determine hash type
for node in nodes: for node in nodes:
for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node): for _, _, _, dhash_type, _, _ in cls.dependencies_from_node_dict(node):
any_deps = True any_deps = True
if dhash_type: if dhash_type:
hash_type = dhash_type hash_type = dhash_type
@ -4921,11 +4922,12 @@ def _load(cls, data):
# Pass 2: Finish construction of all DAG edges (including build specs) # Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items(): for node_hash, node in hash_dict.items():
node_spec = node["node_spec"] node_spec = node["node_spec"]
for _, dhash, dtype, _, virtuals in cls.dependencies_from_node_dict(node): for _, dhash, dtype, _, virtuals, direct in cls.dependencies_from_node_dict(node):
node_spec._add_dependency( node_spec._add_dependency(
hash_dict[dhash]["node_spec"], hash_dict[dhash]["node_spec"],
depflag=dt.canonicalize(dtype), depflag=dt.canonicalize(dtype),
virtuals=virtuals, virtuals=virtuals,
direct=direct,
) )
if "build_spec" in node.keys(): if "build_spec" in node.keys():
_, bhash, _ = cls.extract_build_spec_info_from_node_dict(node, hash_type=hash_type) _, bhash, _ = cls.extract_build_spec_info_from_node_dict(node, hash_type=hash_type)
@ -4966,9 +4968,9 @@ def load(cls, data):
for node in nodes: for node in nodes:
# get dependency dict from the node. # get dependency dict from the node.
name, data = cls.name_and_data(node) name, data = cls.name_and_data(node)
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data): for dname, _, dtypes, _, virtuals, direct in cls.dependencies_from_node_dict(data):
deps[name]._add_dependency( deps[name]._add_dependency(
deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals, direct=direct
) )
reconstruct_virtuals_on_edges(result) reconstruct_virtuals_on_edges(result)
@ -5006,7 +5008,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
raise spack.error.SpecError("Couldn't parse dependency spec.") raise spack.error.SpecError("Couldn't parse dependency spec.")
else: else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.") raise spack.error.SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals) yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals), True
class SpecfileV2(SpecfileReaderBase): class SpecfileV2(SpecfileReaderBase):
@ -5043,13 +5045,15 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
# new format: elements of dependency spec are keyed. # new format: elements of dependency spec are keyed.
for h in ht.HASHES: for h in ht.HASHES:
if h.name in elt: if h.name in elt:
dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h) dep_hash, deptypes, hash_type, virtuals, direct = (
cls.extract_info_from_dep(elt, h)
)
break break
else: # We never determined a hash type... else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.") raise spack.error.SpecError("Couldn't parse dependency spec.")
else: else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.") raise spack.error.SpecError("Couldn't parse dependency types in spec.")
result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals))) result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals), direct))
return result return result
@classmethod @classmethod
@ -5057,7 +5061,8 @@ def extract_info_from_dep(cls, elt, hash):
dep_hash, deptypes = elt[hash.name], elt["type"] dep_hash, deptypes = elt[hash.name], elt["type"]
hash_type = hash.name hash_type = hash.name
virtuals = [] virtuals = []
return dep_hash, deptypes, hash_type, virtuals direct = True
return dep_hash, deptypes, hash_type, virtuals, direct
@classmethod @classmethod
def extract_build_spec_info_from_node_dict(cls, node, hash_type=ht.dag_hash.name): def extract_build_spec_info_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
@ -5078,7 +5083,8 @@ def extract_info_from_dep(cls, elt, hash):
deptypes = elt["parameters"]["deptypes"] deptypes = elt["parameters"]["deptypes"]
hash_type = hash.name hash_type = hash.name
virtuals = elt["parameters"]["virtuals"] virtuals = elt["parameters"]["virtuals"]
return dep_hash, deptypes, hash_type, virtuals direct = True
return dep_hash, deptypes, hash_type, virtuals, direct
@classmethod @classmethod
def load(cls, data): def load(cls, data):
@ -5092,6 +5098,15 @@ class SpecfileV5(SpecfileV4):
def legacy_compiler(cls, node): def legacy_compiler(cls, node):
raise RuntimeError("The 'compiler' option is unexpected in specfiles at v5 or greater") raise RuntimeError("The 'compiler' option is unexpected in specfiles at v5 or greater")
@classmethod
def extract_info_from_dep(cls, elt, hash):
dep_hash = elt[hash.name]
deptypes = elt["parameters"]["deptypes"]
hash_type = hash.name
virtuals = elt["parameters"]["virtuals"]
direct = elt["parameters"].get("direct", True)
return dep_hash, deptypes, hash_type, virtuals, direct
#: Alias to the latest version of specfiles #: Alias to the latest version of specfiles
SpecfileLatest = SpecfileV5 SpecfileLatest = SpecfileV5

View File

@ -494,6 +494,10 @@ def test_anchorify_2():
"hdf5~~mpi++shared", "hdf5~~mpi++shared",
"hdf5 cflags==-g foo==bar cxxflags==-O3", "hdf5 cflags==-g foo==bar cxxflags==-O3",
"hdf5 cflags=-g foo==bar cxxflags==-O3", "hdf5 cflags=-g foo==bar cxxflags==-O3",
"hdf5%gcc",
"hdf5%cmake",
"hdf5^gcc",
"hdf5^cmake",
], ],
) )
def test_pickle_roundtrip_for_abstract_specs(spec_str): def test_pickle_roundtrip_for_abstract_specs(spec_str):