From 84917cfa79e29265b387b483d5773b9d20db3ec9 Mon Sep 17 00:00:00 2001
From: Massimiliano Culpo
Date: Thu, 26 Jan 2023 10:40:46 +0100
Subject: [PATCH] Extract functions to read spec files from different format
 (#35094)

This commit makes the format version of the spec file we are reading
from explicit. Before, different functions could each read parts of the
spec file at multiple format versions, and the decision was implicit:
checks were based on the structure of the JSON, without ever consulting
a format version number.

The refactor also makes explicit which spec file format is used by each
database and lockfile format, since the information is stored in global
mappings.

To ensure we don't change the hash of old specs, JSON representations of
specs have been added as test data. A unit test checks that we read the
correct hash in, and that the hash stays the same when we re-serialize
the spec using the most recent format version.
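In practice, reading a spec node now selects a reader explicitly from
the stored format version instead of sniffing the JSON structure.
Roughly (a sketch based on the READER_CLS mapping added below, where a
v4 lockfile uses the v3 spec file format):

    reader = READER_CLS[lockfile_version]    # e.g. 4 -> spack.spec.SpecfileV3
    spec = reader.from_node_dict(node_dict)  # unknown versions: KeyError, reported as RuntimeError
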
Co-authored-by: Harmen Stoppels
---
 lib/spack/spack/build_systems/python.py    |   2 +-
 lib/spack/spack/cray_manifest.py           |   2 +-
 lib/spack/spack/database.py                |  25 +-
 lib/spack/spack/environment/environment.py |  31 +-
 lib/spack/spack/parser.py                  |   2 +-
 lib/spack/spack/provider_index.py          |   4 +-
 lib/spack/spack/solver/asp.py              |   2 +-
 lib/spack/spack/spec.py                    | 483 ++++++++++--------
 lib/spack/spack/test/cmd/test.py           |   2 +-
 .../test/data/specfiles/hdf5.v013.json.gz  | Bin 0 -> 2187 bytes
 .../test/data/specfiles/hdf5.v016.json.gz  | Bin 0 -> 2925 bytes
 .../test/data/specfiles/hdf5.v017.json.gz  | Bin 0 -> 4119 bytes
 .../test/data/specfiles/hdf5.v019.json.gz  | Bin 0 -> 4968 bytes
 lib/spack/spack/test/spec_dag.py           |  30 +-
 lib/spack/spack/test/spec_semantics.py     |  11 +-
 lib/spack/spack/test/spec_yaml.py          |  32 ++
 lib/spack/spack/test/traverse.py           |   4 +-
 17 files changed, 374 insertions(+), 256 deletions(-)
 create mode 100644 lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz
 create mode 100644 lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz
 create mode 100644 lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz
 create mode 100644 lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz

diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index d93807ff20b..84caebefc6c 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -267,7 +267,7 @@ def update_external_dependencies(self, extendee_spec=None):
             python.external_path = self.spec.external_path
             python._mark_concrete()
-        self.spec.add_dependency_edge(python, ("build", "link", "run"))
+        self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
 
     def get_external_python_for_prefix(self):
         """
diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py
index 1f1f5f0ca3b..c2908f61be1 100644
--- a/lib/spack/spack/cray_manifest.py
+++ b/lib/spack/spack/cray_manifest.py
@@ -162,7 +162,7 @@ def entries_to_specs(entries):
             continue
         parent_spec = spec_dict[entry["hash"]]
         dep_spec = spec_dict[dep_hash]
-        parent_spec._add_dependency(dep_spec, deptypes)
+        parent_spec._add_dependency(dep_spec, deptypes=deptypes)
 
     return spec_dict
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 069d95347b5..0607c6312ad 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -107,6 +107,14 @@
 ]
 
 
+def reader(version):
+    reader_cls = {
+        Version("5"): spack.spec.SpecfileV1,
+        Version("6"): spack.spec.SpecfileV3,
+    }
+    return reader_cls[version]
+
+
 def _now():
     """Returns the time since the epoch"""
     return time.time()
@@ -674,7 +682,7 @@ def _write_to_file(self, stream):
         except (TypeError, ValueError) as e:
             raise sjson.SpackJSONError("error writing JSON database:", str(e))
 
-    def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
+    def _read_spec_from_dict(self, spec_reader, hash_key, installs, hash=ht.dag_hash):
         """Recursively construct a spec from a hash in a YAML database.
 
         Does not do any locking.
@@ -692,7 +700,7 @@ def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
             spec_dict[hash.name] = hash_key
 
         # Build spec from dict first.
-        spec = spack.spec.Spec.from_node_dict(spec_dict)
+        spec = spec_reader.from_node_dict(spec_dict)
         return spec
 
     def db_for_spec_hash(self, hash_key):
@@ -732,7 +740,7 @@ def query_local_by_spec_hash(self, hash_key):
         with self.read_transaction():
             return self._data.get(hash_key, None)
 
-    def _assign_dependencies(self, hash_key, installs, data):
+    def _assign_dependencies(self, spec_reader, hash_key, installs, data):
         # Add dependencies from other records in the install DB to
         # form a full spec.
         spec = data[hash_key].spec
@@ -742,7 +750,7 @@ def _assign_dependencies(self, hash_key, installs, data):
             spec_node_dict = spec_node_dict[spec.name]
         if "dependencies" in spec_node_dict:
             yaml_deps = spec_node_dict["dependencies"]
-            for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(yaml_deps):
+            for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
                 # It is important that we always check upstream installations
                 # in the same order, and that we always check the local
                 # installation first: if a downstream Spack installs a package
@@ -765,7 +773,7 @@
                     tty.warn(msg)
                     continue
 
-                spec._add_dependency(child, dtypes)
+                spec._add_dependency(child, deptypes=dtypes)
 
     def _read_from_file(self, filename):
         """Fill database from file, do not maintain old data.
@@ -797,6 +805,7 @@ def check(cond, msg):
         # TODO: better version checking semantics.
         version = Version(db["version"])
+        spec_reader = reader(version)
         if version > _db_version:
             raise InvalidDatabaseVersionError(_db_version, version)
         elif version < _db_version:
@@ -832,7 +841,7 @@ def invalid_record(hash_key, error):
         for hash_key, rec in installs.items():
             try:
                 # This constructs a spec DAG from the list of all installs
                spec = self._read_spec_from_dict(spec_reader, hash_key, installs)
-                spec = self._read_spec_from_dict(hash_key, installs)
+                spec = self._read_spec_from_dict(spec_reader, hash_key, installs)
 
                 # Insert the brand new spec in the database.  Each
                 # spec has its own copies of its dependency specs.
@@ -848,7 +857,7 @@ def invalid_record(hash_key, error):
         # Pass 2: Assign dependencies once all specs are created.
         for hash_key in data:
             try:
-                self._assign_dependencies(hash_key, installs, data)
+                self._assign_dependencies(spec_reader, hash_key, installs, data)
             except MissingDependenciesError:
                 raise
             except Exception as e:
@@ -1167,7 +1176,7 @@ def _add(
         for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
             dkey = dep.spec.dag_hash()
             upstream, record = self.query_by_spec_hash(dkey)
-            new_spec._add_dependency(record.spec, dep.deptypes)
+            new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
             if not upstream:
                 record.ref_count += 1
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index f81232feec4..c008cc5c7e6 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -104,6 +104,15 @@ def default_manifest_yaml():
 #: version of the lockfile format. Must increase monotonically.
 lockfile_format_version = 4
 
+
+READER_CLS = {
+    1: spack.spec.SpecfileV1,
+    2: spack.spec.SpecfileV1,
+    3: spack.spec.SpecfileV2,
+    4: spack.spec.SpecfileV3,
+}
+
+
 # Magic names
 # The name of the standalone spec list in the manifest yaml
 user_speclist_name = "specs"
@@ -1436,7 +1445,7 @@ def _concretize_separately(self, tests=False):
                 if test_dependency in current_spec[node.name]:
                     continue
                 current_spec[node.name].add_dependency_edge(
-                    test_dependency.copy(), deptype="test"
+                    test_dependency.copy(), deptypes="test"
                 )
 
         results = [
@@ -1942,7 +1951,7 @@ def _to_lockfile_dict(self):
             "_meta": {
                 "file-type": "spack-lockfile",
                 "lockfile-version": lockfile_format_version,
-                "specfile-version": spack.spec.specfile_format_version,
+                "specfile-version": spack.spec.SPECFILE_FORMAT_VERSION,
             },
             # users specs + hashes are the 'roots' of the environment
             "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
@@ -1975,10 +1984,19 @@ def _read_lockfile_dict(self, d):
         # Track specs by their DAG hash, allows handling DAG hash collisions
         first_seen = {}
+        current_lockfile_format = d["_meta"]["lockfile-version"]
+        try:
+            reader = READER_CLS[current_lockfile_format]
+        except KeyError:
+            msg = (
+                f"Spack {spack.__version__} cannot read environment lockfiles using the "
+                f"v{current_lockfile_format} format"
+            )
+            raise RuntimeError(msg)
 
         # First pass: Put each spec in the map ignoring dependencies
         for lockfile_key, node_dict in json_specs_by_hash.items():
-            spec = Spec.from_node_dict(node_dict)
+            spec = reader.from_node_dict(node_dict)
             if not spec._hash:
                 # in v1 lockfiles, the hash only occurs as a key
                 spec._hash = lockfile_key
@@ -1987,8 +2005,11 @@ def _read_lockfile_dict(self, d):
         # Second pass: For each spec, get its dependencies from the node dict
         # and add them to the spec
         for lockfile_key, node_dict in json_specs_by_hash.items():
-            for _, dep_hash, deptypes, _ in Spec.dependencies_from_node_dict(node_dict):
-                specs_by_hash[lockfile_key]._add_dependency(specs_by_hash[dep_hash], deptypes)
+            name, data = reader.name_and_data(node_dict)
+            for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
+                specs_by_hash[lockfile_key]._add_dependency(
+                    specs_by_hash[dep_hash], deptypes=deptypes
+                )
 
         # Traverse the root specs one at a time in the order they appear.
         # The first time we see each DAG hash, that's the one we want to
diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py
index bed05c3316e..b4748b259f7 100644
--- a/lib/spack/spack/parser.py
+++ b/lib/spack/spack/parser.py
@@ -283,7 +283,7 @@ def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spe
                 if root_spec.concrete:
                     raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
 
-                root_spec._add_dependency(dependency, ())
+                root_spec._add_dependency(dependency, deptypes=())
 
             else:
                 break
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
index c5728503853..526a7dc7623 100644
--- a/lib/spack/spack/provider_index.py
+++ b/lib/spack/spack/provider_index.py
@@ -292,8 +292,8 @@ def from_json(stream, repository):
         index.providers = _transform(
             providers,
             lambda vpkg, plist: (
-                spack.spec.Spec.from_node_dict(vpkg),
-                set(spack.spec.Spec.from_node_dict(p) for p in plist),
+                spack.spec.SpecfileV3.from_node_dict(vpkg),
+                set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
             ),
         )
         return index
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index c6a8ea0c89b..c8f213fcd4d 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -2259,7 +2259,7 @@ def depends_on(self, pkg, dep, type):
         assert len(dependencies) < 2, msg
 
         if not dependencies:
-            self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
+            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
         else:
             # TODO: This assumes that each solve unifies dependencies
             dependencies[0].add_type(type)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index ebaefbcf27a..aaeb15e82ef 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -168,7 +168,7 @@
 )
 
 #: specfile format version. Must increase monotonically
-specfile_format_version = 3
+SPECFILE_FORMAT_VERSION = 3
 
 
 def colorize_spec(spec):
@@ -1529,14 +1529,14 @@ def _set_compiler(self, compiler):
             )
         self.compiler = compiler
 
-    def _add_dependency(self, spec, deptypes):
+    def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
         """Called by the parser to add another spec as a dependency."""
         if spec.name not in self._dependencies:
-            self.add_dependency_edge(spec, deptypes)
+            self.add_dependency_edge(spec, deptypes=deptypes)
             return
 
         # Keep the intersection of constraints when a dependency is added
-        # multiple times. Currently we only allow identical edge types.
+        # multiple times. Currently, we only allow identical edge types.
         orig = self._dependencies[spec.name]
         try:
             dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
@@ -1550,34 +1550,39 @@ def _add_dependency(self, spec, deptypes):
                 "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
             )
 
-    def add_dependency_edge(self, dependency_spec, deptype):
+    def add_dependency_edge(
+        self,
+        dependency_spec: "Spec",
+        *,
+        deptypes: dp.DependencyArgument,
+    ):
         """Add a dependency edge to this spec.
 
         Args:
-            dependency_spec (Spec): spec of the dependency
-            deptype (str or tuple): dependency types
+            dependency_spec: spec of the dependency
+            deptypes: dependency types for this edge
         """
-        deptype = dp.canonical_deptype(deptype)
+        deptypes = dp.canonical_deptype(deptypes)
 
         # Check if we need to update edges that are already present
         selected = self._dependencies.select(child=dependency_spec.name)
         for edge in selected:
-            if any(d in edge.deptypes for d in deptype):
+            if any(d in edge.deptypes for d in deptypes):
                 msg = (
                     'cannot add a dependency on "{0.spec}" of {1} type '
                     'when the "{0.parent}" has the edge {0!s} already'
                 )
-                raise spack.error.SpecError(msg.format(edge, deptype))
+                raise spack.error.SpecError(msg.format(edge, deptypes))
 
         for edge in selected:
             if id(dependency_spec) == id(edge.spec):
                 # If we are here, it means the edge object was previously added to
                 # both the parent and the child. When we update this object they'll
                 # both see the deptype modification.
-                edge.add_type(deptype)
+                edge.add_type(deptypes)
                 return
 
-        edge = DependencySpec(self, dependency_spec, deptypes=deptype)
+        edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
         self._dependencies.add(edge)
         dependency_spec._dependents.add(edge)
@@ -2027,7 +2032,7 @@ def to_dict(self, hash=ht.dag_hash):
             node_list.append(node)
             hash_set.add(node_hash)
 
-        meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
+        meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
         inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
         spec_dict = syaml.syaml_dict([("spec", inner_dict)])
         return spec_dict
@@ -2063,137 +2068,13 @@ def to_json(self, stream=None, hash=ht.dag_hash):
 
     @staticmethod
     def from_specfile(path):
-        """Construct a spec from aJSON or YAML spec file path"""
+        """Construct a spec from a JSON or YAML spec file path"""
         with open(path, "r") as fd:
             file_content = fd.read()
             if path.endswith(".json"):
                 return Spec.from_json(file_content)
             return Spec.from_yaml(file_content)
 
-    @staticmethod
-    def from_node_dict(node):
-        spec = Spec()
-        if "name" in node.keys():
-            # New format
-            name = node["name"]
-        else:
-            # Old format
-            name = next(iter(node))
-            node = node[name]
-        for h in ht.hashes:
-            setattr(spec, h.attr, node.get(h.name, None))
-
-        spec.name = name
-        spec.namespace = node.get("namespace", None)
-
-        if "version" in node or "versions" in node:
-            spec.versions = vn.VersionList.from_dict(node)
-
-        if "arch" in node:
-            spec.architecture = ArchSpec.from_dict(node)
-
-        if "compiler" in node:
-            spec.compiler = CompilerSpec.from_dict(node)
-        else:
-            spec.compiler = None
-
-        if "parameters" in node:
-            for name, values in node["parameters"].items():
-                if name in _valid_compiler_flags:
-                    spec.compiler_flags[name] = []
-                    for val in values:
-                        spec.compiler_flags.add_flag(name, val, False)
-                else:
-                    spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
-        elif "variants" in node:
-            for name, value in node["variants"].items():
-                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
-            for name in FlagMap.valid_compiler_flags():
-                spec.compiler_flags[name] = []
-
-        spec.external_path = None
-        spec.external_modules = None
-        if "external" in node:
-            # This conditional is needed because sometimes this function is
-            # called with a node already constructed that contains a 'versions'
-            # and 'external' field. Related to virtual packages provider
-            # indexes.
-            if node["external"]:
-                spec.external_path = node["external"]["path"]
-                spec.external_modules = node["external"]["module"]
-                if spec.external_modules is False:
-                    spec.external_modules = None
-                spec.extra_attributes = node["external"].get(
-                    "extra_attributes", syaml.syaml_dict()
-                )
-
-        # specs read in are concrete unless marked abstract
-        spec._concrete = node.get("concrete", True)
-
-        if "patches" in node:
-            patches = node["patches"]
-            if len(patches) > 0:
-                mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
-                mvar.value = patches
-                # FIXME: Monkey patches mvar to store patches order
-                mvar._patches_in_order_of_appearance = patches
-
-        # Don't read dependencies here; from_dict() is used by
-        # from_yaml() and from_json() to read the root *and* each dependency
-        # spec.
-
-        return spec
-
-    @staticmethod
-    def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
-        build_spec_dict = node["build_spec"]
-        return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
-
-    @staticmethod
-    def dependencies_from_node_dict(node):
-        if "name" in node.keys():
-            # New format
-            name = node["name"]
-        else:
-            name = next(iter(node))
-            node = node[name]
-        if "dependencies" not in node:
-            return
-        for t in Spec.read_yaml_dep_specs(node["dependencies"]):
-            yield t
-
-    @staticmethod
-    def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
-        """Read the DependencySpec portion of a YAML-formatted Spec.
-        This needs to be backward-compatible with older spack spec
-        formats so that reindex will work on old specs/databases.
-        """
-        dep_iter = deps.items() if isinstance(deps, dict) else deps
-        for dep in dep_iter:
-            if isinstance(dep, tuple):
-                dep_name, elt = dep
-            else:
-                elt = dep
-                dep_name = dep["name"]
-            if isinstance(elt, str):
-                # original format, elt is just the dependency hash.
-                dep_hash, deptypes = elt, ["build", "link"]
-            elif isinstance(elt, tuple):
-                # original deptypes format: (used tuples, not future-proof)
-                dep_hash, deptypes = elt
-            elif isinstance(elt, dict):
-                # new format: elements of dependency spec are keyed.
-                for h in ht.hashes:
-                    if h.name in elt:
-                        dep_hash, deptypes = elt[h.name], elt["type"]
-                        hash_type = h.name
-                        break
-                else:  # We never determined a hash type...
-                    raise spack.error.SpecError("Couldn't parse dependency spec.")
-            else:
-                raise spack.error.SpecError("Couldn't parse dependency types in spec.")
-            yield dep_name, dep_hash, list(deptypes), hash_type
-
     @staticmethod
     def override(init_spec, change_spec):
         # TODO: this doesn't account for the case where the changed spec
@@ -2367,7 +2248,7 @@ def spec_and_dependency_types(s):
             dag_node, dependency_types = spec_and_dependency_types(s)
 
             dependency_spec = spec_builder({dag_node: s_dependencies})
-            spec._add_dependency(dependency_spec, dependency_types)
+            spec._add_dependency(dependency_spec, deptypes=dependency_types)
 
         return spec
 
@@ -2380,54 +2261,14 @@ def from_dict(data):
 
         Args:
             data: a nested dict/list data structure read from YAML or JSON.
""" - if isinstance(data["spec"], list): # Legacy specfile format - return _spec_from_old_dict(data) + # Legacy specfile format + if isinstance(data["spec"], list): + return SpecfileV1.load(data) - # Current specfile format - nodes = data["spec"]["nodes"] - hash_type = None - any_deps = False - - # Pass 0: Determine hash type - for node in nodes: - if "dependencies" in node.keys(): - any_deps = True - for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node): - if dhash_type: - hash_type = dhash_type - break - - if not any_deps: # If we never see a dependency... - hash_type = ht.dag_hash.name - elif not hash_type: # Seen a dependency, still don't know hash_type - raise spack.error.SpecError( - "Spec dictionary contains malformed " "dependencies. Old format?" - ) - - hash_dict = {} - root_spec_hash = None - - # Pass 1: Create a single lookup dictionary by hash - for i, node in enumerate(nodes): - node_hash = node[hash_type] - node_spec = Spec.from_node_dict(node) - hash_dict[node_hash] = node - hash_dict[node_hash]["node_spec"] = node_spec - if i == 0: - root_spec_hash = node_hash - if not root_spec_hash: - raise spack.error.SpecError("Spec dictionary contains no nodes.") - - # Pass 2: Finish construction of all DAG edges (including build specs) - for node_hash, node in hash_dict.items(): - node_spec = node["node_spec"] - for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node): - node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes) - if "build_spec" in node.keys(): - _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type) - node_spec._build_spec = hash_dict[bhash]["node_spec"] - - return hash_dict[root_spec_hash]["node_spec"] + specfile_version = int(data["spec"]["_meta"]["version"]) + if specfile_version == 2: + return SpecfileV2.load(data) + return SpecfileV3.load(data) @staticmethod def from_yaml(stream): @@ -2583,7 +2424,7 @@ def _replace_with(self, concrete): # add the replacement, unless it is already a dep of dependent. 
             if concrete.name not in dependent._dependencies:
-                dependent._add_dependency(concrete, deptypes)
+                dependent._add_dependency(concrete, deptypes=deptypes)
 
     def _expand_virtual_packages(self, concretizer):
         """Find virtual packages in this spec, replace them with providers,
@@ -3254,7 +3095,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
         # Add merged spec to my deps and recurse
         spec_dependency = spec_deps[dep.name]
         if dep.name not in self._dependencies:
-            self._add_dependency(spec_dependency, dependency.type)
+            self._add_dependency(spec_dependency, deptypes=dependency.type)
 
         changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
         return changed
@@ -3585,7 +3426,7 @@ def _constrain_dependencies(self, other):
                 dep_spec_copy = other._get_dependency(name)
                 dep_copy = dep_spec_copy.spec
                 deptypes = dep_spec_copy.deptypes
-                self._add_dependency(dep_copy.copy(), deptypes)
+                self._add_dependency(dep_copy.copy(), deptypes=deptypes)
                 changed = True
 
         return changed
@@ -3898,7 +3739,7 @@ def spid(spec):
                     new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
 
                 new_specs[spid(edge.parent)].add_dependency_edge(
-                    new_specs[spid(edge.spec)], edge.deptypes
+                    new_specs[spid(edge.spec)], deptypes=edge.deptypes
                 )
 
     def copy(self, deps=True, **kwargs):
@@ -4813,12 +4654,12 @@ def from_self(name, transitive):
                 if name in self_nodes:
                     for edge in self[name].edges_to_dependencies():
                         dep_name = deps_to_replace.get(edge.spec, edge.spec).name
-                        nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
+                        nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
                     if any(dep not in self_nodes for dep in self[name]._dependencies):
                         nodes[name].build_spec = self[name].build_spec
                 else:
                     for edge in other[name].edges_to_dependencies():
-                        nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
+                        nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
                     if any(dep not in other_nodes for dep in other[name]._dependencies):
                         nodes[name].build_spec = other[name].build_spec
@@ -4891,40 +4732,252 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
         # Update with additional constraints from other spec
         for name in current_spec_constraint.direct_dep_difference(merged_spec):
             edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
-            merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
+            merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
 
     return merged_spec
 
 
-def _spec_from_old_dict(data):
-    """Construct a spec from JSON/YAML using the format version 1.
-    Note: Version 1 format has no notion of a build_spec, and names are
-    guaranteed to be unique.
+class SpecfileReaderBase:
+    @classmethod
+    def from_node_dict(cls, node):
+        spec = Spec()
 
-    Parameters:
-    data -- a nested dict/list data structure read from YAML or JSON.
-    """
-    nodes = data["spec"]
+        name, node = cls.name_and_data(node)
+        for h in ht.hashes:
+            setattr(spec, h.attr, node.get(h.name, None))
 
-    # Read nodes out of list.  Root spec is the first element;
-    # dependencies are the following elements.
-    dep_list = [Spec.from_node_dict(node) for node in nodes]
-    if not dep_list:
-        raise spack.error.SpecError("YAML spec contains no nodes.")
-    deps = dict((spec.name, spec) for spec in dep_list)
-    spec = dep_list[0]
+        spec.name = name
+        spec.namespace = node.get("namespace", None)
 
-    for node in nodes:
-        # get dependency dict from the node.
+        if "version" in node or "versions" in node:
+            spec.versions = vn.VersionList.from_dict(node)
+
+        if "arch" in node:
+            spec.architecture = ArchSpec.from_dict(node)
+
+        if "compiler" in node:
+            spec.compiler = CompilerSpec.from_dict(node)
+        else:
+            spec.compiler = None
+
+        for name, values in node.get("parameters", {}).items():
+            if name in _valid_compiler_flags:
+                spec.compiler_flags[name] = []
+                for val in values:
+                    spec.compiler_flags.add_flag(name, val, False)
+            else:
+                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
+
+        spec.external_path = None
+        spec.external_modules = None
+        if "external" in node:
+            # This conditional is needed because sometimes this function is
+            # called with a node already constructed that contains a 'versions'
+            # and 'external' field. Related to virtual packages provider
+            # indexes.
+            if node["external"]:
+                spec.external_path = node["external"]["path"]
+                spec.external_modules = node["external"]["module"]
+                if spec.external_modules is False:
+                    spec.external_modules = None
+                spec.extra_attributes = node["external"].get(
+                    "extra_attributes", syaml.syaml_dict()
+                )
+
+        # specs read in are concrete unless marked abstract
+        spec._concrete = node.get("concrete", True)
+
+        if "patches" in node:
+            patches = node["patches"]
+            if len(patches) > 0:
+                mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
+                mvar.value = patches
+                # FIXME: Monkey patches mvar to store patches order
+                mvar._patches_in_order_of_appearance = patches
+
+        # Don't read dependencies here; from_dict() is used by
+        # from_yaml() and from_json() to read the root *and* each dependency
+        # spec.
+
+        return spec
+
+    @classmethod
+    def _load(cls, data):
+        """Construct a spec from JSON/YAML using the format version 2.
+
+        This format is used in Spack v0.17, was introduced in
+        https://github.com/spack/spack/pull/22845
+
+        Args:
+            data: a nested dict/list data structure read from YAML or JSON.
+        """
+        # Current specfile format
+        nodes = data["spec"]["nodes"]
+        hash_type = None
+        any_deps = False
+
+        # Pass 0: Determine hash type
+        for node in nodes:
+            for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
+                any_deps = True
+                if dhash_type:
+                    hash_type = dhash_type
+                    break
+
+        if not any_deps:  # If we never see a dependency...
+            hash_type = ht.dag_hash.name
+        elif not hash_type:  # Seen a dependency, still don't know hash_type
+            raise spack.error.SpecError(
+                "Spec dictionary contains malformed dependencies. Old format?"
+            )
+
+        hash_dict = {}
+        root_spec_hash = None
+
+        # Pass 1: Create a single lookup dictionary by hash
+        for i, node in enumerate(nodes):
+            node_hash = node[hash_type]
+            node_spec = cls.from_node_dict(node)
+            hash_dict[node_hash] = node
+            hash_dict[node_hash]["node_spec"] = node_spec
+            if i == 0:
+                root_spec_hash = node_hash
+
+        if not root_spec_hash:
+            raise spack.error.SpecError("Spec dictionary contains no nodes.")
+
+        # Pass 2: Finish construction of all DAG edges (including build specs)
+        for node_hash, node in hash_dict.items():
+            node_spec = node["node_spec"]
+            for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
+                node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
+            if "build_spec" in node.keys():
+                _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
+                node_spec._build_spec = hash_dict[bhash]["node_spec"]
+
+        return hash_dict[root_spec_hash]["node_spec"]
+
+
+class SpecfileV1(SpecfileReaderBase):
+    @classmethod
+    def load(cls, data):
+        """Construct a spec from JSON/YAML using the format version 1.
+
+        Note: Version 1 format has no notion of a build_spec, and names are
+        guaranteed to be unique. This function is guaranteed to read specs as
+        old as v0.10 - while it was not checked for older formats.
+
+        Args:
+            data: a nested dict/list data structure read from YAML or JSON.
+        """
+        nodes = data["spec"]
+
+        # Read nodes out of list.  Root spec is the first element;
+        # dependencies are the following elements.
+        dep_list = [cls.from_node_dict(node) for node in nodes]
+        if not dep_list:
+            raise spack.error.SpecError("specfile contains no nodes.")
+
+        deps = {spec.name: spec for spec in dep_list}
+        result = dep_list[0]
+
+        for node in nodes:
+            # get dependency dict from the node.
+            name, data = cls.name_and_data(node)
+            for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
+                deps[name]._add_dependency(deps[dname], deptypes=dtypes)
+
+        return result
+
+    @classmethod
+    def name_and_data(cls, node):
         name = next(iter(node))
+        node = node[name]
+        return name, node
 
-        if "dependencies" not in node[name]:
-            continue
+    @classmethod
+    def dependencies_from_node_dict(cls, node):
+        if "dependencies" not in node:
+            return []
 
-        for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
-            deps[name]._add_dependency(deps[dname], dtypes)
+        for t in cls.read_specfile_dep_specs(node["dependencies"]):
+            yield t
 
-    return spec
+    @classmethod
+    def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
+        """Read the DependencySpec portion of a YAML-formatted Spec.
+        This needs to be backward-compatible with older spack spec
+        formats so that reindex will work on old specs/databases.
+        """
+        for dep_name, elt in deps.items():
+            if isinstance(elt, dict):
+                for h in ht.hashes:
+                    if h.name in elt:
+                        dep_hash, deptypes = elt[h.name], elt["type"]
+                        hash_type = h.name
+                        break
+                else:  # We never determined a hash type...
+                    raise spack.error.SpecError("Couldn't parse dependency spec.")
+            else:
+                raise spack.error.SpecError("Couldn't parse dependency types in spec.")
+            yield dep_name, dep_hash, list(deptypes), hash_type
+
+
+class SpecfileV2(SpecfileReaderBase):
+    @classmethod
+    def load(cls, data):
+        result = cls._load(data)
+        return result
+
+    @classmethod
+    def name_and_data(cls, node):
+        return node["name"], node
+
+    @classmethod
+    def dependencies_from_node_dict(cls, node):
+        return cls.read_specfile_dep_specs(node.get("dependencies", []))
+
+    @classmethod
+    def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
+        """Read the DependencySpec portion of a YAML-formatted Spec.
+        This needs to be backward-compatible with older spack spec
+        formats so that reindex will work on old specs/databases.
+        """
+        if not isinstance(deps, list):
+            raise spack.error.SpecError("Spec dictionary contains malformed dependencies")
+
+        result = []
+        for dep in deps:
+            elt = dep
+            dep_name = dep["name"]
+            if isinstance(elt, dict):
+                # new format: elements of dependency spec are keyed.
+                for h in ht.hashes:
+                    if h.name in elt:
+                        dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
+                        break
+                else:  # We never determined a hash type...
+                    raise spack.error.SpecError("Couldn't parse dependency spec.")
+            else:
+                raise spack.error.SpecError("Couldn't parse dependency types in spec.")
+            result.append((dep_name, dep_hash, list(deptypes), hash_type))
+        return result
+
+    @classmethod
+    def extract_info_from_dep(cls, elt, hash):
+        dep_hash, deptypes = elt[hash.name], elt["type"]
+        hash_type = hash.name
+        virtuals = []
+        return dep_hash, deptypes, hash_type, virtuals
+
+    @classmethod
+    def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
+        build_spec_dict = node["build_spec"]
+        return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
+
+
+class SpecfileV3(SpecfileV2):
+    pass
 
 
 class LazySpecCache(collections.defaultdict):
diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py
index 19e021bf84a..14cf5c3c6b9 100644
--- a/lib/spack/spack/test/cmd/test.py
+++ b/lib/spack/spack/test/cmd/test.py
@@ -258,7 +258,7 @@ def test_has_test_method_fails(capsys):
     assert "is not a class" in captured
 
 
-def test_read_old_results(mock_test_stage):
+def test_read_old_results(mock_packages, mock_test_stage):
     """Take test data generated before the switch to full hash everywhere
     and make sure we can still read it in"""
     # Test data was generated with:
diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz
new file mode 100644
index 0000000000000000000000000000000000000000..3f10fbacd953b8d2f7b5eb44897fec129b6b02d0
GIT binary patch
literal 2187
[... base85-encoded gzip payload omitted ...]

literal 0
HcmV?d00001

diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz
new file mode 100644
index 0000000000000000000000000000000000000000..928fe4a3d765d6bf74c97ed26b601aa7b138b44f
GIT binary patch
literal 2925
[... base85-encoded gzip payload omitted ...]

literal 0
HcmV?d00001

diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz
new file mode 100644
GIT binary patch
literal 4119
[... base85-encoded gzip payload omitted ...]

literal 0
HcmV?d00001

diff --git a/lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz b/lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz
new file mode 100644
GIT binary patch
literal 4968
[... base85-encoded gzip payload omitted ...]

literal 0
HcmV?d00001

diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 87cd11cefbd..5afc831096e 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -125,7 +125,7 @@ def _mock_installed(self):
     # use the installed C. It should *not* force A to use the installed D
     # *if* we're doing a fresh installation.
     a_spec = Spec(a)
-    a_spec._add_dependency(c_spec, ("build", "link"))
+    a_spec._add_dependency(c_spec, deptypes=("build", "link"))
     a_spec.concretize()
     assert spack.version.Version("2") == a_spec[c][d].version
     assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
     monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))
 
     a_spec = Spec("a")
-    a_spec._add_dependency(b_spec, ("build", "link"))
+    a_spec._add_dependency(b_spec, deptypes=("build", "link"))
     a_spec.concretize()
 
     assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -992,9 +992,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
     link_run_spec = Spec("c@1.0").concretized()
     build_spec = Spec("c@2.0").concretized()
 
-    root.add_dependency_edge(link_run_spec, deptype="link")
-    root.add_dependency_edge(link_run_spec, deptype="run")
-    root.add_dependency_edge(build_spec, deptype="build")
+    root.add_dependency_edge(link_run_spec, deptypes="link")
+    root.add_dependency_edge(link_run_spec, deptypes="run")
+    root.add_dependency_edge(build_spec, deptypes="build")
 
     # Check dependencies from the perspective of root
     assert len(root.dependencies()) == 2
@@ -1020,7 +1020,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
     root = Spec("b@2.0").concretized()
     bootstrap = Spec("b@1.0").concretized()
 
-    root.add_dependency_edge(bootstrap, deptype="build")
+    root.add_dependency_edge(bootstrap, deptypes="build")
 
     assert len(root.dependencies()) == 1
     assert root.dependencies()[0].name == "b"
@@ -1039,7 +1039,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
     bootstrap = Spec("b@1.0").concretized()
 
     for current_deptype in ("build", "link", "run"):
-        root.add_dependency_edge(bootstrap, deptype=current_deptype)
+        root.add_dependency_edge(bootstrap, deptypes=current_deptype)
 
         # Check edges in dependencies
         assert len(root.edges_to_dependencies()) == 1
@@ -1066,9 +1066,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
     c1 = Spec("b@1.0").concretized()
     c2 = Spec("b@2.0").concretized()
 
-    p.add_dependency_edge(c1, deptype=c1_deptypes)
+    p.add_dependency_edge(c1, deptypes=c1_deptypes)
     with pytest.raises(spack.error.SpackError):
-        p.add_dependency_edge(c2, deptype=c2_deptypes)
+        p.add_dependency_edge(c2, deptypes=c2_deptypes)
 
 
 @pytest.mark.regression("33499")
@@ -1087,16 +1087,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
     z3_flavor_1 = Spec("z3 +through_a1")
     z3_flavor_2 = Spec("z3 +through_z1")
 
-    root.add_dependency_edge(a1, deptype=("build", "run", "test"))
+    root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
 
     # unique package as a dep of a build/run/test type dep.
-    a1.add_dependency_edge(a2, deptype="all")
-    a1.add_dependency_edge(z3_flavor_1, deptype="all")
+    a1.add_dependency_edge(a2, deptypes="all")
+    a1.add_dependency_edge(z3_flavor_1, deptypes="all")
 
     # chain of link type deps root -> z1 -> z2 -> z3
-    root.add_dependency_edge(z1, deptype="link")
-    z1.add_dependency_edge(z2, deptype="link")
-    z2.add_dependency_edge(z3_flavor_2, deptype="link")
+    root.add_dependency_edge(z1, deptypes="link")
+    z1.add_dependency_edge(z2, deptypes="link")
+    z2.add_dependency_edge(z3_flavor_2, deptypes="link")
 
     # Indexing should prefer the link-type dep.
     assert "through_z1" in root["z3"].variants
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 0075bfc3848..c09ddeb920e 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -1108,7 +1108,7 @@ def test_error_message_unknown_variant(self):
     def test_satisfies_dependencies_ordered(self):
         d = Spec("zmpi ^fake")
         s = Spec("mpileaks")
-        s._add_dependency(d, ())
+        s._add_dependency(d, deptypes=())
         assert s.satisfies("mpileaks ^zmpi ^fake", strict=True)
 
     @pytest.mark.parametrize("transitive", [True, False])
@@ -1156,7 +1156,9 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):
 
 def test_malformed_spec_dict():
     with pytest.raises(SpecError, match="malformed"):
-        Spec.from_dict({"spec": {"nodes": [{"dependencies": {"name": "foo"}}]}})
+        Spec.from_dict(
+            {"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
+        )
 
 
 def test_spec_dict_hashless_dep():
@@ -1164,9 +1166,10 @@ def test_spec_dict_hashless_dep():
         Spec.from_dict(
             {
                 "spec": {
+                    "_meta": {"version": 2},
                     "nodes": [
                         {"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}
-                    ]
+                    ],
                 }
             }
         )
@@ -1252,7 +1255,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
 
     # add it to an abstract spec as a dependency
     top = Spec("dt-diamond")
-    top.add_dependency_edge(bottom, ())
+    top.add_dependency_edge(bottom, deptypes=())
 
     # concretize with the already-concrete dependency
    top.concretize()
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index 5a88167572a..48826dc4937 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -13,7 +13,9 @@
 import ast
 import collections
 import collections.abc
+import gzip
 import inspect
+import json
 import os
 
 import pytest
@@ -507,3 +509,33 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
             ("version", "1.2.11"),
         ]
     )
+
+
+@pytest.mark.parametrize(
+    "specfile,expected_hash,reader_cls",
+    [
+        # First version supporting JSON format for specs
+        ("specfiles/hdf5.v013.json.gz", "vglgw4reavn65vx5d4dlqn6rjywnq76d", spack.spec.SpecfileV1),
+        # Introduces full hash in the format, still has 3 hashes
+        ("specfiles/hdf5.v016.json.gz", "stp45yvzte43xdauknaj3auxlxb4xvzs", spack.spec.SpecfileV1),
+        # Introduces "build_specs", see https://github.com/spack/spack/pull/22845
+        ("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
+        # Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
+        ("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
+    ],
+)
+def test_load_json_specfiles(specfile, expected_hash, reader_cls):
+    fullpath = os.path.join(spack.paths.test_path, "data", specfile)
+    with gzip.open(fullpath, "rt", encoding="utf-8") as f:
+        data = json.load(f)
+
+    s1 = Spec.from_dict(data)
+    s2 = reader_cls.load(data)
+
+    assert s2.dag_hash() == expected_hash
+    assert s1.dag_hash() == s2.dag_hash()
+    assert s1 == s2
+    assert Spec.from_json(s2.to_json()).dag_hash() == s2.dag_hash()
+
+    openmpi_edges = s2.edges_to_dependencies(name="openmpi")
+    assert len(openmpi_edges) == 1
diff --git a/lib/spack/spack/test/traverse.py b/lib/spack/spack/test/traverse.py
index 1bc3d69cfe0..1334f33883e 100644
--- a/lib/spack/spack/test/traverse.py
+++ b/lib/spack/spack/test/traverse.py
@@ -18,8 +18,8 @@ def create_dag(nodes, edges):
         dict: mapping from package name to abstract Spec with proper deps.
""" specs = {name: Spec(name) for name in nodes} - for parent, child, deptype in edges: - specs[parent].add_dependency_edge(specs[child], deptype) + for parent, child, deptypes in edges: + specs[parent].add_dependency_edge(specs[child], deptypes=deptypes) return specs