Cray manifest: compiler handling fixes (#40061)

* Make use of `prefix` in the Cray manifest schema (prepend it to
  the relative CC etc.) - this was a Spack error.
* Warn people when wrong-looking compilers are found in the manifest
  (i.e. non-existent CC path).
* Bypass compilers that we fail to add (don't allow a single bad
  compiler to terminate the entire read-cray-manifest action).
* Refactor Cray manifest tests: module-level variables have been
  replaced with fixtures, specifically using the `test_platform`
  fixture, which allows the unit tests to run with the new
  concretizer.
* Add unit test to check case where adding a compiler raises an
  exception (check that this doesn't prevent processing the
  rest of the manifest).
This commit is contained in:
Peter Scheibel 2023-09-29 09:47:30 -07:00 committed by GitHub
parent db37672abf
commit 3969653f1b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 216 additions and 132 deletions

View File

@@ -112,6 +112,7 @@ def extract_version_from_output(cls, output):
match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output) match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
if match: if match:
return ".".join(match.groups()) return ".".join(match.groups())
return "unknown"
@classmethod @classmethod
def fc_version(cls, fortran_compiler): def fc_version(cls, fortran_compiler):

View File

@@ -4,6 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import json import json
import os
import traceback
import warnings
import jsonschema import jsonschema
import jsonschema.exceptions import jsonschema.exceptions
@@ -46,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name):
) )
def compiler_from_entry(entry): def compiler_from_entry(entry: dict, manifest_path: str):
# Note that manifest_path is only passed here to compose a
# useful warning message when paths appear to be missing.
compiler_name = translated_compiler_name(entry["name"]) compiler_name = translated_compiler_name(entry["name"])
if "prefix" in entry:
prefix = entry["prefix"]
paths = dict(
(lang, os.path.join(prefix, relpath))
for (lang, relpath) in entry["executables"].items()
)
else:
paths = entry["executables"] paths = entry["executables"]
# Do a check for missing paths. Note that this isn't possible for
# all compiler entries, since their "paths" might actually be
# exe names like "cc" that depend on modules being loaded. Cray
# manifest entries are always paths though.
missing_paths = []
for path in paths.values():
if not os.path.exists(path):
missing_paths.append(path)
# to instantiate a compiler class we may need a concrete version: # to instantiate a compiler class we may need a concrete version:
version = "={}".format(entry["version"]) version = "={}".format(entry["version"])
arch = entry["arch"] arch = entry["arch"]
@@ -57,8 +80,18 @@ def compiler_from_entry(entry):
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version) spec = spack.spec.CompilerSpec(compiler_cls.name, version)
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
return compiler_cls(spec, operating_system, target, paths)
if missing_paths:
warnings.warn(
"Manifest entry refers to nonexistent paths:\n\t"
+ "\n\t".join(missing_paths)
+ f"\nfor {str(spec)}"
+ f"\nin {manifest_path}"
+ "\nPlease report this issue"
)
return compiler_cls(spec, operating_system, target, path_list)
def spec_from_entry(entry): def spec_from_entry(entry):
@@ -187,12 +220,21 @@ def read(path, apply_updates):
tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs)))) tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
compilers = list() compilers = list()
if "compilers" in json_data: if "compilers" in json_data:
compilers.extend(compiler_from_entry(x) for x in json_data["compilers"]) compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers)))) tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
# Filter out the compilers that already appear in the configuration # Filter out the compilers that already appear in the configuration
compilers = spack.compilers.select_new_compilers(compilers) compilers = spack.compilers.select_new_compilers(compilers)
if apply_updates and compilers: if apply_updates and compilers:
spack.compilers.add_compilers_to_config(compilers, init_config=False) for compiler in compilers:
try:
spack.compilers.add_compilers_to_config([compiler], init_config=False)
except Exception:
warnings.warn(
f"Could not add compiler {str(compiler.spec)}: "
f"\n\tfrom manifest: {path}"
"\nPlease reexecute with 'spack -d' and include the stack trace"
)
tty.debug(f"Include this\n{traceback.format_exc()}")
if apply_updates: if apply_updates:
for spec in specs.values(): for spec in specs.values():
spack.store.STORE.db.add(spec, directory_layout=None) spack.store.STORE.db.add(spec, directory_layout=None)

View File

@@ -203,19 +203,6 @@ def fail():
assert "Skipping manifest and continuing" in output assert "Skipping manifest and continuing" in output
def test_find_external_nonempty_default_manifest_dir(
mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
):
"""The user runs 'spack external find'; the default manifest directory
contains a manifest file. Ensure that the specs are read.
"""
monkeypatch.setenv("PATH", "")
monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
external("find")
specs = spack.store.STORE.db.query("hwloc")
assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
def test_find_external_merge(mutable_config, mutable_mock_repo): def test_find_external_merge(mutable_config, mutable_mock_repo):
"""Check that 'spack find external' doesn't overwrite an existing spec """Check that 'spack find external' doesn't overwrite an existing spec
entry in packages.yaml. entry in packages.yaml.

View File

@@ -1714,17 +1714,6 @@ def brand_new_binary_cache():
) )
@pytest.fixture
def directory_with_manifest(tmpdir):
"""Create a manifest file in a directory. Used by 'spack external'."""
with tmpdir.as_cwd():
test_db_fname = "external-db.json"
with open(test_db_fname, "w") as db_file:
json.dump(spack.test.cray_manifest.create_manifest_content(), db_file)
yield str(tmpdir)
@pytest.fixture() @pytest.fixture()
def noncyclical_dir_structure(tmpdir): def noncyclical_dir_structure(tmpdir):
""" """

View File

@@ -23,53 +23,6 @@
import spack.store import spack.store
from spack.cray_manifest import compiler_from_entry, entries_to_specs from spack.cray_manifest import compiler_from_entry, entries_to_specs
example_x_json_str = """\
{
"name": "packagex",
"hash": "hash-of-x",
"prefix": "/path/to/packagex-install/",
"version": "1.0",
"arch": {
"platform": "linux",
"platform_os": "centos8",
"target": {
"name": "haswell"
}
},
"compiler": {
"name": "gcc",
"version": "10.2.0.cray"
},
"dependencies": {
"packagey": {
"hash": "hash-of-y",
"type": ["link"]
}
},
"parameters": {
"precision": ["double", "float"]
}
}
"""
example_compiler_entry = """\
{
"name": "gcc",
"prefix": "/path/to/compiler/",
"version": "7.5.0",
"arch": {
"os": "centos8",
"target": "x86_64"
},
"executables": {
"cc": "/path/to/compiler/cc",
"cxx": "/path/to/compiler/cxx",
"fc": "/path/to/compiler/fc"
}
}
"""
class JsonSpecEntry: class JsonSpecEntry:
def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters): def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters):
@@ -104,16 +57,19 @@ def __init__(self, platform, os, target):
self.os = os self.os = os
self.target = target self.target = target
def to_dict(self): def spec_json(self):
return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}} return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}}
def compiler_json(self):
return {"os": self.os, "target": self.target}
class JsonCompilerEntry: class JsonCompilerEntry:
def __init__(self, name, version, arch=None, executables=None): def __init__(self, name, version, arch=None, executables=None):
self.name = name self.name = name
self.version = version self.version = version
if not arch: if not arch:
arch = {"os": "centos8", "target": "x86_64"} arch = JsonArchEntry("anyplatform", "anyos", "anytarget")
if not executables: if not executables:
executables = { executables = {
"cc": "/path/to/compiler/cc", "cc": "/path/to/compiler/cc",
@@ -127,7 +83,7 @@ def compiler_json(self):
return { return {
"name": self.name, "name": self.name,
"version": self.version, "version": self.version,
"arch": self.arch, "arch": self.arch.compiler_json(),
"executables": self.executables, "executables": self.executables,
} }
@@ -138,22 +94,58 @@ def spec_json(self):
return {"name": self.name, "version": self.version} return {"name": self.name, "version": self.version}
_common_arch = JsonArchEntry(platform="linux", os="centos8", target="haswell").to_dict() @pytest.fixture
def _common_arch(test_platform):
return JsonArchEntry(
platform=test_platform.name,
os=test_platform.front_os,
target=test_platform.target("fe").name,
)
# Intended to match example_compiler_entry above
_common_compiler = JsonCompilerEntry( @pytest.fixture
def _common_compiler(_common_arch):
return JsonCompilerEntry(
name="gcc", name="gcc",
version="10.2.0.cray", version="10.2.0.2112",
arch={"os": "centos8", "target": "x86_64"}, arch=_common_arch,
executables={ executables={
"cc": "/path/to/compiler/cc", "cc": "/path/to/compiler/cc",
"cxx": "/path/to/compiler/cxx", "cxx": "/path/to/compiler/cxx",
"fc": "/path/to/compiler/fc", "fc": "/path/to/compiler/fc",
}, },
) )
def test_compatibility(): @pytest.fixture
def _other_compiler(_common_arch):
return JsonCompilerEntry(
name="clang",
version="3.0.0",
arch=_common_arch,
executables={
"cc": "/path/to/compiler/clang",
"cxx": "/path/to/compiler/clang++",
"fc": "/path/to/compiler/flang",
},
)
@pytest.fixture
def _raw_json_x(_common_arch):
return {
"name": "packagex",
"hash": "hash-of-x",
"prefix": "/path/to/packagex-install/",
"version": "1.0",
"arch": _common_arch.spec_json(),
"compiler": {"name": "gcc", "version": "10.2.0.2112"},
"dependencies": {"packagey": {"hash": "hash-of-y", "type": ["link"]}},
"parameters": {"precision": ["double", "float"]},
}
def test_manifest_compatibility(_common_arch, _common_compiler, _raw_json_x):
"""Make sure that JsonSpecEntry outputs the expected JSON structure """Make sure that JsonSpecEntry outputs the expected JSON structure
by comparing it with JSON parsed from an example string. This by comparing it with JSON parsed from an example string. This
ensures that the testing objects like JsonSpecEntry produce the ensures that the testing objects like JsonSpecEntry produce the
@@ -164,7 +156,7 @@ def test_compatibility():
hash="hash-of-y", hash="hash-of-y",
prefix="/path/to/packagey-install/", prefix="/path/to/packagey-install/",
version="1.0", version="1.0",
arch=_common_arch, arch=_common_arch.spec_json(),
compiler=_common_compiler.spec_json(), compiler=_common_compiler.spec_json(),
dependencies={}, dependencies={},
parameters={}, parameters={},
@@ -175,23 +167,44 @@ def test_compatibility():
hash="hash-of-x", hash="hash-of-x",
prefix="/path/to/packagex-install/", prefix="/path/to/packagex-install/",
version="1.0", version="1.0",
arch=_common_arch, arch=_common_arch.spec_json(),
compiler=_common_compiler.spec_json(), compiler=_common_compiler.spec_json(),
dependencies=dict([y.as_dependency(deptypes=["link"])]), dependencies=dict([y.as_dependency(deptypes=["link"])]),
parameters={"precision": ["double", "float"]}, parameters={"precision": ["double", "float"]},
) )
x_from_entry = x.to_dict() x_from_entry = x.to_dict()
x_from_str = json.loads(example_x_json_str) assert x_from_entry == _raw_json_x
assert x_from_entry == x_from_str
def test_compiler_from_entry(): def test_compiler_from_entry():
compiler_data = json.loads(example_compiler_entry) compiler_data = json.loads(
compiler_from_entry(compiler_data) """\
{
"name": "gcc",
"prefix": "/path/to/compiler/",
"version": "7.5.0",
"arch": {
"os": "centos8",
"target": "x86_64"
},
"executables": {
"cc": "/path/to/compiler/cc",
"cxx": "/path/to/compiler/cxx",
"fc": "/path/to/compiler/fc"
}
}
"""
)
compiler = compiler_from_entry(compiler_data, "/example/file")
assert compiler.cc == "/path/to/compiler/cc"
assert compiler.cxx == "/path/to/compiler/cxx"
assert compiler.fc == "/path/to/compiler/fc"
assert compiler.operating_system == "centos8"
def generate_openmpi_entries(): @pytest.fixture
def generate_openmpi_entries(_common_arch, _common_compiler):
"""Generate two example JSON entries that refer to an OpenMPI """Generate two example JSON entries that refer to an OpenMPI
installation and a hwloc dependency. installation and a hwloc dependency.
""" """
@@ -202,7 +215,7 @@ def generate_openmpi_entries():
hash="hwlocfakehashaaa", hash="hwlocfakehashaaa",
prefix="/path/to/hwloc-install/", prefix="/path/to/hwloc-install/",
version="2.0.3", version="2.0.3",
arch=_common_arch, arch=_common_arch.spec_json(),
compiler=_common_compiler.spec_json(), compiler=_common_compiler.spec_json(),
dependencies={}, dependencies={},
parameters={}, parameters={},
@@ -216,26 +229,25 @@ def generate_openmpi_entries():
hash="openmpifakehasha", hash="openmpifakehasha",
prefix="/path/to/openmpi-install/", prefix="/path/to/openmpi-install/",
version="4.1.0", version="4.1.0",
arch=_common_arch, arch=_common_arch.spec_json(),
compiler=_common_compiler.spec_json(), compiler=_common_compiler.spec_json(),
dependencies=dict([hwloc.as_dependency(deptypes=["link"])]), dependencies=dict([hwloc.as_dependency(deptypes=["link"])]),
parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True}, parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True},
) )
return [openmpi, hwloc] return list(x.to_dict() for x in [openmpi, hwloc])
def test_generate_specs_from_manifest(): def test_generate_specs_from_manifest(generate_openmpi_entries):
"""Given JSON entries, check that we can form a set of Specs """Given JSON entries, check that we can form a set of Specs
including dependency references. including dependency references.
""" """
entries = list(x.to_dict() for x in generate_openmpi_entries()) specs = entries_to_specs(generate_openmpi_entries)
specs = entries_to_specs(entries)
(openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi") (openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi")
assert openmpi_spec["hwloc"] assert openmpi_spec["hwloc"]
def test_translate_cray_platform_to_linux(monkeypatch): def test_translate_cray_platform_to_linux(monkeypatch, _common_compiler):
"""Manifests might list specs on newer Cray platforms as being "cray", """Manifests might list specs on newer Cray platforms as being "cray",
but Spack identifies such platforms as "linux". Make sure we but Spack identifies such platforms as "linux". Make sure we
automatically transform these entries. automatically transform these entries.
@@ -247,13 +259,13 @@ def the_host_is_linux():
monkeypatch.setattr(spack.platforms, "host", the_host_is_linux) monkeypatch.setattr(spack.platforms, "host", the_host_is_linux)
cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64").to_dict() cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64")
spec_json = JsonSpecEntry( spec_json = JsonSpecEntry(
name="cray-mpich", name="cray-mpich",
hash="craympichfakehashaaa", hash="craympichfakehashaaa",
prefix="/path/to/cray-mpich/", prefix="/path/to/cray-mpich/",
version="1.0.0", version="1.0.0",
arch=cray_arch, arch=cray_arch.spec_json(),
compiler=_common_compiler.spec_json(), compiler=_common_compiler.spec_json(),
dependencies={}, dependencies={},
parameters={}, parameters={},
@@ -263,14 +275,15 @@ def the_host_is_linux():
assert spec.architecture.platform == "linux" assert spec.architecture.platform == "linux"
def test_translate_compiler_name(): def test_translate_compiler_name(_common_arch):
nvidia_compiler = JsonCompilerEntry( nvidia_compiler = JsonCompilerEntry(
name="nvidia", name="nvidia",
version="19.1", version="19.1",
arch=_common_arch,
executables={"cc": "/path/to/compiler/nvc", "cxx": "/path/to/compiler/nvc++"}, executables={"cc": "/path/to/compiler/nvc", "cxx": "/path/to/compiler/nvc++"},
) )
compiler = compiler_from_entry(nvidia_compiler.compiler_json()) compiler = compiler_from_entry(nvidia_compiler.compiler_json(), "/example/file")
assert compiler.name == "nvhpc" assert compiler.name == "nvhpc"
spec_json = JsonSpecEntry( spec_json = JsonSpecEntry(
@@ -278,7 +291,7 @@ def test_translate_compiler_name():
hash="hwlocfakehashaaa", hash="hwlocfakehashaaa",
prefix="/path/to/hwloc-install/", prefix="/path/to/hwloc-install/",
version="2.0.3", version="2.0.3",
arch=_common_arch, arch=_common_arch.spec_json(),
compiler=nvidia_compiler.spec_json(), compiler=nvidia_compiler.spec_json(),
dependencies={}, dependencies={},
parameters={}, parameters={},
@@ -288,18 +301,18 @@ def test_translate_compiler_name():
assert spec.compiler.name == "nvhpc" assert spec.compiler.name == "nvhpc"
def test_failed_translate_compiler_name(): def test_failed_translate_compiler_name(_common_arch):
unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0") unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0")
with pytest.raises(spack.compilers.UnknownCompilerError): with pytest.raises(spack.compilers.UnknownCompilerError):
compiler_from_entry(unknown_compiler.compiler_json()) compiler_from_entry(unknown_compiler.compiler_json(), "/example/file")
spec_json = JsonSpecEntry( spec_json = JsonSpecEntry(
name="packagey", name="packagey",
hash="hash-of-y", hash="hash-of-y",
prefix="/path/to/packagey-install/", prefix="/path/to/packagey-install/",
version="1.0", version="1.0",
arch=_common_arch, arch=_common_arch.spec_json(),
compiler=unknown_compiler.spec_json(), compiler=unknown_compiler.spec_json(),
dependencies={}, dependencies={},
parameters={}, parameters={},
@@ -309,7 +322,8 @@ def test_failed_translate_compiler_name():
entries_to_specs([spec_json]) entries_to_specs([spec_json])
def create_manifest_content(): @pytest.fixture
def manifest_content(generate_openmpi_entries, _common_compiler, _other_compiler):
return { return {
# Note: the cray_manifest module doesn't use the _meta section right # Note: the cray_manifest module doesn't use the _meta section right
# now, but it is anticipated to be useful # now, but it is anticipated to be useful
@@ -319,43 +333,70 @@ def create_manifest_content():
"schema-version": "1.3", "schema-version": "1.3",
"cpe-version": "22.06", "cpe-version": "22.06",
}, },
"specs": list(x.to_dict() for x in generate_openmpi_entries()), "specs": generate_openmpi_entries,
"compilers": [_common_compiler.compiler_json()], "compilers": [_common_compiler.compiler_json(), _other_compiler.compiler_json()],
} }
@pytest.mark.only_original( def test_read_cray_manifest(
"The ASP-based concretizer is currently picky about OS matching and will fail." tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
) ):
def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_database):
"""Check that (a) we can read the cray manifest and add it to the Spack """Check that (a) we can read the cray manifest and add it to the Spack
Database and (b) we can concretize specs based on that. Database and (b) we can concretize specs based on that.
""" """
with tmpdir.as_cwd(): with tmpdir.as_cwd():
test_db_fname = "external-db.json" test_db_fname = "external-db.json"
with open(test_db_fname, "w") as db_file: with open(test_db_fname, "w") as db_file:
json.dump(create_manifest_content(), db_file) json.dump(manifest_content, db_file)
cray_manifest.read(test_db_fname, True) cray_manifest.read(test_db_fname, True)
query_specs = spack.store.STORE.db.query("openmpi") query_specs = spack.store.STORE.db.query("openmpi")
assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs) assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
concretized_specs = spack.cmd.parse_specs( concretized_specs = spack.cmd.parse_specs(
"depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64" " ^/openmpifakehasha".split(), "depends-on-openmpi ^/openmpifakehasha".split(), concretize=True
concretize=True,
) )
assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa" assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa"
@pytest.mark.only_original( def test_read_cray_manifest_add_compiler_failure(
"The ASP-based concretizer is currently picky about OS matching and will fail." tmpdir, mutable_config, mock_packages, mutable_database, manifest_content, monkeypatch
) ):
"""Check that cray manifest can be read even if some compilers cannot
be added.
"""
orig_add_compilers_to_config = spack.compilers.add_compilers_to_config
class fail_for_clang:
def __init__(self):
self.called_with_clang = False
def __call__(self, compilers, **kwargs):
if any(x.name == "clang" for x in compilers):
self.called_with_clang = True
raise Exception()
return orig_add_compilers_to_config(compilers, **kwargs)
checker = fail_for_clang()
monkeypatch.setattr(spack.compilers, "add_compilers_to_config", checker)
with tmpdir.as_cwd():
test_db_fname = "external-db.json"
with open(test_db_fname, "w") as db_file:
json.dump(manifest_content, db_file)
cray_manifest.read(test_db_fname, True)
query_specs = spack.store.STORE.db.query("openmpi")
assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
assert checker.called_with_clang
def test_read_cray_manifest_twice_no_compiler_duplicates( def test_read_cray_manifest_twice_no_compiler_duplicates(
tmpdir, mutable_config, mock_packages, mutable_database tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
): ):
with tmpdir.as_cwd(): with tmpdir.as_cwd():
test_db_fname = "external-db.json" test_db_fname = "external-db.json"
with open(test_db_fname, "w") as db_file: with open(test_db_fname, "w") as db_file:
json.dump(create_manifest_content(), db_file) json.dump(manifest_content, db_file)
# Read the manifest twice # Read the manifest twice
cray_manifest.read(test_db_fname, True) cray_manifest.read(test_db_fname, True)
@@ -363,7 +404,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(
compilers = spack.compilers.all_compilers() compilers = spack.compilers.all_compilers()
filtered = list( filtered = list(
c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.cray") c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112")
) )
assert len(filtered) == 1 assert len(filtered) == 1
@@ -423,3 +464,27 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable
with pytest.raises(cray_manifest.ManifestValidationError) as e: with pytest.raises(cray_manifest.ManifestValidationError) as e:
cray_manifest.read(invalid_schema_path, True) cray_manifest.read(invalid_schema_path, True)
str(e) str(e)
@pytest.fixture
def directory_with_manifest(tmpdir, manifest_content):
"""Create a manifest file in a directory. Used by 'spack external'."""
with tmpdir.as_cwd():
test_db_fname = "external-db.json"
with open(test_db_fname, "w") as db_file:
json.dump(manifest_content, db_file)
yield str(tmpdir)
def test_find_external_nonempty_default_manifest_dir(
mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
):
"""The user runs 'spack external find'; the default manifest directory
contains a manifest file. Ensure that the specs are read.
"""
monkeypatch.setenv("PATH", "")
monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
spack.cmd.external._collect_and_consume_cray_manifest_files(ignore_default_dir=False)
specs = spack.store.STORE.db.query("hwloc")
assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)