Compare commits
24 Commits
develop-20 ... v0.20.0
- e493ab31c6
- e0f45b33e9
- bb61ecb9b9
- 9694225b80
- 3b15e7bf41
- ac5f0cc340
- f67840511a
- bd9cfa3a47
- 96c262b13e
- d22fd79a0b
- 8cf4bf7559
- 14a703a4bb
- d7726f80e8
- d69c3a6ab7
- 1fd964140d
- c9bab946d4
- 74a5cd2bb0
- 151ce6f923
- 1c31ce82af
- caab2cbfd2
- a6f41006eb
- 18b4670d9f
- 322fe415e4
- 096bfa4ba9
.github/workflows/unit_tests.yaml (vendored, 1 change)

```diff
@@ -137,6 +137,7 @@ jobs:
       - name: Setup repo and non-root user
         run: |
           git --version
+          git config --global --add safe.directory /__w/spack/spack
           git fetch --unshallow
           . .github/workflows/setup_git.sh
           useradd spack-test
```
.github/workflows/valid-style.yml (vendored, 1 change)

```diff
@@ -72,6 +72,7 @@ jobs:
       - name: Setup repo and non-root user
         run: |
           git --version
+          git config --global --add safe.directory /__w/spack/spack
           git fetch --unshallow
           . .github/workflows/setup_git.sh
           useradd spack-test
```
```diff
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.20.0.dev0"
+__version__ = "0.20.0"
 spack_version = __version__
```
```diff
@@ -1216,6 +1216,9 @@ def child_fun():
     return child_result


+CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
+
+
 def get_package_context(traceback, context=3):
     """Return some context for an error message when the build fails.

@@ -1244,32 +1247,38 @@ def make_stack(tb, stack=None):

     stack = make_stack(traceback)

+    basenames = tuple(base.__name__ for base in CONTEXT_BASES)
     for tb in stack:
         frame = tb.tb_frame
         if "self" in frame.f_locals:
-            # Find the first proper subclass of PackageBase.
+            # Find the first proper subclass of the PackageBase or BaseBuilder, but
+            # don't provide context if the code is actually in the base classes.
             obj = frame.f_locals["self"]
-            if isinstance(obj, spack.package_base.PackageBase):
+            func = getattr(obj, tb.tb_frame.f_code.co_name, "")
+            if func:
+                typename, *_ = func.__qualname__.partition(".")
+
+            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
                 break
     else:
         return None

     # We found obj, the Package implementation we care about.
     # Point out the location in the install method where we failed.
-    lines = [
-        "{0}:{1:d}, in {2}:".format(
-            inspect.getfile(frame.f_code),
-            frame.f_lineno - 1,  # subtract 1 because f_lineno is 0-indexed
-            frame.f_code.co_name,
-        )
-    ]
+    filename = inspect.getfile(frame.f_code)
+    lineno = frame.f_lineno
+    if os.path.basename(filename) == "package.py":
+        # subtract 1 because we inject a magic import at the top of package files.
+        # TODO: get rid of the magic import.
+        lineno -= 1
+
+    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

     # Build a message showing context in the install method.
     sourcelines, start = inspect.getsourcelines(frame)

     # Calculate lineno of the error relative to the start of the function.
-    # Subtract 1 because f_lineno is 0-indexed.
-    fun_lineno = frame.f_lineno - start - 1
+    fun_lineno = lineno - start
     start_ctx = max(0, fun_lineno - context)
     sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
```
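A note on the qualname check added above: it distinguishes methods defined on a package subclass from methods merely inherited from the base classes, so error context is only shown for code the packager actually wrote. A minimal standalone sketch of the idea, with hypothetical `Base`/`MyPkg` classes standing in for Spack's `PackageBase`/`BaseBuilder`:

```python
class Base:
    def helper(self):  # defined on the base class
        pass


class MyPkg(Base):
    def install(self):  # defined on the subclass
        pass


def frame_belongs_to_subclass(obj, method_name, bases=(Base,)):
    basenames = tuple(b.__name__ for b in bases)
    func = getattr(obj, method_name, None)
    if func is None:
        return False
    # __qualname__ looks like "MyPkg.install" or "Base.helper";
    # the part before the first "." names the defining class.
    typename, _, _ = func.__qualname__.partition(".")
    return isinstance(obj, bases) and typename not in basenames


pkg = MyPkg()
assert frame_belongs_to_subclass(pkg, "install")      # True: defined on MyPkg
assert not frame_belongs_to_subclass(pkg, "helper")   # False: inherited from Base
```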
```diff
@@ -756,6 +756,7 @@ def generate_gitlab_ci_yaml(
     # Get the joined "ci" config with all of the current scopes resolved
     ci_config = cfg.get("ci")

+    config_deprecated = False
     if not ci_config:
         tty.warn("Environment does not have `ci` a configuration")
         gitlabci_config = yaml_root.get("gitlab-ci")
@@ -768,6 +769,7 @@ def generate_gitlab_ci_yaml(
         )
         translate_deprecated_config(gitlabci_config)
         ci_config = gitlabci_config
+        config_deprecated = True

     # Default target is gitlab...and only target is gitlab
     if not ci_config.get("target", "gitlab") == "gitlab":
@@ -831,6 +833,14 @@ def generate_gitlab_ci_yaml(
     # Values: "spack_pull_request", "spack_protected_branch", or not set
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

+    copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
+    if copy_only_pipeline and config_deprecated:
+        tty.warn(
+            "SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
+            "deprecated ci configuration, a no-op pipeline will be generated\n",
+            "instead.",
+        )
+
     if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
         tty.die("spack ci generate requires an env containing a mirror")

@@ -1207,7 +1217,7 @@ def main_script_replacements(cmd):
                 ).format(c_spec, release_spec)
                 tty.debug(debug_msg)

-            if prune_dag and not rebuild_spec and spack_pipeline_type != "spack_copy_only":
+            if prune_dag and not rebuild_spec and not copy_only_pipeline:
                 tty.debug(
                     "Pruning {0}/{1}, does not need rebuild.".format(
                         release_spec.name, release_spec.dag_hash()
@@ -1298,7 +1308,7 @@ def main_script_replacements(cmd):
                 max_length_needs = length_needs
                 max_needs_job = job_name

-        if spack_pipeline_type != "spack_copy_only":
+        if not copy_only_pipeline:
             output_object[job_name] = job_object
             job_id += 1

@@ -1330,7 +1340,7 @@ def main_script_replacements(cmd):
         "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
     }

-    if spack_pipeline_type == "spack_copy_only":
+    if copy_only_pipeline and not config_deprecated:
         stage_names.append("copy")
         sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
         sync_job["stage"] = "copy"
@@ -1474,12 +1484,18 @@ def main_script_replacements(cmd):
         sorted_output = cinw.needs_to_dependencies(sorted_output)
     else:
         # No jobs were generated
-        tty.debug("No specs to rebuild, generating no-op job")
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]

         noop_job["retry"] = service_job_retries

-        sorted_output = {"no-specs-to-rebuild": noop_job}
+        if copy_only_pipeline and config_deprecated:
+            tty.debug("Generating no-op job as copy-only is unsupported here.")
+            noop_job["script"] = [
+                'echo "copy-only pipelines are not supported with deprecated ci configs"'
+            ]
+            sorted_output = {"unsupported-copy": noop_job}
+        else:
+            tty.debug("No specs to rebuild, generating no-op job")
+            sorted_output = {"no-specs-to-rebuild": noop_job}

     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
```
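Condensed, the change above computes the pipeline type once and branches on booleans everywhere else. A standalone sketch with simplified names (`prune_dag`, `rebuild_spec` and the prints are placeholders, not the real function body):

```python
import os

# Compute the flags once, up front.
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
config_deprecated = False  # True when a legacy `gitlab-ci:` section was translated

# Call sites then test the precomputed boolean instead of the string:
prune_dag, rebuild_spec = True, False
if prune_dag and not rebuild_spec and not copy_only_pipeline:
    print("prune this job")

# And the copy stage is only emitted for modern `ci:` configs:
if copy_only_pipeline and not config_deprecated:
    print("emit copy stage")
```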
```diff
@@ -347,7 +347,7 @@ def iter_groups(specs, indent, all_headers):
             spack.spec.architecture_color,
             architecture if architecture else "no arch",
             spack.spec.compiler_color,
-            f"{compiler.name}@{compiler.version}" if compiler else "no compiler",
+            f"{compiler.display_str}" if compiler else "no compiler",
         )

 # Sometimes we want to display specs that are not yet concretized.
```
```diff
@@ -98,7 +98,7 @@ def compiler_find(args):
         config = spack.config.config
         filename = config.get_config_filename(args.scope, "compilers")
         tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
-        colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
+        colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
     else:
         tty.msg("Found no new compilers")
     tty.msg("Compilers are defined in the following files:")
@@ -112,13 +112,13 @@ def compiler_remove(args):
         tty.die("No compilers match spec %s" % cspec)
     elif not args.all and len(compilers) > 1:
         tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
-        colify(reversed(sorted([c.spec for c in compilers])), indent=4)
+        colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
         tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
         sys.exit(1)

     for compiler in compilers:
         spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
-        tty.msg("Removed compiler %s" % compiler.spec)
+        tty.msg("Removed compiler %s" % compiler.spec.display_str)


 def compiler_info(args):
@@ -130,7 +130,7 @@ def compiler_info(args):
         tty.die("No compilers match spec %s" % cspec)
     else:
         for c in compilers:
-            print(str(c.spec) + ":")
+            print(c.spec.display_str + ":")
             print("\tpaths:")
             for cpath in ["cc", "cxx", "f77", "fc"]:
                 print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
@@ -188,7 +188,7 @@ def compiler_list(args):
             os_str += "-%s" % target
         cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
         tty.hline(colorize(cname), char="-")
-        colify(reversed(sorted(c.spec for c in compilers)))
+        colify(reversed(sorted(c.spec.display_str for c in compilers)))


 def compiler(parser, args):
```
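All four call sites above switch from `str(c.spec)` to `c.spec.display_str`. The motivation, sketched with a stand-in class rather than Spack's real `CompilerSpec`: in v0.20 the canonical string for an exact version uses `@=`, which is noisy in human-facing listings, so the commands print the friendlier form instead.

```python
class FakeCompilerSpec:
    """Toy stand-in, assuming a concrete (exact-version) compiler spec."""

    def __init__(self, name, version):
        self.name, self.version = name, version

    def __str__(self):
        return f"{self.name}@={self.version}"  # canonical: "@=" marks an exact version

    @property
    def display_str(self):
        return f"{self.name}@{self.version}"   # friendlier for terminal output


c = FakeCompilerSpec("gcc", "8.4.0")
print(str(c))         # gcc@=8.4.0
print(c.display_str)  # gcc@8.4.0
```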
```diff
@@ -302,7 +302,7 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None

-    _env_create(
+    env = _env_create(
         args.create_env,
         init_file=args.envfile,
         dir=args.dir,
@@ -310,6 +310,9 @@ def env_create(args):
         keep_relative=args.keep_relative,
     )

+    # Generate views, only really useful for environments created from spack.lock files.
+    env.regenerate_views()
+

 def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
     """Create a new environment, with an optional yaml description.
```
```diff
@@ -116,21 +116,23 @@ def one_spec_or_raise(specs):


 def check_module_set_name(name):
-    modules_config = spack.config.get("modules")
-    valid_names = set(
-        [
-            key
-            for key, value in modules_config.items()
-            if isinstance(value, dict) and value.get("enable", [])
-        ]
-    )
-    if "enable" in modules_config and modules_config["enable"]:
-        valid_names.add("default")
+    modules = spack.config.get("modules")
+    if name != "prefix_inspections" and name in modules:
+        return

-    if name not in valid_names:
-        msg = "Cannot use invalid module set %s." % name
-        msg += " Valid module set names are %s" % list(valid_names)
-        raise spack.config.ConfigError(msg)
+    names = [k for k in modules if k != "prefix_inspections"]
+
+    if not names:
+        raise spack.config.ConfigError(
+            f"Module set configuration is missing. Cannot use module set '{name}'"
+        )
+
+    pretty_names = "', '".join(names)
+
+    raise spack.config.ConfigError(
+        f"Cannot use invalid module set '{name}'.",
+        f"Valid module set names are: '{pretty_names}'.",
+    )


 _missing_modules_warning = (
```
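The rewritten validator accepts any configured set name other than the reserved `prefix_inspections` key and raises otherwise. A standalone sketch of the same logic, using a plain dict and `ValueError` in place of Spack's configuration object and `ConfigError`:

```python
modules = {
    "prefix_inspections": {"./bin": ["PATH"]},  # reserved key, not a module set
    "default": {},
    "site": {},
}


def check_module_set_name(name):
    if name != "prefix_inspections" and name in modules:
        return  # valid module set name
    names = [k for k in modules if k != "prefix_inspections"]
    if not names:
        raise ValueError(f"Module set configuration is missing. Cannot use module set '{name}'")
    pretty = "', '".join(names)
    raise ValueError(f"Cannot use invalid module set '{name}'. Valid names are: '{pretty}'.")


check_module_set_name("default")              # passes silently
# check_module_set_name("prefix_inspections")  # would raise: reserved key
# check_module_set_name("third")               # would raise: not configured
```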
```diff
@@ -25,7 +25,7 @@

 # tutorial configuration parameters
-tutorial_branch = "releases/v0.19"
+tutorial_branch = "releases/v0.20"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
```
```diff
@@ -30,7 +30,7 @@

 def get_valid_fortran_pth(comp_ver):
-    cl_ver = str(comp_ver).split("@")[1]
+    cl_ver = str(comp_ver)
     sort_fn = lambda fc_ver: StrictVersion(fc_ver)
     sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
     for ver in sort_fc_ver:
@@ -75,7 +75,7 @@ class Msvc(Compiler):
     # file based on compiler executable path.

     def __init__(self, *args, **kwargs):
-        new_pth = [pth if pth else get_valid_fortran_pth(args[0]) for pth in args[3]]
+        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
         args[3][:] = new_pth
         super(Msvc, self).__init__(*args, **kwargs)
         if os.getenv("ONEAPI_ROOT"):
```
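A toy illustration (not Spack's API) of the parsing problem fixed above: with the `@=` notation for exact versions, naively splitting the spec string on `@` leaves a stray `=` in front of the version number, which then fails version parsing. The fix sidesteps string surgery by passing the version object (`args[0].version`) and simply stringifying it.

```python
spec_str = "msvc@=19.29.30133"  # hypothetical canonical compiler-spec string

broken = spec_str.split("@")[1]    # "=19.29.30133": the "=" breaks version parsing
fixed = spec_str.split("@=")[-1]   # "19.29.30133"; the real fix avoids parsing entirely

print(broken, "->", fixed)
```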
```diff
@@ -1402,6 +1402,10 @@ def _concretize_together_where_possible(
         if not new_user_specs:
             return []

+        old_concrete_to_abstract = {
+            concrete: abstract for (abstract, concrete) in self.concretized_specs()
+        }
+
         self.concretized_user_specs = []
         self.concretized_order = []
         self.specs_by_hash = {}
@@ -1413,11 +1417,13 @@ def _concretize_together_where_possible(

         result = []
         for abstract, concrete in sorted(result_by_user_spec.items()):
+            # If the "abstract" spec is a concrete spec from the previous concretization
+            # translate it back to an abstract spec. Otherwise, keep the abstract spec
+            abstract = old_concrete_to_abstract.get(abstract, abstract)
             if abstract in new_user_specs:
                 result.append((abstract, concrete))
-            else:
-                assert (abstract, concrete) in result
             self._add_concrete_spec(abstract, concrete)
+
         return result

     def _concretize_together(
@@ -1436,7 +1442,7 @@ def _concretize_together(
         self.specs_by_hash = {}

         try:
-            concrete_specs = spack.concretize.concretize_specs_together(
+            concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together(
                 *specs_to_concretize, tests=tests
             )
         except spack.error.UnsatisfiableSpecError as e:
@@ -1455,11 +1461,14 @@ def _concretize_together(
             )
             raise

-        # zip truncates the longer list, which is exactly what we want here
-        concretized_specs = [x for x in zip(new_user_specs | kept_user_specs, concrete_specs)]
+        # set() | set() does not preserve ordering, even though sets are ordered
+        ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
+        concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)]
         for abstract, concrete in concretized_specs:
             self._add_concrete_spec(abstract, concrete)
-        return concretized_specs
+
+        # zip truncates the longer list, which is exactly what we want here
+        return list(zip(new_user_specs, concrete_specs))

     def _concretize_separately(self, tests=False):
         """Concretization strategy that concretizes separately one
```
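The ordering comment above can be made concrete: the elements of a set union are right, but their iteration order is whatever the hash table yields, so zipping the union against an ordered list of concretized specs could mispair abstract and concrete specs. A toy illustration, with dict-backed insertion-ordered collections standing in for the ordered spec containers (Python's built-in `set` is unordered, which is the point):

```python
new_user_specs = dict.fromkeys(["zlib", "mpileaks", "libelf"])
kept_user_specs = dict.fromkeys(["cmake"])

# set(...) | set(...) forgets insertion order:
union_order = list(set(new_user_specs) | set(kept_user_specs))
# list concatenation keeps new specs first and kept specs last:
list_order = list(new_user_specs) + list(kept_user_specs)

assert sorted(union_order) == sorted(list_order)              # same elements either way
assert list_order == ["zlib", "mpileaks", "libelf", "cmake"]  # only this order is guaranteed
```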
```diff
@@ -215,6 +215,31 @@ def print_message(logger: LogType, msg: str, verbose: bool = False):
         tty.info(msg, format="g")


+def overall_status(current_status: "TestStatus", substatuses: List["TestStatus"]) -> "TestStatus":
+    """Determine the overall status based on the current and associated sub status values.
+
+    Args:
+        current_status: current overall status, assumed to default to PASSED
+        substatuses: status of each test part or overall status of each test spec
+
+    Returns:
+        test status encompassing the main test and all subtests
+    """
+    if current_status in [TestStatus.SKIPPED, TestStatus.NO_TESTS, TestStatus.FAILED]:
+        return current_status
+
+    skipped = 0
+    for status in substatuses:
+        if status == TestStatus.FAILED:
+            return status
+        elif status == TestStatus.SKIPPED:
+            skipped += 1
+
+    if skipped and skipped == len(substatuses):
+        return TestStatus.SKIPPED
+
+    return current_status
+
+
 class PackageTest:
     """The class that manages stand-alone (post-install) package tests."""

@@ -308,14 +333,12 @@ def status(self, name: str, status: "TestStatus", msg: Optional[str] = None):
         # to start with the same name) may not have PASSED. This extra
         # check is used to ensure the containing test part is not claiming
         # to have passed when at least one subpart failed.
-        if status == TestStatus.PASSED:
-            for pname, substatus in self.test_parts.items():
-                if pname != part_name and pname.startswith(part_name):
-                    if substatus == TestStatus.FAILED:
-                        print(f"{substatus}: {part_name}{extra}")
-                        self.test_parts[part_name] = substatus
-                        self.counts[substatus] += 1
-                        return
+        substatuses = []
+        for pname, substatus in self.test_parts.items():
+            if pname != part_name and pname.startswith(part_name):
+                substatuses.append(substatus)
+
+        if substatuses:
+            status = overall_status(status, substatuses)

         print(f"{status}: {part_name}{extra}")
         self.test_parts[part_name] = status
@@ -420,6 +443,25 @@ def summarize(self):
         lines.append(f"{totals:=^80}")
         return lines

+    def write_tested_status(self):
+        """Write the overall status to the tested file.
+
+        If there any test part failures, then the tests failed. If all test
+        parts are skipped, then the tests were skipped. If any tests passed
+        then the tests passed; otherwise, there were not tests executed.
+        """
+        status = TestStatus.NO_TESTS
+        if self.counts[TestStatus.FAILED] > 0:
+            status = TestStatus.FAILED
+        else:
+            skipped = self.counts[TestStatus.SKIPPED]
+            if skipped and self.parts() == skipped:
+                status = TestStatus.SKIPPED
+            elif self.counts[TestStatus.PASSED] > 0:
+                status = TestStatus.PASSED
+
+        _add_msg_to_file(self.tested_file, f"{status.value}")
+

 @contextlib.contextmanager
 def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
@@ -654,8 +696,9 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
         try:
             tests = test_functions(spec.package_class)
         except spack.repo.UnknownPackageError:
-            # some virtuals don't have a package
-            tests = []
+            # Some virtuals don't have a package so we don't want to report
+            # them as not having tests when that isn't appropriate.
+            continue

         if len(tests) == 0:
             tester.status(spec.name, TestStatus.NO_TESTS)
@@ -682,7 +725,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool

     finally:
         if tester.ran_tests():
-            fs.touch(tester.tested_file)
+            tester.write_tested_status()

         # log one more test message to provide a completion timestamp
         # for CDash reporting
@@ -889,20 +932,15 @@ def __call__(self, *args, **kwargs):
                 if remove_directory:
                     shutil.rmtree(test_dir)

-                tested = os.path.exists(self.tested_file_for_spec(spec))
-                if tested:
-                    status = TestStatus.PASSED
-                else:
-                    self.ensure_stage()
-                    if spec.external and not externals:
-                        status = TestStatus.SKIPPED
-                    elif not spec.installed:
-                        status = TestStatus.SKIPPED
-                    else:
-                        status = TestStatus.NO_TESTS
+                status = self.test_status(spec, externals)
                 self.counts[status] += 1
                 self.write_test_result(spec, status)

             except SkipTest:
                 status = TestStatus.SKIPPED
                 self.counts[status] += 1
                 self.write_test_result(spec, TestStatus.SKIPPED)

             except BaseException as exc:
                 status = TestStatus.FAILED
                 self.counts[status] += 1
@@ -939,6 +977,31 @@ def __call__(self, *args, **kwargs):
         if failures:
             raise TestSuiteFailure(failures)

+    def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
+        """Determine the overall test results status for the spec.
+
+        Args:
+            spec: instance of the spec under test
+            externals: ``True`` if externals are to be tested, else ``False``
+
+        Returns:
+            the spec's test status if available or ``None``
+        """
+        tests_status_file = self.tested_file_for_spec(spec)
+        if not os.path.exists(tests_status_file):
+            self.ensure_stage()
+            if spec.external and not externals:
+                status = TestStatus.SKIPPED
+            elif not spec.installed:
+                status = TestStatus.SKIPPED
+            else:
+                status = TestStatus.NO_TESTS
+            return status
+
+        with open(tests_status_file, "r") as f:
+            value = (f.read()).strip("\n")
+            return TestStatus(int(value)) if value else TestStatus.NO_TESTS
+
     def ensure_stage(self):
         """Ensure the test suite stage directory exists."""
         if not os.path.exists(self.stage):
```
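The roll-up rules introduced by `overall_status` can be exercised in isolation. A minimal re-implementation against a stand-in enum (the real `TestStatus` values may differ): a failing subpart fails the whole, all-skipped subparts skip the whole, and anything else leaves the current status alone.

```python
from enum import Enum
from typing import List


class TestStatus(Enum):
    NO_TESTS = 0
    SKIPPED = 1
    FAILED = 2
    PASSED = 3


def overall_status(current: TestStatus, subs: List[TestStatus]) -> TestStatus:
    if current in (TestStatus.SKIPPED, TestStatus.NO_TESTS, TestStatus.FAILED):
        return current  # terminal states win outright
    skipped = 0
    for s in subs:
        if s == TestStatus.FAILED:
            return s  # any failing subpart fails the whole
        if s == TestStatus.SKIPPED:
            skipped += 1
    if skipped and skipped == len(subs):
        return TestStatus.SKIPPED  # everything skipped means skipped
    return current


assert overall_status(TestStatus.PASSED, [TestStatus.PASSED, TestStatus.FAILED]) == TestStatus.FAILED
assert overall_status(TestStatus.PASSED, [TestStatus.SKIPPED, TestStatus.SKIPPED]) == TestStatus.SKIPPED
assert overall_status(TestStatus.PASSED, [TestStatus.PASSED, TestStatus.SKIPPED]) == TestStatus.PASSED
```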
```diff
@@ -170,17 +170,12 @@ def merge_config_rules(configuration, spec):
     Returns:
         dict: actions to be taken on the spec passed as an argument
     """
-    # Get the top-level configuration for the module type we are using
-    module_specific_configuration = copy.deepcopy(configuration)
-
-    # Construct a dictionary with the actions we need to perform on the spec
-    # passed as a parameter
+    # Construct a dictionary with the actions we need to perform on the spec passed as a parameter
     spec_configuration = {}
     # The keyword 'all' is always evaluated first, all the others are
     # evaluated in order of appearance in the module file
-    spec_configuration = module_specific_configuration.pop("all", {})
-    for constraint, action in module_specific_configuration.items():
+    spec_configuration.update(copy.deepcopy(configuration.get("all", {})))
+    for constraint, action in configuration.items():
         if spec.satisfies(constraint):
             if hasattr(constraint, "override") and constraint.override:
                 spec_configuration = {}
@@ -200,14 +195,14 @@ def merge_config_rules(configuration, spec):
     # configuration

     # Hash length in module files
-    hash_length = module_specific_configuration.get("hash_length", 7)
+    hash_length = configuration.get("hash_length", 7)
     spec_configuration["hash_length"] = hash_length

-    verbose = module_specific_configuration.get("verbose", False)
+    verbose = configuration.get("verbose", False)
     spec_configuration["verbose"] = verbose

     # module defaults per-package
-    defaults = module_specific_configuration.get("defaults", [])
+    defaults = configuration.get("defaults", [])
     spec_configuration["defaults"] = defaults

     return spec_configuration
```
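The rewrite above avoids deep-copying the entire module configuration for every spec: only the `all` block is copied, and the remaining keys are read in place. A toy contrast showing that the two patterns produce the same per-spec starting configuration (assuming, as here, that no rule mutates the shared config):

```python
import copy

configuration = {"all": {"autoload": "direct"}, "hash_length": 7, "verbose": False}

# Old pattern: deep-copy everything, then pop "all" out of the copy.
module_specific = copy.deepcopy(configuration)
spec_conf_old = module_specific.pop("all", {})

# New pattern: copy only the "all" block and read the rest in place.
spec_conf_new = {}
spec_conf_new.update(copy.deepcopy(configuration.get("all", {})))
hash_length = configuration.get("hash_length", 7)

assert spec_conf_old == spec_conf_new
assert hash_length == 7
```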
```diff
@@ -7,7 +7,7 @@
 import itertools
 import os.path
 import posixpath
-from typing import Any, Dict
+from typing import Any, Dict, List

 import llnl.util.lang as lang

@@ -56,7 +56,7 @@ def make_context(spec, module_set_name, explicit):
     return LmodContext(conf)


-def guess_core_compilers(name, store=False):
+def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
     """Guesses the list of core compilers installed in the system.

     Args:
@@ -64,21 +64,19 @@ def guess_core_compilers(name, store=False):
             modules.yaml configuration file

     Returns:
-        List of core compilers, if found, or None
+        List of found core compilers
     """
     core_compilers = []
-    for compiler_config in spack.compilers.all_compilers_config():
+    for compiler in spack.compilers.all_compilers():
         try:
-            compiler = compiler_config["compiler"]
             # A compiler is considered to be a core compiler if any of the
             # C, C++ or Fortran compilers reside in a system directory
             is_system_compiler = any(
-                os.path.dirname(x) in spack.util.environment.SYSTEM_DIRS
-                for x in compiler["paths"].values()
-                if x is not None
+                os.path.dirname(getattr(compiler, x, "")) in spack.util.environment.SYSTEM_DIRS
+                for x in ("cc", "cxx", "f77", "fc")
             )
             if is_system_compiler:
-                core_compilers.append(str(compiler["spec"]))
+                core_compilers.append(compiler.spec)
         except (KeyError, TypeError, AttributeError):
             continue

@@ -89,10 +87,10 @@ def guess_core_compilers(name, store=False):
         modules_cfg = spack.config.get(
             "modules:" + name, {}, scope=spack.config.default_modify_scope()
         )
-        modules_cfg.setdefault("lmod", {})["core_compilers"] = core_compilers
+        modules_cfg.setdefault("lmod", {})["core_compilers"] = [str(x) for x in core_compilers]
         spack.config.set("modules:" + name, modules_cfg, scope=spack.config.default_modify_scope())

-    return core_compilers or None
+    return core_compilers


 class LmodConfiguration(BaseConfiguration):
@@ -104,7 +102,7 @@ class LmodConfiguration(BaseConfiguration):
     default_projections = {"all": posixpath.join("{name}", "{version}")}

     @property
-    def core_compilers(self):
+    def core_compilers(self) -> List[spack.spec.CompilerSpec]:
         """Returns the list of "Core" compilers

         Raises:
@@ -112,14 +110,18 @@ def core_compilers(self):
             specified in the configuration file or the sequence
             is empty
         """
-        value = configuration(self.name).get("core_compilers") or guess_core_compilers(
-            self.name, store=True
-        )
+        compilers = [
+            spack.spec.CompilerSpec(c) for c in configuration(self.name).get("core_compilers", [])
+        ]

-        if not value:
+        if not compilers:
+            compilers = guess_core_compilers(self.name, store=True)
+
+        if not compilers:
             msg = 'the key "core_compilers" must be set in modules.yaml'
             raise CoreCompilersNotFoundError(msg)
-        return value
+
+        return compilers

     @property
     def core_specs(self):
@@ -283,16 +285,18 @@ def token_to_path(self, name, value):

         # If we are dealing with a core compiler, return 'Core'
         core_compilers = self.conf.core_compilers
-        if name == "compiler" and str(value) in core_compilers:
+        if name == "compiler" and any(
+            spack.spec.CompilerSpec(value).satisfies(c) for c in core_compilers
+        ):
             return "Core"

-        # CompilerSpec does not have an hash, as we are not allowed to
+        # CompilerSpec does not have a hash, as we are not allowed to
         # use different flavors of the same compiler
         if name == "compiler":
             return path_part_fmt.format(token=value)

         # In case the hierarchy token refers to a virtual provider
-        # we need to append an hash to the version to distinguish
+        # we need to append a hash to the version to distinguish
         # among flavors of the same library (e.g. openblas~openmp vs.
         # openblas+openmp)
         path = path_part_fmt.format(token=value)
```
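The behavioral core of the lmod change: membership in `core_compilers` is now a `satisfies()` check between `CompilerSpec` objects instead of an exact string comparison, so a concrete compiler whose canonical string carries `@=` still matches a configured `clang@12.0.0` entry. A string-based toy of the difference (the `satisfies` helper below is a hypothetical simplification, not Spack's semantics):

```python
def satisfies(concrete, configured):
    """Toy check: 'clang@=12.0.0' satisfies 'clang@12' and 'clang@12.0.0'."""
    cname, _, cver = concrete.partition("@=")
    name, _, ver = configured.partition("@")
    ver = ver.lstrip("=")
    return cname == name and (cver == ver or cver.startswith(ver + "."))


core = ["clang@12.0.0"]
value = "clang@=12.0.0"  # canonical form of the spec's compiler in Spack 0.20

print(value in core)                           # False: exact string match fails
print(any(satisfies(value, c) for c in core))  # True: range-aware match succeeds
```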
|
@@ -2017,7 +2017,8 @@ def test_title(purpose, test_name):
|
||||
# stack instead of from traceback.
|
||||
# The traceback is truncated here, so we can't use it to
|
||||
# traverse the stack.
|
||||
m = "\n".join(spack.build_environment.get_package_context(tb))
|
||||
context = spack.build_environment.get_package_context(tb)
|
||||
m = "\n".join(context) if context else ""
|
||||
|
||||
exc = e # e is deleted after this block
|
||||
|
||||
|
```diff
@@ -861,9 +861,9 @@ class SpackSolverSetup(object):
     def __init__(self, tests=False):
         self.gen = None  # set by setup()

-        self.declared_versions = {}
-        self.possible_versions = {}
-        self.deprecated_versions = {}
+        self.declared_versions = collections.defaultdict(list)
+        self.possible_versions = collections.defaultdict(set)
+        self.deprecated_versions = collections.defaultdict(set)

         self.possible_virtuals = None
         self.possible_compilers = []
@@ -1669,9 +1669,34 @@ class Body(object):
             if concrete_build_deps or dtype != "build":
                 clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))

-                # Ensure Spack will not coconcretize this with another provider
-                # for the same virtual
-                for virtual in dep.package.virtuals_provided:
+                # TODO: We have to look up info from package.py here, but we'd
+                # TODO: like to avoid this entirely. We should not need to look
+                # TODO: up potentially wrong info if we have virtual edge info.
+                try:
+                    try:
+                        pkg = dep.package
+
+                    except spack.repo.UnknownNamespaceError:
+                        # Try to look up the package of the same name and use its
+                        # providers. This is as good as we can do without edge info.
+                        pkg_class = spack.repo.path.get_pkg_class(dep.name)
+                        spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
+                        pkg = pkg_class(spec)
+
+                    virtuals = pkg.virtuals_provided
+
+                except spack.repo.UnknownPackageError:
+                    # Skip virtual node constriants for renamed/deleted packages,
+                    # so their binaries can still be installed.
+                    # NOTE: with current specs (which lack edge attributes) this
+                    # can allow concretizations with two providers, but it's unlikely.
+                    continue
+
+                # Don't concretize with two providers of the same virtual.
+                # See above for exception for unknown packages.
+                # TODO: we will eventually record provider information on edges,
+                # TODO: which avoids the need for the package lookup above.
+                for virtual in virtuals:
                     clauses.append(fn.attr("virtual_node", virtual.name))
                     clauses.append(fn.provider(dep.name, virtual.name))
@@ -1697,10 +1722,6 @@ class Body(object):

     def build_version_dict(self, possible_pkgs):
         """Declare any versions in specs not declared in packages."""
-        self.declared_versions = collections.defaultdict(list)
-        self.possible_versions = collections.defaultdict(set)
-        self.deprecated_versions = collections.defaultdict(set)
-
         packages_yaml = spack.config.get("packages")
         packages_yaml = _normalize_packages_yaml(packages_yaml)
         for pkg_name in possible_pkgs:
@@ -1734,13 +1755,47 @@ def key_fn(item):
             # All the preferred version from packages.yaml, versions in external
             # specs will be computed later
             version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
-            for idx, v in enumerate(version_preferences):
-                # v can be a string so force it into an actual version for comparisons
-                ver = vn.Version(v)
+            version_defs = []
+            pkg_class = spack.repo.path.get_pkg_class(pkg_name)
+            for vstr in version_preferences:
+                v = vn.ver(vstr)
+                if isinstance(v, vn.GitVersion):
+                    version_defs.append(v)
+                else:
+                    satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
+                    # Amongst all defined versions satisfying this specific
+                    # preference, the highest-numbered version is the
+                    # most-preferred: therefore sort satisfying versions
+                    # from greatest to least
+                    version_defs.extend(sorted(satisfying_versions, reverse=True))
+
+            for weight, vdef in enumerate(llnl.util.lang.dedupe(version_defs)):
                 self.declared_versions[pkg_name].append(
-                    DeclaredVersion(version=ver, idx=idx, origin=Provenance.PACKAGES_YAML)
+                    DeclaredVersion(version=vdef, idx=weight, origin=Provenance.PACKAGES_YAML)
                 )
-                self.possible_versions[pkg_name].add(ver)
+                self.possible_versions[pkg_name].add(vdef)
+
+    def _check_for_defined_matching_versions(self, pkg_class, v):
+        """Given a version specification (which may be a concrete version,
+        range, etc.), determine if any package.py version declarations
+        or externals define a version which satisfies it.
+
+        This is primarily for determining whether a version request (e.g.
+        version preferences, which should not themselves define versions)
+        refers to a defined version.
+
+        This function raises an exception if no satisfying versions are
+        found.
+        """
+        pkg_name = pkg_class.name
+        satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
+        satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
+        if not satisfying_versions:
+            raise spack.config.ConfigError(
+                "Preference for version {0} does not match any version"
+                " defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
+            )
+        return satisfying_versions

     def add_concrete_versions_from_specs(self, specs, origin):
         """Add concrete versions to possible versions from lists of CLI/dev specs."""
@@ -2173,14 +2228,6 @@ def setup(self, driver, specs, reuse=None):
         # get possible compilers
         self.possible_compilers = self.generate_possible_compilers(specs)

-        # traverse all specs and packages to build dict of possible versions
-        self.build_version_dict(possible)
-        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
-        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
-
-        req_version_specs = _get_versioned_specs_from_pkg_requirements()
-        self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
-
         self.gen.h1("Concrete input spec definitions")
         self.define_concrete_input_specs(specs, possible)
@@ -2208,6 +2255,14 @@ def setup(self, driver, specs, reuse=None):
         self.provider_requirements()
         self.external_packages()

+        # traverse all specs and packages to build dict of possible versions
+        self.build_version_dict(possible)
+        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
+        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
+
+        req_version_specs = self._get_versioned_specs_from_pkg_requirements()
+        self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
+
         self.gen.h1("Package Constraints")
         for pkg in sorted(self.pkgs):
             self.gen.h2("Package rules: %s" % pkg)
@@ -2254,55 +2309,78 @@ def literal_specs(self, specs):
         if self.concretize_everything:
             self.gen.fact(fn.concretize_everything())

+    def _get_versioned_specs_from_pkg_requirements(self):
+        """If package requirements mention versions that are not mentioned
+        elsewhere, then we need to collect those to mark them as possible
+        versions.
+        """
+        req_version_specs = list()
+        config = spack.config.get("packages")
+        for pkg_name, d in config.items():
+            if pkg_name == "all":
+                continue
+            if "require" in d:
+                req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
+        return req_version_specs

-def _get_versioned_specs_from_pkg_requirements():
-    """If package requirements mention versions that are not mentioned
-    elsewhere, then we need to collect those to mark them as possible
-    versions.
-    """
-    req_version_specs = list()
-    config = spack.config.get("packages")
-    for pkg_name, d in config.items():
-        if pkg_name == "all":
-            continue
-        if "require" in d:
-            req_version_specs.extend(_specs_from_requires(pkg_name, d["require"]))
-    return req_version_specs
-
-
-def _specs_from_requires(pkg_name, section):
-    if isinstance(section, str):
-        spec = spack.spec.Spec(section)
-        if not spec.name:
-            spec.name = pkg_name
-        extracted_specs = [spec]
-    else:
-        spec_strs = []
-        for spec_group in section:
-            if isinstance(spec_group, str):
-                spec_strs.append(spec_group)
-            else:
-                # Otherwise it is an object. The object can contain a single
-                # "spec" constraint, or a list of them with "any_of" or
-                # "one_of" policy.
-                if "spec" in spec_group:
-                    new_constraints = [spec_group["spec"]]
-                else:
-                    key = "one_of" if "one_of" in spec_group else "any_of"
-                    new_constraints = spec_group[key]
-                spec_strs.extend(new_constraints)
-
-        extracted_specs = []
-        for spec_str in spec_strs:
-            spec = spack.spec.Spec(spec_str)
-            if not spec.name:
-                spec.name = pkg_name
-            extracted_specs.append(spec)
-
-    version_specs = [x for x in extracted_specs if x.versions.concrete]
-    for spec in version_specs:
-        spec.attach_git_version_lookup()
-    return version_specs
+    def _specs_from_requires(self, pkg_name, section):
+        """Collect specs from requirements which define versions (i.e. those that
+        have a concrete version). Requirements can define *new* versions if
+        they are included as part of an equivalence (hash=number) but not
+        otherwise.
+        """
+        if isinstance(section, str):
+            spec = spack.spec.Spec(section)
+            if not spec.name:
+                spec.name = pkg_name
+            extracted_specs = [spec]
+        else:
+            spec_strs = []
+            for spec_group in section:
+                if isinstance(spec_group, str):
+                    spec_strs.append(spec_group)
+                else:
+                    # Otherwise it is an object. The object can contain a single
+                    # "spec" constraint, or a list of them with "any_of" or
+                    # "one_of" policy.
+                    if "spec" in spec_group:
+                        new_constraints = [spec_group["spec"]]
+                    else:
+                        key = "one_of" if "one_of" in spec_group else "any_of"
+                        new_constraints = spec_group[key]
+                    spec_strs.extend(new_constraints)
+
+            extracted_specs = []
+            for spec_str in spec_strs:
+                spec = spack.spec.Spec(spec_str)
+                if not spec.name:
+                    spec.name = pkg_name
+                extracted_specs.append(spec)
+
+        version_specs = []
+        for spec in extracted_specs:
+            if spec.versions.concrete:
+                # Note: this includes git versions
+                version_specs.append(spec)
+                continue
+
+            # Prefer spec's name if it exists, in case the spec is
+            # requiring a specific implementation inside of a virtual section
+            # e.g. packages:mpi:require:openmpi@4.0.1
+            pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
+            satisfying_versions = self._check_for_defined_matching_versions(
+                pkg_class, spec.versions
+            )
+
+            # Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
+            # will end up here
+            ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
+            vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
+            version_specs.extend(vspecs)
+
+        for spec in version_specs:
+            spec.attach_git_version_lookup()
+        return version_specs


 class SpecBuilder(object):
```
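The preference handling above maps a truncated preference such as `@3.5` onto versions the package actually defines, most-preferred first, instead of declaring a brand-new version `3.5`. A toy of just the selection step, with plain strings standing in for Spack's version objects:

```python
defined = ["3.5.0", "3.5.1", "3.6.2"]  # versions declared in a hypothetical package.py


def satisfies(version, prefix):
    # Toy range semantics: "3.5" matches "3.5" itself and anything under "3.5.*".
    return version == prefix or version.startswith(prefix + ".")


satisfying = [v for v in defined if satisfies(v, "3.5")]
# The highest-numbered satisfying version is the most preferred, so sort descending.
ordered = sorted(satisfying, key=lambda v: tuple(map(int, v.split("."))), reverse=True)
print(ordered)  # ['3.5.1', '3.5.0']
```

If nothing satisfies the preference, the code raises a configuration error instead of silently inventing the version, which is what the new `test_preferred_undefined_raises` test below exercises.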
```diff
@@ -679,6 +679,16 @@ def from_dict(d):
         d = d["compiler"]
         return CompilerSpec(d["name"], vn.VersionList.from_dict(d))

+    @property
+    def display_str(self):
+        """Equivalent to {compiler.name}{@compiler.version} for Specs, without extra
+        @= for readability."""
+        if self.concrete:
+            return f"{self.name}@{self.version}"
+        elif self.versions != vn.any_version:
+            return f"{self.name}@{self.versions}"
+        return self.name
+
     def __str__(self):
         out = self.name
         if self.versions and self.versions != vn.any_version:
@@ -1730,14 +1740,14 @@ def traverse_edges(self, **kwargs):
     def short_spec(self):
         """Returns a version of the spec with the dependencies hashed
         instead of completely enumerated."""
-        spec_format = "{name}{@version}{%compiler}"
+        spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
         spec_format += "{variants}{arch=architecture}{/hash:7}"
         return self.format(spec_format)

     @property
     def cshort_spec(self):
         """Returns an auto-colorized version of ``self.short_spec``."""
-        spec_format = "{name}{@version}{%compiler}"
+        spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
         spec_format += "{variants}{arch=architecture}{/hash:7}"
         return self.cformat(spec_format)
@@ -2789,11 +2799,11 @@ def inject_patches_variant(root):
         # Also record all patches required on dependencies by
         # depends_on(..., patch=...)
         for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
-            pkg_deps = dspec.parent.package_class.dependencies
-            if dspec.spec.name not in pkg_deps:
+            if dspec.spec.concrete:
                 continue

-            if dspec.spec.concrete:
+            pkg_deps = dspec.parent.package_class.dependencies
+            if dspec.spec.name not in pkg_deps:
                 continue

             patches = []
```
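The `inject_patches_variant` reorder above makes the cheap concrete-spec check run before the parent package-class lookup, so no `package.py` is consulted for edges that are already concrete. A control-flow sketch with hypothetical objects (the `LookupError` stands in for whatever the real lookup raises when a package has been renamed or deleted; that motivation is an assumption here, not stated in the diff):

```python
class Edge:
    def __init__(self, concrete):
        self.concrete = concrete

    @property
    def parent_package_dependencies(self):
        # Stands in for dspec.parent.package_class.dependencies, which can
        # fail if the parent's package.py is no longer in the repo.
        raise LookupError("package was renamed or deleted")


def visit(edge):
    if edge.concrete:
        return "skip"  # early exit: no package lookup at all
    return "patch" if "zlib" in edge.parent_package_dependencies else "skip"


print(visit(Edge(concrete=True)))  # "skip": safe even without a package.py
```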
```diff
@@ -204,8 +204,8 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
     os.environ["PATH"] = str(clangdir)
     output = compiler("find", "--scope=site")

-    assert "clang@=11.0.0" in output
-    assert "gcc@=8.4.0" in output
+    assert "clang@11.0.0" in output
+    assert "gcc@8.4.0" in output

     config = spack.compilers.get_compiler_config("site", False)
     clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
@@ -246,8 +246,8 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir
     os.environ["PATH"] = str(clangdir)
     output = compiler("find", "--scope=site")

-    assert "clang@=11.0.0" in output
-    assert "gcc@=8.4.0" in output
+    assert "clang@11.0.0" in output
+    assert "gcc@8.4.0" in output

     config = spack.compilers.get_compiler_config("site", False)
     clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
```
```diff
@@ -906,7 +906,7 @@ def test_env_config_precedence(environment_from_manifest):
         mpileaks:
           version: ["2.2"]
         libelf:
-          version: ["0.8.11"]
+          version: ["0.8.10"]
 """
     )
@@ -3299,3 +3299,22 @@ def test_environment_created_in_users_location(mutable_config, tmpdir):
     assert dir_name in out
     assert env_dir in ev.root(dir_name)
     assert os.path.isdir(os.path.join(env_dir, dir_name))
+
+
+def test_environment_created_from_lockfile_has_view(mock_packages, tmpdir):
+    """When an env is created from a lockfile, a view should be generated for it"""
+    env_a = str(tmpdir.join("a"))
+    env_b = str(tmpdir.join("b"))
+
+    # Create an environment and install a package in it
+    env("create", "-d", env_a)
+    with ev.Environment(env_a):
+        add("libelf")
+        install("--fake")
+
+    # Create another environment from the lockfile of the first environment
+    env("create", "-d", env_b, os.path.join(env_a, "spack.lock"))
+
+    # Make sure the view was created
+    with ev.Environment(env_b) as e:
+        assert os.path.isdir(e.view_path_default)
```
```diff
@@ -357,3 +357,18 @@ def test_find_loaded(database, working_env):
     output = find("--loaded")
     expected = find()
     assert output == expected
+
+
+@pytest.mark.regression("37712")
+def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
+    """Tests that having an active environment with a root spec containing a compiler constrained
+    by a version range (i.e. @X.Y rather the single version than @=X.Y) doesn't result in an error
+    when invoking "spack find".
+    """
+    test_environment = ev.create_in_dir(tmp_path)
+    test_environment.add("zlib %gcc@12.1.0")
+    test_environment.write()
+
+    with test_environment:
+        output = find()
+    assert "zlib%gcc@12.1.0" in output
```
```diff
@@ -319,3 +319,17 @@ def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
     spack.cmd.common.arguments.sanitize_reporter_options(args)
     filename = spack.cmd.test.report_filename(args, suite)
     assert filename != "https://blahblah/submit.php?project=debugging"
+
+
+def test_test_output_multiple_specs(
+    mock_test_stage, mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
+):
+    """Ensure proper reporting for suite with skipped, failing, and passed tests."""
+    install("test-error", "simple-standalone-test@0.9", "simple-standalone-test@1.0")
+    out = spack_test("run", "test-error", "simple-standalone-test", fail_on_error=False)
+
+    # Note that a spec with passing *and* skipped tests is still considered
+    # to have passed at this level. If you want to see the spec-specific
+    # part result summaries, you'll have to look at the "test-out.txt" files
+    # for each spec.
+    assert "1 failed, 2 passed of 3 specs" in out
```
```diff
@@ -152,7 +152,9 @@ def test_preferred_versions(self):
         assert spec.version == Version("2.2")

     def test_preferred_versions_mixed_version_types(self):
-        update_packages("mixedversions", "version", ["2.0"])
+        if spack.config.get("config:concretizer") == "original":
+            pytest.skip("This behavior is not enforced for the old concretizer")
+        update_packages("mixedversions", "version", ["=2.0"])
         spec = concretize("mixedversions")
         assert spec.version == Version("2.0")
@@ -228,6 +230,29 @@ def test_preferred(self):
         spec.concretize()
         assert spec.version == Version("3.5.0")

+    def test_preferred_undefined_raises(self):
+        """Preference should not specify an undefined version"""
+        if spack.config.get("config:concretizer") == "original":
+            pytest.xfail("This behavior is not enforced for the old concretizer")
+
+        update_packages("python", "version", ["3.5.0.1"])
+        spec = Spec("python")
+        with pytest.raises(spack.config.ConfigError):
+            spec.concretize()
+
+    def test_preferred_truncated(self):
+        """Versions without "=" are treated as version ranges: if there is
+        a satisfying version defined in the package.py, we should use that
+        (don't define a new version).
+        """
+        if spack.config.get("config:concretizer") == "original":
+            pytest.skip("This behavior is not enforced for the old concretizer")
+
+        update_packages("python", "version", ["3.5"])
+        spec = Spec("python")
+        spec.concretize()
+        assert spec.satisfies("@3.5.1")
+
     def test_develop(self):
         """Test concretization with develop-like versions"""
         spec = Spec("develop-test")
```
```diff
@@ -66,6 +66,28 @@ class V(Package):
 )


+_pkgt = (
+    "t",
+    """\
+class T(Package):
+    version('2.1')
+    version('2.0')
+
+    depends_on('u', when='@2.1:')
+""",
+)
+
+
+_pkgu = (
+    "u",
+    """\
+class U(Package):
+    version('1.1')
+    version('1.0')
+""",
+)
+
+
 @pytest.fixture
 def create_test_repo(tmpdir, mutable_config):
     repo_path = str(tmpdir)
@@ -79,7 +101,7 @@ def create_test_repo(tmpdir, mutable_config):
     )

     packages_dir = tmpdir.join("packages")
-    for pkg_name, pkg_str in [_pkgx, _pkgy, _pkgv]:
+    for pkg_name, pkg_str in [_pkgx, _pkgy, _pkgv, _pkgt, _pkgu]:
         pkg_dir = packages_dir.ensure(pkg_name, dir=True)
         pkg_file = pkg_dir.join("package.py")
         with open(str(pkg_file), "w") as f:
@@ -144,6 +166,45 @@ def test_requirement_isnt_optional(concretize_scope, test_repo):
         Spec("x@1.1").concretize()


+def test_require_undefined_version(concretize_scope, test_repo):
+    """If a requirement specifies a numbered version that isn't in
+    the associated package.py and isn't part of a Git hash
+    equivalence (hash=number), then Spack should raise an error
+    (it is assumed this is a typo, and raising the error here
+    avoids a likely error when Spack attempts to fetch the version).
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration requirements")
+
+    conf_str = """\
+packages:
+  x:
+    require: "@1.2"
+"""
+    update_packages_config(conf_str)
+    with pytest.raises(spack.config.ConfigError):
+        Spec("x").concretize()
+
+
+def test_require_truncated(concretize_scope, test_repo):
+    """A requirement specifies a version range, with satisfying
+    versions defined in the package.py. Make sure we choose one
+    of the defined versions (vs. allowing the requirement to
+    define a new version).
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration requirements")
+
+    conf_str = """\
+packages:
+  x:
+    require: "@1"
+"""
+    update_packages_config(conf_str)
+    xspec = Spec("x").concretized()
+    assert xspec.satisfies("@1.1")
+
+
 def test_git_user_supplied_reference_satisfaction(
     concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
@@ -220,6 +281,40 @@ def test_requirement_adds_new_version(
     assert s1.version.ref == a_commit_hash


+def test_requirement_adds_version_satisfies(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    """Make sure that new versions added by requirements are factored into
+    conditions. In this case create a new version that satisfies a
+    depends_on condition and make sure it is triggered (i.e. the
+    dependency is added).
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration" " requirements")
+
+    repo_path, filename, commits = mock_git_version_info
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    # Sanity check: early version of T does not include U
+    s0 = Spec("t@2.0").concretized()
+    assert not ("u" in s0)
+
+    conf_str = """\
+packages:
+  t:
+    require: "@{0}=2.2"
+""".format(
+        commits[0]
+    )
+    update_packages_config(conf_str)
+
+    s1 = Spec("t").concretized()
+    assert "u" in s1
+    assert s1.satisfies("@2.2")
+
+
 def test_requirement_adds_git_hash_version(
     concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
@@ -272,8 +367,11 @@ def test_requirement_adds_multiple_new_versions(
 def test_preference_adds_new_version(
     concretize_scope, test_repo, mock_git_version_info, monkeypatch
 ):
+    """Normally a preference cannot define a new version, but that constraint
+    is ignored if the version is a Git hash-based version.
+    """
     if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration requirements")
+        pytest.skip("Original concretizer does not enforce this constraint for preferences")

     repo_path, filename, commits = mock_git_version_info
     monkeypatch.setattr(
@@ -296,6 +394,29 @@ def test_preference_adds_new_version(
     assert not s3.satisfies("@2.3")


+def test_external_adds_new_version_that_is_preferred(concretize_scope, test_repo):
+    """Test that we can use a version, not declared in package recipe, as the
+    preferred version if that version appears in an external spec.
+    """
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not enforce this constraint for preferences")
+
+    conf_str = """\
+packages:
+  y:
+    version: ["2.7"]
+    externals:
+    - spec: y@2.7  # Not defined in y
+      prefix: /fake/nonexistent/path/
+    buildable: false
+"""
+    update_packages_config(conf_str)
+
+    spec = Spec("x").concretized()
+    assert spec["y"].satisfies("@2.7")
+    assert spack.version.Version("2.7") not in spec["y"].package.versions
+
+
 def test_requirement_is_successfully_applied(concretize_scope, test_repo):
     """If a simple requirement can be satisfied, make sure the
     concretization succeeds and the requirement spec is applied.
```
```diff
@@ -4,7 +4,7 @@ lmod:
   hash_length: 0

   core_compilers:
-    - 'clang@3.3'
+    - 'clang@12.0.0'

   core_specs:
     - 'mpich@3.0.1'
```
```diff
@@ -0,0 +1,5 @@
+enable:
+  - lmod
+lmod:
+  core_compilers:
+    - 'clang@12.0.0'
```
```diff
@@ -0,0 +1,5 @@
+enable:
+  - lmod
+lmod:
+  core_compilers:
+    - 'clang@=12.0.0'
```
|
@@ -1399,17 +1399,24 @@ def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, r
|
||||
assert out == ""
|
||||
|
||||
|
||||
def test_print_install_test_log_missing(
|
||||
def test_print_install_test_log_failures(
|
||||
tmpdir, install_mockery, mock_packages, ensure_debug, capfd
|
||||
):
|
||||
"""Confirm expected error on attempt to print missing test log file."""
|
||||
"""Confirm expected outputs when there are test failures."""
|
||||
name = "trivial-install-test-package"
|
||||
s = spack.spec.Spec(name).concretized()
|
||||
pkg = s.package
|
||||
|
||||
# Missing test log is an error
|
||||
pkg.run_tests = True
|
||||
pkg.tester.test_log_file = str(tmpdir.join("test-log.txt"))
|
||||
pkg.tester.add_failure(AssertionError("test"), "test-failure")
|
||||
spack.installer.print_install_test_log(pkg)
|
||||
err = capfd.readouterr()[1]
|
||||
assert "no test log file" in err
|
||||
|
||||
# Having test log results in path being output
|
||||
fs.touch(pkg.tester.test_log_file)
|
||||
spack.installer.print_install_test_log(pkg)
|
||||
out = capfd.readouterr()[0]
|
||||
assert "See test results at" in out
|
||||
|
```diff
@@ -8,6 +8,8 @@

 import pytest

+import spack.cmd.modules
+import spack.config
 import spack.error
 import spack.modules.tcl
 import spack.package_base
@@ -187,3 +189,31 @@ def find_nothing(*args):
     assert module_path

     spack.package_base.PackageBase.uninstall_by_spec(spec)
+
+
+@pytest.mark.regression("37649")
+def test_check_module_set_name(mutable_config):
+    """Tests that modules set name are validated correctly and an error is reported if the
+    name we require does not exist or is reserved by the configuration."""
+
+    # Minimal modules.yaml config.
+    spack.config.set(
+        "modules",
+        {
+            "prefix_inspections": {"./bin": ["PATH"]},
+            # module sets
+            "first": {},
+            "second": {},
+        },
+    )
+
+    # Valid module set name
+    spack.cmd.modules.check_module_set_name("first")
+
+    # Invalid module set names
+    msg = "Valid module set names are"
+    with pytest.raises(spack.config.ConfigError, match=msg):
+        spack.cmd.modules.check_module_set_name("prefix_inspections")
+
+    with pytest.raises(spack.config.ConfigError, match=msg):
+        spack.cmd.modules.check_module_set_name("third")
```
@@ -45,6 +45,18 @@ def provider(request):

@pytest.mark.usefixtures("config", "mock_packages")
class TestLmod(object):
    @pytest.mark.regression("37788")
    @pytest.mark.parametrize("modules_config", ["core_compilers", "core_compilers_at_equal"])
    def test_layout_for_specs_compiled_with_core_compilers(
        self, modules_config, module_configuration, factory
    ):
        """Tests that specs compiled with core compilers are in the 'Core' folder. Also tests that
        we can use both ``compiler@version`` and ``compiler@=version`` to specify a core compiler.
        """
        module_configuration(modules_config)
        module, spec = factory("libelf%clang@12.0.0")
        assert "Core" in module.layout.available_path_parts

    def test_file_layout(self, compiler, provider, factory, module_configuration):
        """Tests the layout of files in the hierarchy is the one expected."""
        module_configuration("complex_hierarchy")
@@ -61,7 +73,7 @@ def test_file_layout(self, compiler, provider, factory, module_configuration):
        # is transformed to r"Core" if the compiler is listed among core
        # compilers
        # Check that specs listed as core_specs are transformed to "Core"
        if compiler == "clang@=3.3" or spec_string == "mpich@3.0.1":
        if compiler == "clang@=12.0.0" or spec_string == "mpich@3.0.1":
            assert "Core" in layout.available_path_parts
        else:
            assert compiler.replace("@=", "/") in layout.available_path_parts
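The assertions above encode how a compiler spec becomes a path part in the lmod hierarchy. A small illustrative sketch of that mapping (the function name and core list here are hypothetical, mirroring the test rather than the layout code itself):

# Illustrative only: core compilers collapse into "Core"; anything else becomes
# a "name/version" path part, as in the test's compiler.replace("@=", "/").
CORE_COMPILERS = {"clang@=12.0.0"}  # hypothetical core list

def compiler_path_part(compiler_spec: str) -> str:
    if compiler_spec in CORE_COMPILERS:
        return "Core"
    return compiler_spec.replace("@=", "/")

assert compiler_path_part("clang@=12.0.0") == "Core"
assert compiler_path_part("gcc@=9.4.0") == "gcc/9.4.0"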
@@ -12,6 +12,7 @@

import spack.install_test
import spack.spec
from spack.install_test import TestStatus
from spack.util.executable import which


@@ -20,7 +21,7 @@ def _true(*args, **kwargs):
    return True


def ensure_results(filename, expected):
def ensure_results(filename, expected, present=True):
    assert os.path.exists(filename)
    with open(filename, "r") as fd:
        lines = fd.readlines()
@@ -29,7 +30,10 @@ def ensure_results(filename, expected):
        if expected in line:
            have = True
            break
    assert have
    if present:
        assert have, f"Expected '{expected}' in the file"
    else:
        assert not have, f"Expected '{expected}' NOT to be in the file"


def test_test_log_name(mock_packages, config):
@@ -78,8 +82,8 @@ def test_write_test_result(mock_packages, mock_test_stage):
    assert spec.name in msg


def test_test_uninstalled(mock_packages, install_mockery, mock_test_stage):
    """Attempt to perform stand-alone test for uninstalled package."""
def test_test_not_installed(mock_packages, install_mockery, mock_test_stage):
    """Attempt to perform stand-alone test for a package that is not installed."""
    spec = spack.spec.Spec("trivial-smoke-test").concretized()
    test_suite = spack.install_test.TestSuite([spec])

@@ -91,10 +95,7 @@ def test_test_uninstalled(mock_packages, install_mockery, mock_test_stage):

@pytest.mark.parametrize(
    "arguments,status,msg",
    [
        ({}, spack.install_test.TestStatus.SKIPPED, "Skipped"),
        ({"externals": True}, spack.install_test.TestStatus.NO_TESTS, "No tests"),
    ],
    [({}, TestStatus.SKIPPED, "Skipped"), ({"externals": True}, TestStatus.NO_TESTS, "No tests")],
)
def test_test_external(
    mock_packages, install_mockery, mock_test_stage, monkeypatch, arguments, status, msg
@@ -156,6 +157,7 @@ def test_test_spec_passes(mock_packages, install_mockery, mock_test_stage, monke

    ensure_results(test_suite.results_file, "PASSED")
    ensure_results(test_suite.log_file_for_spec(spec), "simple stand-alone")
    ensure_results(test_suite.log_file_for_spec(spec), "standalone-ifc", present=False)


def test_get_test_suite():
@@ -212,8 +214,10 @@ def test_test_functions_pkgless(mock_packages, install_mockery, ensure_debug, ca
    spec = spack.spec.Spec("simple-standalone-test").concretized()
    fns = spack.install_test.test_functions(spec.package, add_virtuals=True)
    out = capsys.readouterr()
    assert len(fns) == 1, "Expected only one test function"
    assert "does not appear to have a package file" in out[1]
    assert len(fns) == 2, "Expected two test functions"
    for f in fns:
        assert f[1].__name__ in ["test_echo", "test_skip"]
    assert "virtual does not appear to have a package file" in out[1]


# TODO: This test should go away when compilers as dependencies is supported
@@ -301,7 +305,7 @@ def test_test_part_fail(tmpdir, install_mockery_mutable_config, mock_fetch, mock

    for part_name, status in pkg.tester.test_parts.items():
        assert part_name.endswith(name)
        assert status == spack.install_test.TestStatus.FAILED
        assert status == TestStatus.FAILED


def test_test_part_pass(install_mockery_mutable_config, mock_fetch, mock_test_stage):
@@ -317,7 +321,7 @@ def test_test_part_pass(install_mockery_mutable_config, mock_fetch, mock_test_st

    for part_name, status in pkg.tester.test_parts.items():
        assert part_name.endswith(name)
        assert status == spack.install_test.TestStatus.PASSED
        assert status == TestStatus.PASSED


def test_test_part_skip(install_mockery_mutable_config, mock_fetch, mock_test_stage):
@@ -331,7 +335,7 @@ def test_test_part_skip(install_mockery_mutable_config, mock_fetch, mock_test_st

    for part_name, status in pkg.tester.test_parts.items():
        assert part_name.endswith(name)
        assert status == spack.install_test.TestStatus.SKIPPED
        assert status == TestStatus.SKIPPED


def test_test_part_missing_exe_fail_fast(
@@ -354,7 +358,7 @@ def test_test_part_missing_exe_fail_fast(
    assert len(test_parts) == 1
    for part_name, status in test_parts.items():
        assert part_name.endswith(name)
        assert status == spack.install_test.TestStatus.FAILED
        assert status == TestStatus.FAILED


def test_test_part_missing_exe(
@@ -375,7 +379,66 @@ def test_test_part_missing_exe(
    assert len(test_parts) == 1
    for part_name, status in test_parts.items():
        assert part_name.endswith(name)
        assert status == spack.install_test.TestStatus.FAILED
        assert status == TestStatus.FAILED

# TODO (embedded test parts): Update this once embedded test part tracking
# TODO (embedded test parts): properly handles the nested context managers.
@pytest.mark.parametrize(
    "current,substatuses,expected",
    [
        (TestStatus.PASSED, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.PASSED),
        (TestStatus.FAILED, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.FAILED),
        (TestStatus.SKIPPED, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.SKIPPED),
        (TestStatus.NO_TESTS, [TestStatus.PASSED, TestStatus.PASSED], TestStatus.NO_TESTS),
        (TestStatus.PASSED, [TestStatus.PASSED, TestStatus.SKIPPED], TestStatus.PASSED),
        (TestStatus.PASSED, [TestStatus.PASSED, TestStatus.FAILED], TestStatus.FAILED),
        (TestStatus.PASSED, [TestStatus.SKIPPED, TestStatus.SKIPPED], TestStatus.SKIPPED),
    ],
)
def test_embedded_test_part_status(
    install_mockery_mutable_config, mock_fetch, mock_test_stage, current, substatuses, expected
):
    """Check to ensure the status of the enclosing test part reflects summary of embedded parts."""

    s = spack.spec.Spec("trivial-smoke-test").concretized()
    pkg = s.package
    base_name = "test_example"
    part_name = f"{pkg.__class__.__name__}::{base_name}"

    pkg.tester.test_parts[part_name] = current
    for i, status in enumerate(substatuses):
        pkg.tester.test_parts[f"{part_name}_{i}"] = status

    pkg.tester.status(base_name, current)
    assert pkg.tester.test_parts[part_name] == expected

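The parametrized cases above encode a roll-up rule for embedded parts. A minimal sketch of that rule, as an illustrative reimplementation consistent with the table rather than the tester's actual code: any failed sub-part fails the enclosing part, sub-parts that were all skipped mark it skipped, and otherwise the enclosing part keeps its own status.

# Illustrative roll-up, mirroring the parametrized table above (not Spack's code).
from spack.install_test import TestStatus

def roll_up(current, substatuses):
    if TestStatus.FAILED in substatuses:
        return TestStatus.FAILED
    if substatuses and all(s == TestStatus.SKIPPED for s in substatuses):
        return TestStatus.SKIPPED
    return current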
@pytest.mark.parametrize(
    "statuses,expected",
    [
        ([TestStatus.PASSED, TestStatus.PASSED], TestStatus.PASSED),
        ([TestStatus.PASSED, TestStatus.SKIPPED], TestStatus.PASSED),
        ([TestStatus.PASSED, TestStatus.FAILED], TestStatus.FAILED),
        ([TestStatus.SKIPPED, TestStatus.SKIPPED], TestStatus.SKIPPED),
        ([], TestStatus.NO_TESTS),
    ],
)
def test_write_tested_status(
    tmpdir, install_mockery_mutable_config, mock_fetch, mock_test_stage, statuses, expected
):
    """Check that the overall status written to the tested file reflects the part statuses."""
    s = spack.spec.Spec("trivial-smoke-test").concretized()
    pkg = s.package
    for i, status in enumerate(statuses):
        pkg.tester.test_parts[f"test_{i}"] = status
        pkg.tester.counts[status] += 1

    pkg.tester.tested_file = tmpdir.join("test-log.txt")
    pkg.tester.write_tested_status()
    with open(pkg.tester.tested_file, "r") as f:
        status = int(f.read().strip("\n"))
    assert TestStatus(status) == expected

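Likewise for the overall status written to the tested file, sketched from the parametrized cases above (again illustrative, not the tester's code): any failure wins, then any pass, then skipped, with no parts at all meaning no tests.

# Illustrative summary rule, consistent with the cases above (not Spack's code).
from spack.install_test import TestStatus

def overall_status(statuses):
    if TestStatus.FAILED in statuses:
        return TestStatus.FAILED
    if TestStatus.PASSED in statuses:
        return TestStatus.PASSED
    if TestStatus.SKIPPED in statuses:
        return TestStatus.SKIPPED
    return TestStatus.NO_TESTS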
def test_check_special_outputs(tmpdir):
@@ -24,4 +24,4 @@ spack:
  mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" }

  cdash:
    build-group: Build tests for different build systems
    build-group: Build Systems
@@ -6,9 +6,9 @@ spack:
    mesa:
      require: "+glx +osmesa +opengl ~opengles +llvm"
    libosmesa:
      require: ^mesa +osmesa
      require: "mesa +osmesa"
    libglx:
      require: ^mesa +glx
      require: "mesa +glx"
    ospray:
      require: "@2.8.0 +denoiser +mpi"
    llvm:
@@ -269,6 +269,10 @@ spack:
    pipeline-gen:
    - build-job:
        image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
        before_script:
        - - . /bootstrap/runner/view/lmod/lmod/init/bash
          - module use /opt/intel/oneapi/modulefiles
          - module load compiler

  cdash:
    build-group: E4S OneAPI
@@ -14,6 +14,7 @@ class Python(Package):

    extendable = True

    version("3.7.1", md5="aaabbbcccdddeeefffaaabbbcccddd12")
    version("3.5.1", md5="be78e48cdfc1a7ad90efff146dce6cfe")
    version("3.5.0", md5="a56c0c0b45d75a0ec9c6dee933c41c36")
    version("2.7.11", md5="6b6076ec9e93f05dd63e47eb9c15728b", preferred=True)
@@ -7,16 +7,24 @@


class SimpleStandaloneTest(Package):
    """This package has a simple stand-alone test features."""
    """This package has simple stand-alone test features."""

    homepage = "http://www.example.com/simple_test"
    url = "http://www.unit-test-should-replace-this-url/simple_test-1.0.tar.gz"

    version("1.0", md5="0123456789abcdef0123456789abcdef")
    version("1.0", md5="123456789abcdef0123456789abcdefg")
    version("0.9", md5="0123456789abcdef0123456789abcdef")

    provides("standalone-test")
    provides("standalone-ifc")

    def test_echo(self):
        """simple stand-alone test"""
        echo = which("echo")
        echo("testing echo", output=str.split, error=str.split)

    def test_skip(self):
        """simple skip test"""
        if self.spec.satisfies("@1.0:"):
            raise SkipTest("This test is not available from v1.0 on")

        print("Ran test_skip")
@@ -21,6 +21,7 @@ class Lmod(AutotoolsPackage):
    homepage = "https://www.tacc.utexas.edu/research-development/tacc-projects/lmod"
    url = "https://github.com/TACC/Lmod/archive/8.5.6.tar.gz"

    version("8.7.24", sha256="8451267652059b6507b652e1b563929ecf9b689ffb20830642085eb6a55bd539")
    version("8.7.20", sha256="c04deff7d2ca354610a362459a7aa9a1c642a095e45a4b0bb2471bb3254e85f4")
    version("8.7.2", sha256="5f44f3783496d2d597ced7531e1714c740dbb2883a7d16fde362135fb0b0fd96")
    version("8.6.18", sha256="3db1c665c35fb8beb78c02e40d56accd361d82b715df70b2a995bcb10fbc2c80")
@@ -55,6 +56,8 @@ class Lmod(AutotoolsPackage):
    depends_on("lua-luafilesystem", type=("build", "run"))
    depends_on("tcl", type=("build", "link", "run"))

    depends_on("bc", type="build", when="@8.7.10:")

    variant("auto_swap", default=True, description="Auto swapping of compilers, etc.")
    variant(
        "redirect", default=False, description="Redirect messages to stdout (instead of stderr)"
@@ -5,6 +5,8 @@

import os

from llnl.util.filesystem import find_first

from spack.package import *
from spack.util.environment import is_system_path

@@ -93,6 +95,18 @@ def command(self):
            os.path.realpath(self.prefix.bin.join("tclsh{0}".format(self.version.up_to(2))))
        )

    def _find_script_dir(self):
        # Put more-specific prefixes first
        check_prefixes = [
            join_path(self.prefix, "share", "tcl{0}".format(self.version.up_to(2))),
            self.prefix,
        ]
        for prefix in check_prefixes:
            result = find_first(prefix, "init.tcl")
            if result:
                return os.path.dirname(result)
        raise RuntimeError("Cannot locate init.tcl")

    def setup_run_environment(self, env):
        """Set TCL_LIBRARY to the directory containing init.tcl.

@@ -102,7 +116,7 @@ def setup_run_environment(self, env):
        """
        # When using tkinter from within spack provided python+tkinter,
        # python will not be able to find Tcl unless TCL_LIBRARY is set.
        env.set("TCL_LIBRARY", os.path.dirname(sorted(find(self.prefix, "init.tcl"))[0]))
        env.set("TCL_LIBRARY", self._find_script_dir())

    def setup_dependent_build_environment(self, env, dependent_spec):
        """Set TCL_LIBRARY to the directory containing init.tcl.

@@ -114,7 +128,7 @@ def setup_dependent_build_environment(self, env, dependent_spec):
        * https://wiki.tcl-lang.org/page/TCL_LIBRARY
        * https://wiki.tcl-lang.org/page/TCLLIBPATH
        """
        env.set("TCL_LIBRARY", os.path.dirname(sorted(find(self.prefix, "init.tcl"))[0]))
        env.set("TCL_LIBRARY", self._find_script_dir())

        # If we set TCLLIBPATH, we must also ensure that the corresponding
        # tcl is found in the build environment. This is to prevent cases
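The motivation for the refactor above: the old lookup globbed the entire prefix and took the lexicographically first hit, which can land on a stray copy of init.tcl, while checking the versioned share directory first finds the real script library (and find_first stops at the first match instead of walking everything). A self-contained sketch of that failure mode, using a hypothetical layout and plain os.walk in place of Spack's find:

# Stdlib-only sketch of why sorted(find(prefix, "init.tcl"))[0] can misfire;
# the paths here are hypothetical.
import os
import tempfile

prefix = tempfile.mkdtemp()
for rel in ("demos/tcl/init.tcl", "share/tcl8.6/init.tcl"):
    path = os.path.join(prefix, rel)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    open(path, "w").close()

matches = sorted(
    os.path.join(dirpath, name)
    for dirpath, _, files in os.walk(prefix)
    for name in files
    if name == "init.tcl"
)
print(matches[0])  # -> .../demos/tcl/init.tcl: the stray copy, not the script library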
@@ -5,6 +5,8 @@

import os

from llnl.util.filesystem import find_first

from spack.package import *


@@ -118,6 +120,18 @@ def libs(self):
            ["libtk{0}".format(self.version.up_to(2))], root=self.prefix, recursive=True
        )

    def _find_script_dir(self):
        # Put more-specific prefixes first
        check_prefixes = [
            join_path(self.prefix, "share", "tk{0}".format(self.version.up_to(2))),
            self.prefix,
        ]
        for prefix in check_prefixes:
            result = find_first(prefix, "tk.tcl")
            if result:
                return os.path.dirname(result)
        raise RuntimeError("Cannot locate tk.tcl")

    def setup_run_environment(self, env):
        """Set TK_LIBRARY to the directory containing tk.tcl.

@@ -127,7 +141,7 @@ def setup_run_environment(self, env):
        """
        # When using tkinter from within spack provided python+tkinter,
        # python will not be able to find Tk unless TK_LIBRARY is set.
        env.set("TK_LIBRARY", os.path.dirname(sorted(find(self.prefix, "tk.tcl"))[0]))
        env.set("TK_LIBRARY", self._find_script_dir())

    def setup_dependent_build_environment(self, env, dependent_spec):
        """Set TK_LIBRARY to the directory containing tk.tcl.

@@ -136,4 +150,4 @@ def setup_dependent_build_environment(self, env, dependent_spec):

        * https://www.tcl-lang.org/man/tcl/TkCmd/tkvars.htm
        """
        env.set("TK_LIBRARY", os.path.dirname(sorted(find(self.prefix, "tk.tcl"))[0]))
        env.set("TK_LIBRARY", self._find_script_dir())