Merge branch 'develop' into bugfix/compiler-flag-propagation
commit 9db3d9dd6a

.github/workflows/audit.yaml (vendored)
@@ -34,6 +34,7 @@ jobs:
        run: |
          . share/spack/setup-env.sh
          coverage run $(which spack) audit packages
          coverage run $(which spack) audit externals
          coverage combine
          coverage xml
      - name: Package audits (without coverage)
@@ -41,6 +42,7 @@ jobs:
        run: |
          . share/spack/setup-env.sh
          $(which spack) audit packages
          $(which spack) audit externals
      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
        if: ${{ inputs.with_coverage == 'true' }}
        with:

@@ -6196,7 +6196,100 @@ follows:
           "foo-package@{0}".format(version_str)
       )

.. _package-lifecycle:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Add detection tests to packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To ensure that software is detected correctly for multiple configurations
and on different systems, users can write a ``detection_test.yaml`` file and
put it in the package directory, alongside the ``package.py`` file.
This YAML file contains enough information for Spack to mock an environment
and check whether the detection logic yields the expected results.

As a general rule, attributes at the top level of ``detection_test.yaml``
represent search mechanisms; each of them maps to a list of tests that should
confirm the validity of the package's detection logic.

The detection tests can be run with the following command:

.. code-block:: console

   $ spack audit externals

Errors that have been detected are reported to screen.
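
To only list which packages have detection tests, without running them, pass
the ``--list`` option added by this change:

.. code-block:: console

   $ spack audit externals --list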

""""""""""""""""""""""""""
Tests for PATH inspections
""""""""""""""""""""""""""

Detection tests that rely on ``PATH`` inspections are listed under
the ``paths`` attribute:

.. code-block:: yaml

   paths:
   - layout:
     - executables:
       - "bin/clang-3.9"
       - "bin/clang++-3.9"
       script: |
         echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
         echo "Target: x86_64-pc-linux-gnu"
         echo "Thread model: posix"
         echo "InstalledDir: /usr/bin"
     results:
     - spec: 'llvm@3.9.1 +clang~lld~lldb'

Each test is performed by first creating a temporary directory structure, as
specified in the corresponding ``layout``, and by then running
package detection and checking that the outcome matches the expected
``results``. The exact details on how to specify both the ``layout`` and the
``results`` are reported in the table below:

.. list-table:: Test based on PATH inspections
   :header-rows: 1

   * - Option Name
     - Description
     - Allowed Values
     - Required Field
   * - ``layout``
     - Specifies the filesystem tree used for the test
     - List of objects
     - Yes
   * - ``layout:[0]:executables``
     - Relative paths for the mock executables to be created
     - List of strings
     - Yes
   * - ``layout:[0]:script``
     - Mock logic for the executable
     - Any valid shell script
     - Yes
   * - ``results``
     - List of expected results
     - List of objects (empty if no result is expected)
     - Yes
   * - ``results:[0]:spec``
     - A spec that is expected from detection
     - Any valid spec
     - Yes
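
Since ``results`` accepts an empty list, a test can also assert that nothing is
detected. The following sketch uses a hypothetical executable name, not taken
from any builtin package:

.. code-block:: yaml

   paths:
   - layout:
     - executables:
       - "bin/clang-bogus"
       script: |
         echo "this is not a recognizable version banner"
     results: []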
"""""""""""""""""""""""""""""""
|
||||
Reuse tests from other packages
|
||||
"""""""""""""""""""""""""""""""
|
||||
|
||||
When using a custom repository, it is possible to customize a package that already exists in ``builtin``
|
||||
and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized
|
||||
``package.py`` with an ``includes`` attribute. For instance the ``detection_tests.yaml`` for
|
||||
``myrepo.llvm`` might look like:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
includes:
|
||||
- "builtin.llvm"
|
||||
|
||||
This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
|
||||
those locally defined in the file.
|
||||
|
||||
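
An ``includes`` attribute can be combined with locally defined tests; both sets are run.
The following is only a sketch of a hypothetical ``myrepo.llvm`` file (the executable name,
script output, and expected spec are illustrative, not taken from the real ``llvm`` tests):

.. code-block:: yaml

   includes:
   - "builtin.llvm"
   paths:
   - layout:
     - executables:
       - "bin/clang-11"
       script: |
         echo "clang version 11.0.0"
     results:
     - spec: 'llvm@11.0.0 +clang'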

-----------------------------
Style guidelines for packages

@@ -38,10 +38,13 @@ def _search_duplicate_compilers(error_cls):
import ast
import collections
import collections.abc
import glob
import inspect
import itertools
import pathlib
import pickle
import re
import warnings
from urllib.request import urlopen

import llnl.util.lang

@@ -798,3 +801,76 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
            errors.append(err)

    return errors


#: Sanity checks on the detection of external software
external_detection = AuditClass(
    group="externals",
    tag="PKG-EXTERNALS",
    description="Sanity checks for external software detection",
    kwargs=("pkgs",),
)


def packages_with_detection_tests():
    """Return the list of packages with a corresponding detection_test.yaml file."""
    import spack.config
    import spack.util.path

    to_be_tested = []
    for current_repo in spack.repo.PATH.repos:
        namespace = current_repo.namespace
        packages_dir = pathlib.PurePath(current_repo.packages_path)
        pattern = packages_dir / "**" / "detection_test.yaml"
        pkgs_with_tests = [
            f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
        ]
        to_be_tested.extend(pkgs_with_tests)

    return to_be_tested


@external_detection
def _test_detection_by_executable(pkgs, error_cls):
    """Test drive external detection for packages"""
    import spack.detection

    errors = []

    # Filter the packages and retain only the ones with detection tests
    pkgs_with_tests = packages_with_detection_tests()
    selected_pkgs = []
    for current_package in pkgs_with_tests:
        _, unqualified_name = spack.repo.partition_package_name(current_package)
        # Check for both unqualified name and qualified name
        if unqualified_name in pkgs or current_package in pkgs:
            selected_pkgs.append(current_package)
    selected_pkgs.sort()

    if not selected_pkgs:
        summary = "No detection test to run"
        details = [f' "{p}" has no detection test' for p in pkgs]
        warnings.warn("\n".join([summary] + details))
        return errors

    for pkg_name in selected_pkgs:
        for idx, test_runner in enumerate(
            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
        ):
            specs = test_runner.execute()
            expected_specs = test_runner.expected_specs

            not_detected = set(expected_specs) - set(specs)
            if not_detected:
                summary = pkg_name + ": cannot detect some specs"
                details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
                errors.append(error_cls(summary=summary, details=details))

            not_expected = set(specs) - set(expected_specs)
            if not_expected:
                summary = pkg_name + ": detected unexpected specs"
                msg = '"{0}" was detected, but was not expected [test_id={1}]'
                details = [msg.format(s, idx) for s in sorted(not_expected)]
                errors.append(error_cls(summary=summary, details=details))

    return errors
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import llnl.util.tty.colify
import llnl.util.tty.color as cl

import spack.audit

@@ -20,6 +21,15 @@ def setup_parser(subparser):
    # Audit configuration files
    sp.add_parser("configs", help="audit configuration files")

    # Audit external detection in packages
    external_parser = sp.add_parser("externals", help="check external detection in packages")
    external_parser.add_argument(
        "--list",
        action="store_true",
        dest="list_externals",
        help="if passed, list which packages have detection tests",
    )

    # Https and other linting
    https_parser = sp.add_parser("packages-https", help="check https in packages")
    https_parser.add_argument(

@@ -29,7 +39,7 @@ def setup_parser(subparser):
    # Audit package recipes
    pkg_parser = sp.add_parser("packages", help="audit package recipes")

    for group in [pkg_parser, https_parser]:
    for group in [pkg_parser, https_parser, external_parser]:
        group.add_argument(
            "name",
            metavar="PKG",

@@ -62,6 +72,18 @@ def packages_https(parser, args):
    _process_reports(reports)


def externals(parser, args):
    if args.list_externals:
        msg = "@*{The following packages have detection tests:}"
        tty.msg(cl.colorize(msg))
        llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
        return

    pkgs = args.name or spack.repo.PATH.all_package_names()
    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
    _process_reports(reports)


def list(parser, args):
    for subcommand, check_tags in spack.audit.GROUPS.items():
        print(cl.colorize("@*b{" + subcommand + "}:"))

@@ -78,6 +100,7 @@ def list(parser, args):
def audit(parser, args):
    subcommands = {
        "configs": configs,
        "externals": externals,
        "packages": packages,
        "packages-https": packages_https,
        "list": list,
@@ -5,6 +5,7 @@
import argparse
import errno
import os
import re
import sys
from typing import List, Optional

@@ -156,11 +157,20 @@ def packages_to_search_for(
):
    result = []
    for current_tag in tags:
        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
        result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))

    if names:
        result = [x for x in result if x in names]
        # Match both fully qualified and unqualified
        parts = [rf"(^{x}$|[.]{x}$)" for x in names]
        select_re = re.compile("|".join(parts))
        result = [x for x in result if select_re.search(x)]

    if exclude:
        result = [x for x in result if x not in exclude]
        # Match both fully qualified and unqualified
        parts = [rf"(^{x}$|[.]{x}$)" for x in exclude]
        select_re = re.compile("|".join(parts))
        result = [x for x in result if not select_re.search(x)]

    return result
@@ -112,6 +112,7 @@ def extract_version_from_output(cls, output):
        match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
        if match:
            return ".".join(match.groups())
        return "unknown"

    @classmethod
    def fc_version(cls, fortran_compiler):
@@ -4,6 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import json
import os
import traceback
import warnings

import jsonschema
import jsonschema.exceptions

@@ -46,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name):
    )


def compiler_from_entry(entry):
def compiler_from_entry(entry: dict, manifest_path: str):
    # Note that manifest_path is only passed here to compose a
    # useful warning message when paths appear to be missing.
    compiler_name = translated_compiler_name(entry["name"])
    paths = entry["executables"]

    if "prefix" in entry:
        prefix = entry["prefix"]
        paths = dict(
            (lang, os.path.join(prefix, relpath))
            for (lang, relpath) in entry["executables"].items()
        )
    else:
        paths = entry["executables"]

    # Do a check for missing paths. Note that this isn't possible for
    # all compiler entries, since their "paths" might actually be
    # exe names like "cc" that depend on modules being loaded. Cray
    # manifest entries are always paths though.
    missing_paths = []
    for path in paths.values():
        if not os.path.exists(path):
            missing_paths.append(path)

    # to instantiate a compiler class we may need a concrete version:
    version = "={}".format(entry["version"])
    arch = entry["arch"]

@@ -57,8 +80,18 @@ def compiler_from_entry(entry):

    compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
    spec = spack.spec.CompilerSpec(compiler_cls.name, version)
    paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
    return compiler_cls(spec, operating_system, target, paths)
    path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]

    if missing_paths:
        warnings.warn(
            "Manifest entry refers to nonexistent paths:\n\t"
            + "\n\t".join(missing_paths)
            + f"\nfor {str(spec)}"
            + f"\nin {manifest_path}"
            + "\nPlease report this issue"
        )

    return compiler_cls(spec, operating_system, target, path_list)


def spec_from_entry(entry):

@@ -187,12 +220,21 @@ def read(path, apply_updates):
    tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
    compilers = list()
    if "compilers" in json_data:
        compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
        compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
    tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
    # Filter out the compilers that already appear in the configuration
    compilers = spack.compilers.select_new_compilers(compilers)
    if apply_updates and compilers:
        spack.compilers.add_compilers_to_config(compilers, init_config=False)
        for compiler in compilers:
            try:
                spack.compilers.add_compilers_to_config([compiler], init_config=False)
            except Exception:
                warnings.warn(
                    f"Could not add compiler {str(compiler.spec)}: "
                    f"\n\tfrom manifest: {path}"
                    "\nPlease reexecute with 'spack -d' and include the stack trace"
                )
                tty.debug(f"Include this\n{traceback.format_exc()}")
    if apply_updates:
        for spec in specs.values():
            spack.store.STORE.db.add(spec, directory_layout=None)
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_path, executables_in_path
from .test import detection_tests

__all__ = [
    "DetectedPackage",

@@ -11,4 +12,5 @@
    "executables_in_path",
    "executable_prefix",
    "update_configuration",
    "detection_tests",
]
@@ -299,36 +299,36 @@ def find_windows_compiler_bundled_packages() -> List[str]:


class WindowsKitExternalPaths:
    plat_major_ver = None
    if sys.platform == "win32":
        plat_major_ver = str(winOs.windows_version()[0])

    @staticmethod
    def find_windows_kit_roots() -> Optional[str]:
    def find_windows_kit_roots() -> List[str]:
        """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
        if sys.platform != "win32":
            return None
            return []
        program_files = os.environ["PROGRAMFILES(x86)"]
        kit_base = os.path.join(
            program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
        )
        return kit_base
        kit_base = os.path.join(program_files, "Windows Kits", "**")
        return glob.glob(kit_base)

    @staticmethod
    def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
        """Returns Windows kit bin directory per version"""
        kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
        assert kit_base is not None, "unexpected value for kit_base"
        kit_bin = os.path.join(kit_base, "bin")
        return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
        assert kit_base, "Unexpectedly empty value for Windows kit base path"
        kit_paths = []
        for kit in kit_base:
            kit_bin = os.path.join(kit, "bin")
            kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
        return kit_paths

    @staticmethod
    def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
        """Returns Windows kit lib directory per version"""
        kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
        assert kit_base is not None, "unexpected value for kit_base"
        kit_lib = os.path.join(kit_base, "Lib")
        return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
        assert kit_base, "Unexpectedly empty value for Windows kit base path"
        kit_paths = []
        for kit in kit_base:
            kit_lib = os.path.join(kit, "Lib")
            kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
        return kit_paths

    @staticmethod
    def find_windows_driver_development_kit_paths() -> List[str]:

@@ -347,23 +347,30 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
        if not reg:
            # couldn't find key, return empty list
            return []
        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
            reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
        )
        kit_root_reg = re.compile(r"KitsRoot[0-9]+")
        root_paths = []
        for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
            root_paths.extend(
                WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
            )
        return root_paths

    @staticmethod
    def find_windows_kit_reg_sdk_paths() -> List[str]:
        reg = spack.util.windows_registry.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
            % WindowsKitExternalPaths.plat_major_ver,
        sdk_paths = []
        sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
        windows_reg = spack.util.windows_registry.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
            root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
        )
        if not reg:
            # couldn't find key, return empty list
            return []
        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
            reg.get_value("InstallationFolder").value
        )
        for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
            reg = windows_reg.get_subkey(key)
            sdk_paths.extend(
                WindowsKitExternalPaths.find_windows_kit_lib_paths(
                    reg.get_value("InstallationFolder").value
                )
            )
        return sdk_paths


def find_win32_additional_install_paths() -> List[str]:
@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Detection of software installed in the system based on paths inspections
"""Detection of software installed in the system, based on paths inspections
and running executables.
"""
import collections

@@ -322,12 +322,14 @@ def by_path(
    path_hints: Optional[List[str]] = None,
    max_workers: Optional[int] = None,
) -> Dict[str, List[DetectedPackage]]:
    """Return the list of packages that have been detected on the system,
    searching by path.
    """Return the list of packages that have been detected on the system, keyed by
    unqualified package name.

    Args:
        packages_to_search: list of package classes to be detected
        packages_to_search: list of packages to be detected. Each package can be either
            unqualified or fully qualified
        path_hints: initial list of paths to be searched
        max_workers: maximum number of workers to search for packages in parallel
    """
    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call

@@ -355,7 +357,8 @@ def by_path(
        try:
            detected = future.result(timeout=DETECTION_TIMEOUT)
            if detected:
                result[pkg_name].extend(detected)
                _, unqualified_name = spack.repo.partition_package_name(pkg_name)
                result[unqualified_name].extend(detected)
        except Exception:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"

lib/spack/spack/detection/test.py (new file, 187 lines)
@@ -0,0 +1,187 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Create and run mock e2e tests for package detection."""
import collections
import contextlib
import pathlib
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

import jinja2

from llnl.util import filesystem

import spack.repo
import spack.spec
from spack.util import spack_yaml

from .path import by_path


class MockExecutables(NamedTuple):
    """Mock executables to be used in detection tests"""

    #: Relative paths for mock executables to be created
    executables: List[str]
    #: Shell script for the mock executable
    script: str


class ExpectedTestResult(NamedTuple):
    """Data structure to model assertions on detection tests"""

    #: Spec to be detected
    spec: str


class DetectionTest(NamedTuple):
    """Data structure to construct detection tests by PATH inspection.

    Packages may have a YAML file containing the description of one or more detection tests
    to be performed. Each test creates a few mock executable scripts in a temporary folder,
    and checks that detection by PATH gives the expected results.
    """

    pkg_name: str
    layout: List[MockExecutables]
    results: List[ExpectedTestResult]


class Runner:
    """Runs an external detection test"""

    def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None:
        self.test = test
        self.repository = repository
        self.tmpdir = tempfile.TemporaryDirectory()

    def execute(self) -> List[spack.spec.Spec]:
        """Executes a test and returns the specs that have been detected.

        This function sets up a test in a temporary directory, according to the prescriptions
        in the test layout, then performs a detection by executables and returns the specs that
        have been detected.
        """
        with self._mock_layout() as path_hints:
            entries = by_path([self.test.pkg_name], path_hints=path_hints)
            _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
            specs = set(x.spec for x in entries[unqualified_name])
        return list(specs)

    @contextlib.contextmanager
    def _mock_layout(self) -> Generator[List[str], None, None]:
        hints = set()
        try:
            for entry in self.test.layout:
                exes = self._create_executable_scripts(entry)

                for mock_executable in exes:
                    hints.add(str(mock_executable.parent))

            yield list(hints)
        finally:
            self.tmpdir.cleanup()

    def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
        relative_paths = mock_executables.executables
        script = mock_executables.script
        script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
        result = []
        for mock_exe_path in relative_paths:
            rel_path = pathlib.Path(mock_exe_path)
            abs_path = pathlib.Path(self.tmpdir.name) / rel_path
            abs_path.parent.mkdir(parents=True, exist_ok=True)
            abs_path.write_text(script_template.render(script=script))
            filesystem.set_executable(abs_path)
            result.append(abs_path)
        return result

    @property
    def expected_specs(self) -> List[spack.spec.Spec]:
        return [spack.spec.Spec(r.spec) for r in self.test.results]


def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
    """Returns a list of test runners for a given package.

    Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``,
    alongside the ``package.py`` file.

    This function reads that file to create a bunch of ``Runner`` objects.

    Args:
        pkg_name: name of the package to test
        repository: repository where the package lives
    """
    result = []
    detection_tests_content = read_detection_tests(pkg_name, repository)

    tests_by_path = detection_tests_content.get("paths", [])
    for single_test_data in tests_by_path:
        mock_executables = []
        for layout in single_test_data["layout"]:
            mock_executables.append(
                MockExecutables(executables=layout["executables"], script=layout["script"])
            )
        expected_results = []
        for assertion in single_test_data["results"]:
            expected_results.append(ExpectedTestResult(spec=assertion["spec"]))

        current_test = DetectionTest(
            pkg_name=pkg_name, layout=mock_executables, results=expected_results
        )
        result.append(Runner(test=current_test, repository=repository))

    return result


def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]:
    """Returns the normalized content of the detection_test.yaml associated with the package
    passed in input.

    The content is merged with that of any package that is transitively included using the
    "includes" attribute.

    Args:
        pkg_name: name of the package to test
        repository: repository in which to search for packages
    """
    content_stack, seen = [], set()
    included_packages: Deque[str] = collections.deque()

    root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository)
    included_packages.extend(result.get("includes", []))
    seen |= set(result.get("includes", []))

    while included_packages:
        current_package = included_packages.popleft()
        try:
            current_detection_yaml, content = _detection_tests_yaml(current_package, repository)
        except FileNotFoundError as e:
            msg = (
                f"cannot read the detection tests from the '{current_package}' package, "
                f"included by {root_detection_yaml}"
            )
            raise FileNotFoundError(msg + f"\n\n\t{e}\n")

        content_stack.append((current_package, content))
        included_packages.extend(x for x in content.get("includes", []) if x not in seen)
        seen |= set(content.get("includes", []))

    result.setdefault("paths", [])
    for pkg_name, content in content_stack:
        result["paths"].extend(content.get("paths", []))

    return result


def _detection_tests_yaml(
    pkg_name: str, repository: spack.repo.RepoPath
) -> Tuple[pathlib.Path, Dict[str, Any]]:
    pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
    detection_tests_yaml = pkg_dir / "detection_test.yaml"
    with open(str(detection_tests_yaml)) as f:
        content = spack_yaml.load(f)
    return detection_tests_yaml, content
@@ -734,7 +734,11 @@ def version_from_git(git_exe):
    @property
    def git(self):
        if not self._git:
            self._git = spack.util.git.git()
            try:
                self._git = spack.util.git.git(required=True)
            except CommandNotFoundError as exc:
                tty.error(str(exc))
                raise

            # Disable advice for a quieter fetch
            # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
@@ -847,10 +847,11 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
        else:
            cache_only = self.install_args.get("dependencies_cache_only")

        # Include build dependencies if pkg is not installed and cache_only
        # is False, or if build dependencies are explicitly called for
        # by include_build_deps.
        if include_build_deps or not (cache_only or pkg.spec.installed):
        # Include build dependencies if pkg is going to be built from sources, or
        # if build deps are explicitly requested.
        if include_build_deps or not (
            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
        ):
            depflag |= dt.BUILD
        if self.run_tests(pkg):
            depflag |= dt.TEST
@@ -24,7 +24,7 @@
import traceback
import types
import uuid
from typing import Any, Dict, List, Union
from typing import Any, Dict, List, Tuple, Union

import llnl.path
import llnl.util.filesystem as fs

@@ -745,10 +745,18 @@ def all_package_paths(self):
        for name in self.all_package_names():
            yield self.package_path(name)

    def packages_with_tags(self, *tags):
    def packages_with_tags(self, *tags, full=False):
        """Returns a list of packages matching any of the tags in input.

        Args:
            full: if True the package names in the output are fully-qualified
        """
        r = set()
        for repo in self.repos:
            r |= set(repo.packages_with_tags(*tags))
            current = repo.packages_with_tags(*tags)
            if full:
                current = [f"{repo.namespace}.{x}" for x in current]
            r |= set(current)
        return sorted(r)

    def all_package_classes(self):

@@ -1124,7 +1132,8 @@ def extensions_for(self, extendee_spec):
    def dirname_for_package_name(self, pkg_name):
        """Get the directory name for a particular package. This is the
        directory that contains its package.py file."""
        return os.path.join(self.packages_path, pkg_name)
        _, unqualified_name = self.partition_package_name(pkg_name)
        return os.path.join(self.packages_path, unqualified_name)

    def filename_for_package_name(self, pkg_name):
        """Get the filename for the module we should load for a particular

@@ -1222,15 +1231,10 @@ def get_pkg_class(self, pkg_name):
        package. Then extracts the package class from the module
        according to Spack's naming convention.
        """
        namespace, _, pkg_name = pkg_name.rpartition(".")
        if namespace and (namespace != self.namespace):
            raise InvalidNamespaceError(
                "Invalid namespace for %s repo: %s" % (self.namespace, namespace)
            )

        namespace, pkg_name = self.partition_package_name(pkg_name)
        class_name = nm.mod_to_class(pkg_name)
        fullname = f"{self.full_namespace}.{pkg_name}"

        fullname = "{0}.{1}".format(self.full_namespace, pkg_name)
        try:
            module = importlib.import_module(fullname)
        except ImportError:

@@ -1241,7 +1245,7 @@ def get_pkg_class(self, pkg_name):

        cls = getattr(module, class_name)
        if not inspect.isclass(cls):
            tty.die("%s.%s is not a class" % (pkg_name, class_name))
            tty.die(f"{pkg_name}.{class_name} is not a class")

        new_cfg_settings = (
            spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {})

@@ -1280,6 +1284,15 @@ def get_pkg_class(self, pkg_name):

        return cls

    def partition_package_name(self, pkg_name: str) -> Tuple[str, str]:
        namespace, pkg_name = partition_package_name(pkg_name)
        if namespace and (namespace != self.namespace):
            raise InvalidNamespaceError(
                f"Invalid namespace for the '{self.namespace}' repo: {namespace}"
            )

        return namespace, pkg_name

    def __str__(self):
        return "[Repo '%s' at '%s']" % (self.namespace, self.root)

@@ -1293,6 +1306,20 @@ def __contains__(self, pkg_name):
RepoType = Union[Repo, RepoPath]


def partition_package_name(pkg_name: str) -> Tuple[str, str]:
    """Given a package name that might be fully-qualified, returns the namespace part,
    if present, and the unqualified package name.

    If the package name is unqualified, the namespace is an empty string.

    Args:
        pkg_name: a package name, either unqualified like "llvm", or
            fully-qualified, like "builtin.llvm"
    """
    namespace, _, pkg_name = pkg_name.rpartition(".")
    return namespace, pkg_name


def create_repo(root, namespace=None, subdir=packages_dir_name):
    """Create a new repository in root with the specified namespace.

@@ -120,8 +120,9 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
    "names,tags,exclude,expected",
    [
        # find --all
        (None, ["detectable"], [], ["find-externals1"]),
        (None, ["detectable"], [], ["builtin.mock.find-externals1"]),
        # find --all --exclude find-externals1
        (None, ["detectable"], ["builtin.mock.find-externals1"], []),
        (None, ["detectable"], ["find-externals1"], []),
        # find cmake (and cmake is not detectable)
        (["cmake"], ["detectable"], [], []),

@@ -202,19 +203,6 @@ def fail():
    assert "Skipping manifest and continuing" in output


def test_find_external_nonempty_default_manifest_dir(
    mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
):
    """The user runs 'spack external find'; the default manifest directory
    contains a manifest file. Ensure that the specs are read.
    """
    monkeypatch.setenv("PATH", "")
    monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
    external("find")
    specs = spack.store.STORE.db.query("hwloc")
    assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)


def test_find_external_merge(mutable_config, mutable_mock_repo):
    """Check that 'spack find external' doesn't overwrite an existing spec
    entry in packages.yaml.

@@ -1714,17 +1714,6 @@ def brand_new_binary_cache():
    )


@pytest.fixture
def directory_with_manifest(tmpdir):
    """Create a manifest file in a directory. Used by 'spack external'."""
    with tmpdir.as_cwd():
        test_db_fname = "external-db.json"
        with open(test_db_fname, "w") as db_file:
            json.dump(spack.test.cray_manifest.create_manifest_content(), db_file)

        yield str(tmpdir)


@pytest.fixture()
def noncyclical_dir_structure(tmpdir):
    """

@@ -23,53 +23,6 @@
import spack.store
from spack.cray_manifest import compiler_from_entry, entries_to_specs

example_x_json_str = """\
{
    "name": "packagex",
    "hash": "hash-of-x",
    "prefix": "/path/to/packagex-install/",
    "version": "1.0",
    "arch": {
        "platform": "linux",
        "platform_os": "centos8",
        "target": {
            "name": "haswell"
        }
    },
    "compiler": {
        "name": "gcc",
        "version": "10.2.0.cray"
    },
    "dependencies": {
        "packagey": {
            "hash": "hash-of-y",
            "type": ["link"]
        }
    },
    "parameters": {
        "precision": ["double", "float"]
    }
}
"""


example_compiler_entry = """\
{
    "name": "gcc",
    "prefix": "/path/to/compiler/",
    "version": "7.5.0",
    "arch": {
        "os": "centos8",
        "target": "x86_64"
    },
    "executables": {
        "cc": "/path/to/compiler/cc",
        "cxx": "/path/to/compiler/cxx",
        "fc": "/path/to/compiler/fc"
    }
}
"""


class JsonSpecEntry:
    def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters):

@@ -104,16 +57,19 @@ def __init__(self, platform, os, target):
        self.os = os
        self.target = target

    def to_dict(self):
    def spec_json(self):
        return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}}

    def compiler_json(self):
        return {"os": self.os, "target": self.target}


class JsonCompilerEntry:
    def __init__(self, name, version, arch=None, executables=None):
        self.name = name
        self.version = version
        if not arch:
            arch = {"os": "centos8", "target": "x86_64"}
            arch = JsonArchEntry("anyplatform", "anyos", "anytarget")
        if not executables:
            executables = {
                "cc": "/path/to/compiler/cc",

@@ -127,7 +83,7 @@ def compiler_json(self):
        return {
            "name": self.name,
            "version": self.version,
            "arch": self.arch,
            "arch": self.arch.compiler_json(),
            "executables": self.executables,
        }

@@ -138,22 +94,58 @@ def spec_json(self):
        return {"name": self.name, "version": self.version}


_common_arch = JsonArchEntry(platform="linux", os="centos8", target="haswell").to_dict()

# Intended to match example_compiler_entry above
_common_compiler = JsonCompilerEntry(
    name="gcc",
    version="10.2.0.cray",
    arch={"os": "centos8", "target": "x86_64"},
    executables={
        "cc": "/path/to/compiler/cc",
        "cxx": "/path/to/compiler/cxx",
        "fc": "/path/to/compiler/fc",
    },
)
@pytest.fixture
def _common_arch(test_platform):
    return JsonArchEntry(
        platform=test_platform.name,
        os=test_platform.front_os,
        target=test_platform.target("fe").name,
    )


def test_compatibility():
@pytest.fixture
def _common_compiler(_common_arch):
    return JsonCompilerEntry(
        name="gcc",
        version="10.2.0.2112",
        arch=_common_arch,
        executables={
            "cc": "/path/to/compiler/cc",
            "cxx": "/path/to/compiler/cxx",
            "fc": "/path/to/compiler/fc",
        },
    )


@pytest.fixture
def _other_compiler(_common_arch):
    return JsonCompilerEntry(
        name="clang",
        version="3.0.0",
        arch=_common_arch,
        executables={
            "cc": "/path/to/compiler/clang",
            "cxx": "/path/to/compiler/clang++",
            "fc": "/path/to/compiler/flang",
        },
    )


@pytest.fixture
def _raw_json_x(_common_arch):
    return {
        "name": "packagex",
        "hash": "hash-of-x",
        "prefix": "/path/to/packagex-install/",
        "version": "1.0",
        "arch": _common_arch.spec_json(),
        "compiler": {"name": "gcc", "version": "10.2.0.2112"},
        "dependencies": {"packagey": {"hash": "hash-of-y", "type": ["link"]}},
        "parameters": {"precision": ["double", "float"]},
    }


def test_manifest_compatibility(_common_arch, _common_compiler, _raw_json_x):
    """Make sure that JsonSpecEntry outputs the expected JSON structure
    by comparing it with JSON parsed from an example string. This
    ensures that the testing objects like JsonSpecEntry produce the

@@ -164,7 +156,7 @@ def test_compatibility():
        hash="hash-of-y",
        prefix="/path/to/packagey-install/",
        version="1.0",
        arch=_common_arch,
        arch=_common_arch.spec_json(),
        compiler=_common_compiler.spec_json(),
        dependencies={},
        parameters={},

@@ -175,23 +167,44 @@ def test_compatibility():
        hash="hash-of-x",
        prefix="/path/to/packagex-install/",
        version="1.0",
        arch=_common_arch,
        arch=_common_arch.spec_json(),
        compiler=_common_compiler.spec_json(),
        dependencies=dict([y.as_dependency(deptypes=["link"])]),
        parameters={"precision": ["double", "float"]},
    )

    x_from_entry = x.to_dict()
    x_from_str = json.loads(example_x_json_str)
    assert x_from_entry == x_from_str
    assert x_from_entry == _raw_json_x


def test_compiler_from_entry():
    compiler_data = json.loads(example_compiler_entry)
    compiler_from_entry(compiler_data)
    compiler_data = json.loads(
        """\
{
    "name": "gcc",
    "prefix": "/path/to/compiler/",
    "version": "7.5.0",
    "arch": {
        "os": "centos8",
        "target": "x86_64"
    },
    "executables": {
        "cc": "/path/to/compiler/cc",
        "cxx": "/path/to/compiler/cxx",
        "fc": "/path/to/compiler/fc"
    }
}
"""
    )
    compiler = compiler_from_entry(compiler_data, "/example/file")
    assert compiler.cc == "/path/to/compiler/cc"
    assert compiler.cxx == "/path/to/compiler/cxx"
    assert compiler.fc == "/path/to/compiler/fc"
    assert compiler.operating_system == "centos8"


def generate_openmpi_entries():
@pytest.fixture
def generate_openmpi_entries(_common_arch, _common_compiler):
    """Generate two example JSON entries that refer to an OpenMPI
    installation and a hwloc dependency.
    """

@@ -202,7 +215,7 @@ def generate_openmpi_entries():
        hash="hwlocfakehashaaa",
        prefix="/path/to/hwloc-install/",
        version="2.0.3",
        arch=_common_arch,
        arch=_common_arch.spec_json(),
        compiler=_common_compiler.spec_json(),
        dependencies={},
        parameters={},

@@ -216,26 +229,25 @@ def generate_openmpi_entries():
        hash="openmpifakehasha",
        prefix="/path/to/openmpi-install/",
        version="4.1.0",
        arch=_common_arch,
        arch=_common_arch.spec_json(),
        compiler=_common_compiler.spec_json(),
        dependencies=dict([hwloc.as_dependency(deptypes=["link"])]),
        parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True},
    )

    return [openmpi, hwloc]
    return list(x.to_dict() for x in [openmpi, hwloc])


def test_generate_specs_from_manifest():
def test_generate_specs_from_manifest(generate_openmpi_entries):
    """Given JSON entries, check that we can form a set of Specs
    including dependency references.
    """
    entries = list(x.to_dict() for x in generate_openmpi_entries())
    specs = entries_to_specs(entries)
    specs = entries_to_specs(generate_openmpi_entries)
    (openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi")
    assert openmpi_spec["hwloc"]


def test_translate_cray_platform_to_linux(monkeypatch):
def test_translate_cray_platform_to_linux(monkeypatch, _common_compiler):
    """Manifests might list specs on newer Cray platforms as being "cray",
    but Spack identifies such platforms as "linux". Make sure we
    automatically transform these entries.

@@ -247,13 +259,13 @@ def the_host_is_linux():

    monkeypatch.setattr(spack.platforms, "host", the_host_is_linux)

    cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64").to_dict()
    cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64")
    spec_json = JsonSpecEntry(
        name="cray-mpich",
        hash="craympichfakehashaaa",
        prefix="/path/to/cray-mpich/",
        version="1.0.0",
        arch=cray_arch,
        arch=cray_arch.spec_json(),
        compiler=_common_compiler.spec_json(),
        dependencies={},
        parameters={},

@@ -263,14 +275,15 @@ def the_host_is_linux():
    assert spec.architecture.platform == "linux"


def test_translate_compiler_name():
def test_translate_compiler_name(_common_arch):
    nvidia_compiler = JsonCompilerEntry(
        name="nvidia",
        version="19.1",
        arch=_common_arch,
        executables={"cc": "/path/to/compiler/nvc", "cxx": "/path/to/compiler/nvc++"},
    )

    compiler = compiler_from_entry(nvidia_compiler.compiler_json())
    compiler = compiler_from_entry(nvidia_compiler.compiler_json(), "/example/file")
    assert compiler.name == "nvhpc"

    spec_json = JsonSpecEntry(

@@ -278,7 +291,7 @@ def test_translate_compiler_name():
        hash="hwlocfakehashaaa",
        prefix="/path/to/hwloc-install/",
        version="2.0.3",
        arch=_common_arch,
        arch=_common_arch.spec_json(),
        compiler=nvidia_compiler.spec_json(),
        dependencies={},
        parameters={},

@@ -288,18 +301,18 @@ def test_translate_compiler_name():
    assert spec.compiler.name == "nvhpc"


def test_failed_translate_compiler_name():
def test_failed_translate_compiler_name(_common_arch):
    unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0")

    with pytest.raises(spack.compilers.UnknownCompilerError):
        compiler_from_entry(unknown_compiler.compiler_json())
        compiler_from_entry(unknown_compiler.compiler_json(), "/example/file")

    spec_json = JsonSpecEntry(
        name="packagey",
        hash="hash-of-y",
        prefix="/path/to/packagey-install/",
        version="1.0",
        arch=_common_arch,
        arch=_common_arch.spec_json(),
        compiler=unknown_compiler.spec_json(),
        dependencies={},
        parameters={},

@@ -309,7 +322,8 @@ def test_failed_translate_compiler_name():
    entries_to_specs([spec_json])


def create_manifest_content():
@pytest.fixture
def manifest_content(generate_openmpi_entries, _common_compiler, _other_compiler):
    return {
        # Note: the cray_manifest module doesn't use the _meta section right
        # now, but it is anticipated to be useful

@@ -319,43 +333,70 @@ def create_manifest_content():
            "schema-version": "1.3",
            "cpe-version": "22.06",
        },
        "specs": list(x.to_dict() for x in generate_openmpi_entries()),
        "compilers": [_common_compiler.compiler_json()],
        "specs": generate_openmpi_entries,
        "compilers": [_common_compiler.compiler_json(), _other_compiler.compiler_json()],
    }


@pytest.mark.only_original(
    "The ASP-based concretizer is currently picky about OS matching and will fail."
)
def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_database):
def test_read_cray_manifest(
    tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
):
    """Check that (a) we can read the cray manifest and add it to the Spack
    Database and (b) we can concretize specs based on that.
    """
    with tmpdir.as_cwd():
        test_db_fname = "external-db.json"
        with open(test_db_fname, "w") as db_file:
            json.dump(create_manifest_content(), db_file)
            json.dump(manifest_content, db_file)
        cray_manifest.read(test_db_fname, True)
        query_specs = spack.store.STORE.db.query("openmpi")
        assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)

        concretized_specs = spack.cmd.parse_specs(
            "depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64" " ^/openmpifakehasha".split(),
            concretize=True,
            "depends-on-openmpi ^/openmpifakehasha".split(), concretize=True
        )
        assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa"


@pytest.mark.only_original(
    "The ASP-based concretizer is currently picky about OS matching and will fail."
)
def test_read_cray_manifest_add_compiler_failure(
    tmpdir, mutable_config, mock_packages, mutable_database, manifest_content, monkeypatch
):
    """Check that the cray manifest can be read even if some compilers cannot
    be added.
    """
    orig_add_compilers_to_config = spack.compilers.add_compilers_to_config

    class fail_for_clang:
        def __init__(self):
            self.called_with_clang = False

        def __call__(self, compilers, **kwargs):
            if any(x.name == "clang" for x in compilers):
                self.called_with_clang = True
                raise Exception()
            return orig_add_compilers_to_config(compilers, **kwargs)

    checker = fail_for_clang()
    monkeypatch.setattr(spack.compilers, "add_compilers_to_config", checker)

    with tmpdir.as_cwd():
        test_db_fname = "external-db.json"
        with open(test_db_fname, "w") as db_file:
            json.dump(manifest_content, db_file)
        cray_manifest.read(test_db_fname, True)
        query_specs = spack.store.STORE.db.query("openmpi")
        assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)

    assert checker.called_with_clang


def test_read_cray_manifest_twice_no_compiler_duplicates(
    tmpdir, mutable_config, mock_packages, mutable_database
    tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
):
    with tmpdir.as_cwd():
        test_db_fname = "external-db.json"
        with open(test_db_fname, "w") as db_file:
            json.dump(create_manifest_content(), db_file)
            json.dump(manifest_content, db_file)

        # Read the manifest twice
        cray_manifest.read(test_db_fname, True)

@@ -363,7 +404,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(

    compilers = spack.compilers.all_compilers()
    filtered = list(
        c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.cray")
        c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112")
    )
    assert len(filtered) == 1

@@ -423,3 +464,27 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable
    with pytest.raises(cray_manifest.ManifestValidationError) as e:
        cray_manifest.read(invalid_schema_path, True)
    str(e)


@pytest.fixture
def directory_with_manifest(tmpdir, manifest_content):
    """Create a manifest file in a directory. Used by 'spack external'."""
    with tmpdir.as_cwd():
        test_db_fname = "external-db.json"
        with open(test_db_fname, "w") as db_file:
            json.dump(manifest_content, db_file)

        yield str(tmpdir)


def test_find_external_nonempty_default_manifest_dir(
    mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
):
    """The user runs 'spack external find'; the default manifest directory
    contains a manifest file. Ensure that the specs are read.
    """
    monkeypatch.setenv("PATH", "")
    monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
    spack.cmd.external._collect_and_consume_cray_manifest_files(ignore_default_dir=False)
    specs = spack.store.STORE.db.query("hwloc")
    assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
@@ -20,6 +20,7 @@
import spack.concretize
import spack.config
import spack.database
import spack.deptypes as dt
import spack.installer as inst
import spack.package_base
import spack.package_prefs as prefs

@@ -1388,6 +1389,26 @@ def test_single_external_implicit_install(install_mockery, explicit_args, is_exp
    assert spack.store.STORE.db.get_record(pkg).explicit == is_explicit


def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch):
    """When overwrite installing something from sources, build deps should be installed."""
    s = spack.spec.Spec("dtrun3").concretized()
    create_installer([(s, {})]).install()

    # Verify there is a pure build dep
    edge = s.edges_to_dependencies(name="dtbuild3").pop()
    assert edge.depflag == dt.BUILD
    build_dep = edge.spec

    # Uninstall the build dep
    build_dep.package.do_uninstall()

    # Overwrite install the root dtrun3
    create_installer([(s, {"overwrite": [s.dag_hash()]})]).install()

    # Verify that the build dep was also installed.
    assert build_dep.installed


@pytest.mark.parametrize("run_tests", [True, False])
def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, run_tests):
    """Confirm printing of install log skipped if not run/no failures."""

@@ -181,3 +181,15 @@ def test_repository_construction_doesnt_use_globals(nullify_globals, repo_paths,
    repo_path = spack.repo.RepoPath(*repo_paths)
    assert len(repo_path.repos) == len(namespaces)
    assert [x.namespace for x in repo_path.repos] == namespaces


@pytest.mark.parametrize("method_name", ["dirname_for_package_name", "filename_for_package_name"])
def test_path_computation_with_names(method_name, mock_repo_path):
    """Tests that repositories can compute the correct paths when using both fully qualified
    names and unqualified names.
    """
    repo_path = spack.repo.RepoPath(mock_repo_path)
    method = getattr(repo_path, method_name)
    unqualified = method("mpileaks")
    qualified = method("builtin.mock.mpileaks")
    assert qualified == unqualified
@@ -827,16 +827,16 @@ e4s-cray-rhel-build:
  variables:
    SPACK_CI_STACK_NAME: e4s-cray-sles

# e4s-cray-sles-generate:
#   extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]
e4s-cray-sles-generate:
  extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]

# e4s-cray-sles-build:
#   extends: [ ".build", ".e4s-cray-sles" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: e4s-cray-sles-generate
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: e4s-cray-sles-generate
e4s-cray-sles-build:
  extends: [ ".build", ".e4s-cray-sles" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: e4s-cray-sles-generate
    strategy: depend
  needs:
    - artifacts: True
      job: e4s-cray-sles-generate
@@ -423,7 +423,7 @@ _spack_audit() {
    then
        SPACK_COMPREPLY="-h --help"
    else
        SPACK_COMPREPLY="configs packages-https packages list"
        SPACK_COMPREPLY="configs externals packages-https packages list"
    fi
}

@@ -431,6 +431,15 @@ _spack_audit_configs() {
    SPACK_COMPREPLY="-h --help"
}

_spack_audit_externals() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --list"
    else
        SPACK_COMPREPLY=""
    fi
}

_spack_audit_packages_https() {
    if $list_options
    then
@ -508,6 +508,7 @@ complete -c spack -n '__fish_spack_using_command arch' -s b -l backend -d 'print
# spack audit
set -g __fish_spack_optspecs_spack_audit h/help
complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a configs -d 'audit configuration files'
complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a externals -d 'check external detection in packages'
complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a packages-https -d 'check https in packages'
complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a packages -d 'audit package recipes'
complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a list -d 'list available checks and exits'
@ -519,6 +520,14 @@ set -g __fish_spack_optspecs_spack_audit_configs h/help
complete -c spack -n '__fish_spack_using_command audit configs' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command audit configs' -s h -l help -d 'show this help message and exit'

# spack audit externals
set -g __fish_spack_optspecs_spack_audit_externals h/help list
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 audit externals' -f -a '(__fish_spack_packages)'
complete -c spack -n '__fish_spack_using_command audit externals' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command audit externals' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command audit externals' -l list -f -a list_externals
complete -c spack -n '__fish_spack_using_command audit externals' -l list -d 'if passed, list which packages have detection tests'

# spack audit packages-https
set -g __fish_spack_optspecs_spack_audit_packages_https h/help all
complete -c spack -n '__fish_spack_using_command_pos_remainder 0 audit packages-https' -f -a '(__fish_spack_packages)'

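Together, the bash and fish snippets above wire up completion for the new ``spack audit externals`` subcommand. A short usage sketch (assumes ``spack`` is on ``PATH``; the ``--list`` flag and package-name positionals come straight from the completion definitions in this diff, while the specific package arguments are illustrative):

.. code-block:: python

   import subprocess

   # List which packages ship detection tests, then run the checks for two of them.
   subprocess.run(["spack", "audit", "externals", "--list"], check=True)
   subprocess.run(["spack", "audit", "externals", "gcc", "llvm"], check=True)
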
38
var/spack/repos/builtin/packages/gcc/detection_test.yaml
Normal file
@ -0,0 +1,38 @@
paths:
# Ubuntu 18.04, system compilers without Fortran
- layout:
  - executables:
    - "bin/gcc"
    - "bin/g++"
    script: "echo 7.5.0"
  results:
  - spec: "gcc@7.5.0 languages=c,c++"
# Mock a version < 7 of GCC that requires -dumpversion and
# errors with -dumpfullversion
- layout:
  - executables:
    - "bin/gcc-5"
    - "bin/g++-5"
    - "bin/gfortran-5"
    script: |
      if [[ "$1" == "-dumpversion" ]] ; then
        echo "5.5.0"
      else
        echo "gcc-5: fatal error: no input files"
        echo "compilation terminated."
        exit 1
      fi
  results:
  - spec: "gcc@5.5.0 languages=c,c++,fortran"
# Multiple compilers present at the same time
- layout:
  - executables:
    - "bin/x86_64-linux-gnu-gcc-6"
    script: 'echo 6.5.0'
  - executables:
    - "bin/x86_64-linux-gnu-gcc-10"
    - "bin/x86_64-linux-gnu-g++-10"
    script: "echo 10.1.0"
  results:
  - spec: "gcc@6.5.0 languages=c"
  - spec: "gcc@10.1.0 languages=c,c++"

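A quick way to see what each mocked layout in this new file is expected to yield is to load it directly; a sketch, assuming PyYAML is installed (it is not required by the file itself):

.. code-block:: python

   import yaml

   with open("var/spack/repos/builtin/packages/gcc/detection_test.yaml") as f:
       tests = yaml.safe_load(f)["paths"]

   for test in tests:
       # Flatten the mock executables of each layout and pair them with
       # the specs the detection logic is expected to produce.
       exes = [e for group in test["layout"] for e in group["executables"]]
       specs = [r["spec"] for r in test["results"]]
       print(exes, "->", specs)
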
@ -27,6 +27,8 @@ class Hpctoolkit(AutotoolsPackage):
    test_requires_compiler = True

    version("develop", branch="develop")
    version("2023.08.stable", branch="release/2023.08")
    version("2023.08.1", tag="2023.08.1", commit="753a72affd584a5e72fe153d1e8c47a394a3886e")
    version("2023.03.stable", branch="release/2023.03")
    version("2023.03.01", commit="9e0daf2ad169f6c7f6c60408475b3c2f71baebbf")
    version("2022.10.01", commit="e8a5cc87e8f5ddfd14338459a4106f8e0d162c83")

19
var/spack/repos/builtin/packages/intel/detection_test.yaml
Normal file
@ -0,0 +1,19 @@
paths:
- layout:
  - executables:
    - "bin/intel64/icc"
    script: |
      echo "icc (ICC) 18.0.5 20180823"
      echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved."
  - executables:
    - "bin/intel64/icpc"
    script: |
      echo "icpc (ICC) 18.0.5 20180823"
      echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved."
  - executables:
    - "bin/intel64/ifort"
    script: |
      echo "ifort (IFORT) 18.0.5 20180823"
      echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved."
  results:
  - spec: 'intel@18.0.5'

@ -19,6 +19,7 @@ class KokkosNvccWrapper(Package):

    maintainers("Rombur")

    version("4.1.00", sha256="cf725ea34ba766fdaf29c884cfe2daacfdc6dc2d6af84042d1c78d0f16866275")
    version("4.0.01", sha256="bb942de8afdd519fd6d5d3974706bfc22b6585a62dd565c12e53bdb82cd154f0")
    version("4.0.00", sha256="1829a423883d4b44223c7c3a53d3c51671145aad57d7d23e6a1a4bebf710dcf6")
    version("3.7.02", sha256="5024979f06bc8da2fb696252a66297f3e0e67098595a0cc7345312b3b4aa0f54")

@ -3,6 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import subprocess

import llnl.util.tty as tty

from spack.package import *


@ -14,25 +18,47 @@ class Libcatalyst(CMakePackage):
    git = "https://gitlab.kitware.com/paraview/catalyst.git"
    url = "https://gitlab.kitware.com/api/v4/projects/paraview%2Fcatalyst/packages/generic/catalyst/v2.0.0/catalyst-v2.0.0.tar.gz"

    maintainers("mathstuf")

    version("2.0.0-rc3", sha256="8862bd0a4d0be2176b4272f9affda1ea4e5092087acbb99a2fe2621c33834e05")

    # master as of 2021-05-12
    version("0.20210512", commit="8456ccd6015142b5a7705f79471361d4f5644fa7")
    maintainers("mathstuf", "ayenpure")
    version("master", branch="master")
    version("2.0.0-rc4", sha256="cb491e4ccd344156cc2494f65b9f38885598c16d12e1016c36e2ee0bc3640863")

    variant("mpi", default=False, description="Enable MPI support")
    variant("conduit", default=False, description="Use external Conduit for Catalyst")

    depends_on("mpi", when="+mpi")

    # TODO: catalyst doesn't support an external conduit
    # depends_on('conduit')
    depends_on("conduit", when="+conduit")

    def cmake_args(self):
        """Populate cmake arguments for libcatalyst."""
        args = [
            "-DCATALYST_BUILD_TESTING=OFF",
            self.define_from_variant("CATALYST_USE_MPI", "mpi"),
            self.define_from_variant("CATALYST_WITH_EXTERNAL_CONDUIT", "conduit"),
        ]

        return args

    def setup_run_environment(self, env):
        spec = self.spec
        if spec.satisfies("+conduit"):
            env.prepend_path("CMAKE_PREFIX_PATH", spec["conduit"].prefix)

    @on_package_attributes(run_tests=True)
    @run_after("install")
    def build_test(self):
        testdir = "smoke_test_build"
        cmakeExampleDir = join_path(self.stage.source_path, "examples")
        cmake_args = [
            cmakeExampleDir,
            "-DBUILD_SHARED_LIBS=ON",
            self.define("CMAKE_PREFIX_PATH", self.prefix),
        ]
        cmake = which(self.spec["cmake"].prefix.bin.cmake)

        with working_dir(testdir, create=True):
            cmake(*cmake_args)
            cmake(*(["--build", "."]))
            tty.info("Running Catalyst test")

            res = subprocess.run(["adaptor0/adaptor0_test", "catalyst"])
            assert res.returncode == 0

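For context on the ``cmake_args`` above: ``define_from_variant`` is the ``CMakePackage`` helper that renders a variant's value as a CMake cache definition. Roughly, for a boolean variant (a sketch, not Spack's exact implementation):

.. code-block:: python

   def define_from_variant_sketch(pkg, cmake_var, variant):
       # For 'libcatalyst+mpi' this would yield "-DCATALYST_USE_MPI:BOOL=ON",
       # and "-DCATALYST_USE_MPI:BOOL=OFF" for '~mpi'.
       value = "ON" if pkg.spec.satisfies(f"+{variant}") else "OFF"
       return f"-D{cmake_var}:BOOL={value}"
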
56
var/spack/repos/builtin/packages/llvm/detection_test.yaml
Normal file
@ -0,0 +1,56 @@
paths:
- layout:
  - executables:
    - "bin/clang-3.9"
    script: |
      echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
      echo "Target: x86_64-pc-linux-gnu"
      echo "Thread model: posix"
      echo "InstalledDir: /usr/bin"
  - executables:
    - "bin/clang++-3.9"
    script: |
      echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
      echo "Target: x86_64-pc-linux-gnu"
      echo "Thread model: posix"
      echo "InstalledDir: /usr/bin"
  results:
  - spec: 'llvm@3.9.1 +clang~lld~lldb'
# Multiple LLVM packages in the same prefix
- layout:
  - executables:
    - "bin/clang-8"
    - "bin/clang++-8"
    script: |
      echo "clang version 8.0.0-3~ubuntu18.04.2 (tags/RELEASE_800/final)"
      echo "Target: x86_64-pc-linux-gnu"
      echo "Thread model: posix"
      echo "InstalledDir: /usr/bin"
  - executables:
    - "bin/ld.lld-8"
    script: 'echo "LLD 8.0.0 (compatible with GNU linkers)"'
  - executables:
    - "bin/lldb"
    script: 'echo "lldb version 8.0.0"'
  - executables:
    - "bin/clang-3.9"
    - "bin/clang++-3.9"
    script: |
      echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
      echo "Target: x86_64-pc-linux-gnu"
      echo "Thread model: posix"
      echo "InstalledDir: /usr/bin"
  results:
  - spec: 'llvm@8.0.0+clang+lld+lldb'
  - spec: 'llvm@3.9.1+clang~lld~lldb'
# Apple Clang should not be detected
- layout:
  - executables:
    - "bin/clang"
    - "bin/clang++"
    script: |
      echo "Apple clang version 11.0.0 (clang-1100.0.33.8)"
      echo "Target: x86_64-apple-darwin19.5.0"
      echo "Thread model: posix"
      echo "InstalledDir: /Library/Developer/CommandLineTools/usr/bin"
  results: []

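The last layout, paired with ``results: []``, is a negative test: Apple's Clang must be left to the separate ``apple-clang`` package. A rough illustration of the banner filtering involved (a sketch, not the llvm package's actual detection code):

.. code-block:: python

   import re

   def detect_llvm_version(banner):
       if "Apple" in banner:
           return None  # Apple's fork is not upstream LLVM
       match = re.search(r"clang version ([0-9.]+)", banner)
       return match.group(1) if match else None

   assert detect_llvm_version("Apple clang version 11.0.0 (clang-1100.0.33.8)") is None
   assert detect_llvm_version("clang version 8.0.0-3~ubuntu18.04.2") == "8.0.0"
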
@ -13,8 +13,9 @@ class ModeltestNg(CMakePackage):
    url = "https://github.com/ddarriba/modeltest/archive/refs/tags/v0.1.7.tar.gz"
    git = "https://github.com/ddarriba/modeltest.git"

    maintainers("dorton21")
    maintainers("snehring")

    version("20220721", commit="1066356b984100897b8bd38ac771c5c950984c01", submodules=True)
    version("0.1.7", commit="cc028888f1d4222aaa53b99c6b02cd934a279001", submodules=True)

    variant("mpi", default=False, description="Enable MPI")
@ -24,5 +25,12 @@ class ModeltestNg(CMakePackage):
    depends_on("flex", type="build")
    depends_on("openmpi", when="+mpi")

    # 40217: ICE by gcc-toolset-12-gcc-12.2.1-7.4.el8.aarch64 of Rocky Linux 8.8:
    conflicts("%gcc@12.2.0:12.2", when="target=aarch64:", msg="ICE with gcc@12.2 on aarch64")

    requires(
        "@20220721:", when="target=aarch64:", msg="Support for aarch64 was added after 20220721."
    )

    def cmake_args(self):
        return [self.define_from_variant("ENABLE_MPI", "mpi")]

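The ``conflicts``/``requires`` pair above encodes two aarch64 rules: no gcc 12.2.x (compiler ICE) and no versions older than 20220721. A hedged way to check the second rule (assumes a working Spack install; the expected failure message is the ``msg=`` string above):

.. code-block:: python

   import subprocess

   # Pre-20220721 versions should no longer concretize for aarch64 targets.
   result = subprocess.run(
       ["spack", "spec", "modeltest-ng@0.1.7", "target=aarch64"],
       capture_output=True,
   )
   assert result.returncode != 0
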
@ -274,7 +274,7 @@ def libs(self):
        search_shared = bool(spec.variants["shared"].value)
        suffix = spec.variants["symbol_suffix"].value
        if suffix != "none":
            name += suffix
            name = [x + suffix for x in name]

        return find_libraries(name, spec.prefix, shared=search_shared, recursive=True)

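The one-line change above fixes a subtle type bug: as the replacement line implies, ``name`` holds a list of library basenames at this point, and ``+=`` with a string extends a list character by character. A standalone demonstration (the basenames are hypothetical):

.. code-block:: python

   name = ["libfoo", "libbar"]  # hypothetical basenames
   suffix = "_64"

   buggy = list(name)
   buggy += suffix  # list += str appends each character
   assert buggy == ["libfoo", "libbar", "_", "6", "4"]

   fixed = [x + suffix for x in name]  # the committed fix
   assert fixed == ["libfoo_64", "libbar_64"]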