Merge branch 'develop' into psakiev/f/git-version-refactor

This commit is contained in:
psakievich 2025-05-16 15:36:11 -06:00 committed by GitHub
commit 9cf1f5a5da
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
89 changed files with 2078 additions and 569 deletions

View File

@ -27,7 +27,10 @@ jobs:
- name: Sync spack/spack-packages with spack/spack
run: |
cd spack-packages
git-filter-repo --quiet --source ../spack --subdirectory-filter var/spack/repos --refs develop
git-filter-repo --quiet --source ../spack \
--subdirectory-filter var/spack/repos \
--path share/spack/gitlab/cloud_pipelines/ --path-rename share/spack/gitlab/cloud_pipelines/:.ci/gitlab/ \
--refs develop
- name: Push
run: |
cd spack-packages

View File

@ -35,7 +35,6 @@
if not os.path.exists(link_name):
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring"))
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
# Add the Spack bin directory to the path so that we can use its output in docs.

View File

@ -2257,22 +2257,15 @@ RPATHs in Spack are handled in one of three ways:
set in standard variables like ``CC``, ``CXX``, ``F77``, and ``FC``,
so most build systems (autotools and many gmake systems) pick them
up and use them.
#. CMake also respects Spack's compiler wrappers, but many CMake
builds have logic to overwrite RPATHs when binaries are
installed. Spack provides the ``std_cmake_args`` variable, which
includes parameters necessary for CMake builds to use the right
installation RPATH. It can be used like this when ``cmake`` is
invoked:
.. code-block:: python
class MyPackage(Package):
...
def install(self, spec, prefix):
cmake("..", *std_cmake_args)
make()
make("install")
#. CMake has its own RPATH handling, and distinguishes between build and
install RPATHs. By default, during the build it registers RPATHs to
all libraries it links to, so that just-built executables can be run
during the build itself. Upon installation, these RPATHs are cleared,
unless the user defines the install RPATHs. When inheriting from
``CMakePackage``, Spack handles this automatically, and sets
``CMAKE_INSTALL_RPATH_USE_LINK_PATH`` and ``CMAKE_INSTALL_RPATH``,
so that libraries of dependencies and the package's own libraries
can be found at runtime.
#. If you need to modify the build to add your own RPATHs, you can
use the ``self.rpath`` property of your package, which will
return a list of all the RPATHs that Spack will use when it

View File

@ -68,9 +68,6 @@
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers.libraries
import spack.config
@ -567,9 +564,6 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
module.make_jobs = jobs
if context == Context.BUILD:
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
@ -998,15 +992,6 @@ def set_all_package_py_globals(self):
pkg.setup_dependent_package(dependent_module, spec)
dependent_module.propagate_changes_to_mro()
if self.context == Context.BUILD:
pkg = self.specs[0].package
module = ModuleChangePropagator(pkg)
# std_cmake_args is not sufficiently static to be defined
# in set_package_py_globals and is deprecated so its handled
# here as a special case
module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
module.propagate_changes_to_mro()
def get_env_modifications(self) -> EnvironmentModifications:
"""Returns the environment variable modifications for the given input specs and context.
Environment modifications include:

View File

@ -0,0 +1,99 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module re-exports symbols that are part of the v1.0 Package API, but were removed in the
v2.0 Package API after build systems were moved into the ``spack_repo.builtin`` package.
In older versions of Spack, these symbols were re-exported from ``spack.package``."""
from .aspell_dict import AspellDictPackage
from .autotools import AutotoolsPackage
from .bundle import BundlePackage
from .cached_cmake import (
CachedCMakePackage,
cmake_cache_filepath,
cmake_cache_option,
cmake_cache_path,
cmake_cache_string,
)
from .cargo import CargoPackage
from .cmake import CMakePackage, generator
from .compiler import CompilerPackage
from .cuda import CudaPackage
from .generic import Package
from .gnu import GNUMirrorPackage
from .go import GoPackage
from .intel import IntelPackage
from .lua import LuaPackage
from .makefile import MakefilePackage
from .maven import MavenPackage
from .meson import MesonPackage
from .msbuild import MSBuildPackage
from .nmake import NMakePackage
from .octave import OctavePackage
from .oneapi import (
INTEL_MATH_LIBRARIES,
IntelOneApiLibraryPackage,
IntelOneApiLibraryPackageWithSdk,
IntelOneApiPackage,
IntelOneApiStaticLibraryList,
)
from .perl import PerlPackage
from .python import PythonExtension, PythonPackage
from .qmake import QMakePackage
from .r import RPackage
from .racket import RacketPackage
from .rocm import ROCmPackage
from .ruby import RubyPackage
from .scons import SConsPackage
from .sip import SIPPackage
from .sourceforge import SourceforgePackage
from .sourceware import SourcewarePackage
from .waf import WafPackage
from .xorg import XorgPackage
#: Public names re-exported as part of the v1.0 Package API surface.
#: NOTE(review): keep this list in sync with the ``from . import`` statements above —
#: every imported symbol should appear here exactly once.
__all__ = [
    "AspellDictPackage",
    "AutotoolsPackage",
    "BundlePackage",
    "CachedCMakePackage",
    "cmake_cache_filepath",
    "cmake_cache_option",
    "cmake_cache_path",
    "cmake_cache_string",
    "CargoPackage",
    "CMakePackage",
    "generator",
    "CompilerPackage",
    "CudaPackage",
    "Package",
    "GNUMirrorPackage",
    "GoPackage",
    "IntelPackage",
    "IntelOneApiLibraryPackageWithSdk",
    "IntelOneApiLibraryPackage",
    "IntelOneApiStaticLibraryList",
    "IntelOneApiPackage",
    "INTEL_MATH_LIBRARIES",
    "LuaPackage",
    "MakefilePackage",
    "MavenPackage",
    "MesonPackage",
    "MSBuildPackage",
    "NMakePackage",
    "OctavePackage",
    "PerlPackage",
    "PythonExtension",
    "PythonPackage",
    "QMakePackage",
    "RacketPackage",
    "RPackage",
    "ROCmPackage",
    "RubyPackage",
    "SConsPackage",
    "SIPPackage",
    "SourceforgePackage",
    "SourcewarePackage",
    "WafPackage",
    "XorgPackage",
]

View File

@ -49,10 +49,19 @@ class CudaPackage(PackageBase):
"90a",
"100",
"100a",
"100f",
"101",
"101a",
"101f",
"103",
"103a",
"103f",
"120",
"120a",
"120f",
"121",
"121a",
"121f",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@ -155,6 +164,15 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
depends_on("cuda@12.8:", when="cuda_arch=101a")
depends_on("cuda@12.8:", when="cuda_arch=120")
depends_on("cuda@12.8:", when="cuda_arch=120a")
depends_on("cuda@12.9:", when="cuda_arch=100f")
depends_on("cuda@12.9:", when="cuda_arch=101f")
depends_on("cuda@12.9:", when="cuda_arch=120f")
depends_on("cuda@12.9:", when="cuda_arch=103")
depends_on("cuda@12.9:", when="cuda_arch=103a")
depends_on("cuda@12.9:", when="cuda_arch=103f")
depends_on("cuda@12.9:", when="cuda_arch=121")
depends_on("cuda@12.9:", when="cuda_arch=121a")
depends_on("cuda@12.9:", when="cuda_arch=121f")
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
@ -186,7 +204,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.9")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@ -195,7 +213,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
conflicts("%clang@20:", when="+cuda ^cuda@:12.9")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@ -331,7 +331,8 @@ def env_activate(args):
env = create_temp_env_directory()
env_path = os.path.abspath(env)
short_name = os.path.basename(env_path)
ev.create_in_dir(env).write(regenerate=False)
view = not args.without_view
ev.create_in_dir(env, with_view=view).write(regenerate=False)
_tty_info(f"Created and activated temporary environment in {env_path}")
# Managed environment

View File

@ -3,11 +3,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shlex
import sys
from typing import List
from typing import Any, List, Optional
import llnl.util.tty as tty
import spack
import spack.config
import spack.repo
import spack.util.path
@ -65,6 +67,15 @@ def setup_parser(subparser):
help="configuration scope to modify",
)
# Migrate
migrate_parser = sp.add_parser("migrate", help=repo_migrate.__doc__)
migrate_parser.add_argument(
"namespace_or_path", help="path to a Spack package repository directory"
)
migrate_parser.add_argument(
"--fix", action="store_true", help="automatically fix the imports in the package files"
)
def repo_create(args):
"""create a new package repository"""
@ -155,12 +166,70 @@ def repo_list(args):
print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")
def _get_repo(name_or_path: str) -> Optional[spack.repo.Repo]:
    """Resolve a repository from a filesystem path or a configured namespace.

    The argument is first treated as a path to a repository; if that fails, each
    repository in the ``repos`` configuration is checked for a matching namespace.
    Returns ``None`` when nothing matches.
    """
    # Direct path to a repository?
    try:
        return spack.repo.from_path(name_or_path)
    except spack.repo.RepoError:
        pass

    # Otherwise, look for a configured repository whose namespace matches.
    for configured_root in spack.config.get("repos"):
        try:
            candidate = spack.repo.from_path(configured_root)
        except spack.repo.RepoError:
            # skip unreadable/broken entries rather than failing the lookup
            continue
        if candidate.namespace == name_or_path:
            return candidate

    return None
def repo_migrate(args: Any) -> int:
    """migrate a package repository to the latest Package API

    Returns a process exit code: 0 on success (or nothing to do), 1 when the
    repository needs changes that were not (or could not be) applied.
    """
    # Imported lazily so the command module stays cheap to load.
    from spack.repo_migrate import migrate_v1_to_v2, migrate_v2_imports

    repo = _get_repo(args.namespace_or_path)

    if repo is None:
        tty.die(f"No such repository: {args.namespace_or_path}")

    # Dispatch on the repository's current Package API version.
    if (1, 0) <= repo.package_api < (2, 0):
        # v1 -> v2: restructure on disk and rewrite imports; returns the new repo on success
        success, repo_v2 = migrate_v1_to_v2(repo, fix=args.fix)
        exit_code = 0 if success else 1
    elif (2, 0) <= repo.package_api < (3, 0):
        # v2: only the package.py imports may need updating, no relocation
        repo_v2 = None
        exit_code = 0 if migrate_v2_imports(repo.packages_path, repo.root, fix=args.fix) else 1
    else:
        # already at the latest API: nothing to do
        repo_v2 = None
        exit_code = 0

    if exit_code == 0 and isinstance(repo_v2, spack.repo.Repo):
        # a new repository was created on disk; tell the user to switch config over
        tty.info(
            f"Repository '{repo_v2.namespace}' was successfully migrated from "
            f"package API {repo.package_api_str} to {repo_v2.package_api_str}."
        )
        tty.warn(
            "Remove the old repository from Spack's configuration and add the new one using:\n"
            f"    spack repo remove {shlex.quote(repo.root)}\n"
            f"    spack repo add {shlex.quote(repo_v2.root)}"
        )
    elif exit_code == 0:
        tty.info(f"Repository '{repo.namespace}' was successfully migrated")
    elif not args.fix and exit_code == 1:
        # dry run that found work to do: point the user at --fix
        tty.error(
            f"No changes were made to the repository {repo.root} with namespace "
            f"'{repo.namespace}'. Run with --fix to apply the above changes."
        )

    return exit_code
def repo(parser, args):
    """Dispatch ``spack repo`` to the handler for the selected subcommand.

    Returns whatever the subcommand handler returns (e.g. ``repo_migrate``'s
    exit code); most handlers return ``None``.
    """
    # NOTE: this block contained unresolved merge/diff residue (both the old
    # `action = {...}` form and the new `return {...}` form); resolved to the
    # post-merge version, which also propagates the handler's return value.
    return {
        "create": repo_create,
        "list": repo_list,
        "add": repo_add,
        "remove": repo_remove,
        "rm": repo_remove,  # alias for "remove"
        "migrate": repo_migrate,
    }[args.repo_command](args)

View File

@ -18,8 +18,8 @@
import spack.repo
import spack.util.git
import spack.util.spack_yaml
from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
from spack.tokenize import Token
from spack.spec_parser import NAME, VERSION_LIST, SpecTokens
from spack.tokenize import Token, TokenBase, Tokenizer
from spack.util.executable import Executable, which
description = "runs source code style checks on spack"
@ -206,8 +206,8 @@ def setup_parser(subparser):
"--spec-strings",
action="store_true",
help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
"v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
"will be removed in Spack v1.0.",
"v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: must be "
"used only on specs from spack v0.X.",
)
subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
@ -521,20 +521,52 @@ def _bootstrap_dev_dependencies():
IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
class _LegacySpecTokens(TokenBase):
    """Reconstructs the tokens for previous specs, so we can reuse code to rotate them"""

    # NOTE(review): member order presumably defines tokenizer match precedence (more
    # specific patterns before their prefixes, e.g. COMPILER_AND_VERSION before
    # COMPILER) — confirm against the Tokenizer implementation before reordering.
    # Dependency
    START_EDGE_PROPERTIES = r"(?:\^\[)"
    END_EDGE_PROPERTIES = r"(?:\])"
    DEPENDENCY = r"(?:\^)"
    # Version
    VERSION_HASH_PAIR = SpecTokens.VERSION_HASH_PAIR.regex
    GIT_VERSION = SpecTokens.GIT_VERSION.regex
    VERSION = SpecTokens.VERSION.regex
    # Variants
    PROPAGATED_BOOL_VARIANT = SpecTokens.PROPAGATED_BOOL_VARIANT.regex
    BOOL_VARIANT = SpecTokens.BOOL_VARIANT.regex
    PROPAGATED_KEY_VALUE_PAIR = SpecTokens.PROPAGATED_KEY_VALUE_PAIR.regex
    KEY_VALUE_PAIR = SpecTokens.KEY_VALUE_PAIR.regex
    # Compilers: rebuilt from NAME/VERSION_LIST because these token kinds no longer
    # exist in the current SpecTokens
    COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
    COMPILER = rf"(?:%\s*(?:{NAME}))"
    # FILENAME
    FILENAME = SpecTokens.FILENAME.regex
    # Package name
    FULLY_QUALIFIED_PACKAGE_NAME = SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME.regex
    UNQUALIFIED_PACKAGE_NAME = SpecTokens.UNQUALIFIED_PACKAGE_NAME.regex
    # DAG hash
    DAG_HASH = SpecTokens.DAG_HASH.regex
    # White spaces
    WS = SpecTokens.WS.regex
    # Unexpected character(s)
    UNEXPECTED = SpecTokens.UNEXPECTED.regex
def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
    """Move the compiler token block at ``blocks[idx]`` to the end, in place.

    No-op when there is no compiler block (``idx == -1``), when it is already the
    last block, or when only whitespace tokens follow it.

    NOTE: this block contained unresolved merge/diff residue (duplicate lines in
    both the old ``SpecTokens`` and new ``_LegacySpecTokens`` forms); resolved to
    the post-merge ``_LegacySpecTokens`` version.
    """
    # only move the compiler to the back if it exists and is not already at the end
    if not 0 <= idx < len(blocks) - 1:
        return
    # if there's only whitespace after the compiler, don't move it
    if all(token.kind == _LegacySpecTokens.WS for block in blocks[idx + 1 :] for token in block):
        return
    # rotate left and always add at least one WS token between compiler and previous token
    compiler_block = blocks.pop(idx)
    if compiler_block[0].kind != _LegacySpecTokens.WS:
        compiler_block.insert(0, Token(_LegacySpecTokens.WS, " "))
    # delete the WS tokens from the new first block if it was at the very start, to prevent
    # leading WS tokens.
    while idx == 0 and blocks[0][0].kind == _LegacySpecTokens.WS:
        blocks[0].pop(0)
    blocks.append(compiler_block)
@ -552,11 +584,13 @@ def _spec_str_format(spec_str: str) -> Optional[str]:
compiler_block_idx = -1
in_edge_attr = False
for token in SPEC_TOKENIZER.tokenize(spec_str):
if token.kind == SpecTokens.UNEXPECTED:
legacy_tokenizer = Tokenizer(_LegacySpecTokens)
for token in legacy_tokenizer.tokenize(spec_str):
if token.kind == _LegacySpecTokens.UNEXPECTED:
# parsing error, we cannot fix this string.
return None
elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
elif token.kind in (_LegacySpecTokens.COMPILER, _LegacySpecTokens.COMPILER_AND_VERSION):
# multiple compilers are not supported in Spack v0.x, so early return
if compiler_block_idx != -1:
return None
@ -565,19 +599,19 @@ def _spec_str_format(spec_str: str) -> Optional[str]:
current_block = []
compiler_block_idx = len(blocks) - 1
elif token.kind in (
SpecTokens.START_EDGE_PROPERTIES,
SpecTokens.DEPENDENCY,
SpecTokens.UNQUALIFIED_PACKAGE_NAME,
SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
_LegacySpecTokens.START_EDGE_PROPERTIES,
_LegacySpecTokens.DEPENDENCY,
_LegacySpecTokens.UNQUALIFIED_PACKAGE_NAME,
_LegacySpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
):
_spec_str_reorder_compiler(compiler_block_idx, blocks)
compiler_block_idx = -1
if token.kind == SpecTokens.START_EDGE_PROPERTIES:
if token.kind == _LegacySpecTokens.START_EDGE_PROPERTIES:
in_edge_attr = True
current_block.append(token)
blocks.append(current_block)
current_block = []
elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
elif token.kind == _LegacySpecTokens.END_EDGE_PROPERTIES:
in_edge_attr = False
current_block.append(token)
blocks.append(current_block)
@ -585,19 +619,19 @@ def _spec_str_format(spec_str: str) -> Optional[str]:
elif in_edge_attr:
current_block.append(token)
elif token.kind in (
SpecTokens.VERSION_HASH_PAIR,
SpecTokens.GIT_VERSION,
SpecTokens.VERSION,
SpecTokens.PROPAGATED_BOOL_VARIANT,
SpecTokens.BOOL_VARIANT,
SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
SpecTokens.KEY_VALUE_PAIR,
SpecTokens.DAG_HASH,
_LegacySpecTokens.VERSION_HASH_PAIR,
_LegacySpecTokens.GIT_VERSION,
_LegacySpecTokens.VERSION,
_LegacySpecTokens.PROPAGATED_BOOL_VARIANT,
_LegacySpecTokens.BOOL_VARIANT,
_LegacySpecTokens.PROPAGATED_KEY_VALUE_PAIR,
_LegacySpecTokens.KEY_VALUE_PAIR,
_LegacySpecTokens.DAG_HASH,
):
current_block.append(token)
blocks.append(current_block)
current_block = []
elif token.kind == SpecTokens.WS:
elif token.kind == _LegacySpecTokens.WS:
current_block.append(token)
else:
raise ValueError(f"unexpected token {token}")

View File

@ -55,7 +55,9 @@
def is_package_module(fullname: str) -> bool:
    """Check if the given module is a package module.

    A v1 package module is anything under the v1 prefix; a v2 package module must
    additionally be the ``.package`` submodule of a package directory.
    """
    # NOTE: the stale pre-merge `return` line that preceded this one (and made the
    # corrected version unreachable) has been removed.
    return fullname.startswith(PKG_MODULE_PREFIX_V1) or (
        fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package")
    )
def namespace_from_fullname(fullname: str) -> str:
@ -77,6 +79,25 @@ def namespace_from_fullname(fullname: str) -> str:
return fullname
class _PrependFileLoader(importlib.machinery.SourceFileLoader):
    # Source loader that transparently prepends a star-import of the v1 Package API
    # shims to every package.py it loads, so legacy packages keep working without
    # explicit imports.

    def __init__(self, fullname: str, repo: "Repo", package_name: str) -> None:
        """Create a loader for ``package_name`` in ``repo`` under module ``fullname``."""
        self.repo = repo
        self.package_name = package_name
        path = repo.filename_for_package_name(package_name)
        self.fullname = fullname
        # bytes injected in front of the real file contents at load time
        self.prepend = b"from spack.build_systems._package_api_v1 import *\n"
        super().__init__(self.fullname, path)

    def path_stats(self, path):
        """Report the file size as seen by the import system, including the
        prepended bytes, so bytecode-cache size checks stay consistent."""
        stats = dict(super().path_stats(path))
        stats["size"] += len(self.prepend)
        return stats

    def get_data(self, path):
        """Return file contents, with the shim import prepended only for the
        package file this loader owns (other paths are returned untouched)."""
        data = super().get_data(path)
        return self.prepend + data if path == self.path else data
class SpackNamespaceLoader:
def create_module(self, spec):
return SpackNamespace(spec.name)
@ -123,8 +144,7 @@ def compute_loader(self, fullname: str):
# With 2 nested conditionals we can call "repo.real_name" only once
package_name = repo.real_name(module_name)
if package_name:
module_path = repo.filename_for_package_name(package_name)
return importlib.machinery.SourceFileLoader(fullname, module_path)
return _PrependFileLoader(fullname, repo, package_name)
# We are importing a full namespace like 'spack.pkg.builtin'
if fullname == repo.full_namespace:

View File

@ -0,0 +1,421 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import ast
import os
import re
import shutil
import sys
from typing import IO, Dict, List, Optional, Set, Tuple
import spack.repo
import spack.util.naming
import spack.util.spack_yaml
def _same_contents(f: str, g: str) -> bool:
"""Return True if the files have the same contents."""
try:
with open(f, "rb") as f1, open(g, "rb") as f2:
while True:
b1 = f1.read(4096)
b2 = f2.read(4096)
if b1 != b2:
return False
if not b1 and not b2:
break
return True
except OSError:
return False
def migrate_v1_to_v2(
    repo: spack.repo.Repo, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
) -> Tuple[bool, Optional[spack.repo.Repo]]:
    """To upgrade a repo from Package API v1 to v2 we need to:

    1. ensure ``spack_repo/<namespace>`` parent dirs to the ``repo.yaml`` file.
    2. rename <pkg dir>/package.py to <pkg module>/package.py.
    3. bump the version in ``repo.yaml``.

    Args:
        repo: the v1 repository to upgrade
        fix: when True apply changes on disk; when False only print what would be done
        out: stream for informational output
        err: stream for error messages

    Returns:
        ``(success, new_repo)`` — ``new_repo`` is the upgraded repository when ``fix``
        is True and the migration succeeded, otherwise ``None``.
    """
    if not (1, 0) <= repo.package_api < (2, 0):
        raise RuntimeError(f"Cannot upgrade from {repo.package_api_str} to v2.0")

    # Load repo.yaml and bump the API version in the in-memory copy; written out later.
    with open(os.path.join(repo.root, "repo.yaml"), encoding="utf-8") as f:
        updated_config = spack.util.spack_yaml.load(f)
        updated_config["repo"]["api"] = "v2.0"

    namespace = repo.namespace.split(".")

    # Each namespace component must be importable as a Python module under API v2.
    if not all(
        spack.util.naming.valid_module_name(part, package_api=(2, 0)) for part in namespace
    ):
        print(
            f"Cannot upgrade from v1 to v2, because the namespace '{repo.namespace}' is not a "
            "valid Python module",
            file=err,
        )
        return False, None

    try:
        subdirectory = spack.repo._validate_and_normalize_subdir(
            repo.subdirectory, repo.root, package_api=(2, 0)
        )
    except spack.repo.BadRepoError:
        print(
            f"Cannot upgrade from v1 to v2, because the subdirectory '{repo.subdirectory}' is not "
            "a valid Python module",
            file=err,
        )
        return False, None

    new_root = os.path.join(repo.root, "spack_repo", *namespace)

    # inode -> old-root-relative path, used to re-target symlinks after the move
    ino_to_relpath: Dict[int, str] = {}
    # old-root-relative symlink path -> inode of its resolved target
    symlink_to_ino: Dict[str, int] = {}

    prefix_len = len(repo.root) + len(os.sep)

    # old package dir (relative) -> new package dir (relative), for dirs needing rename
    rename: Dict[str, str] = {}
    dirs_to_create: List[str] = []
    files_to_copy: List[str] = []

    errors = False

    # Iterative DFS over the old tree; depth distinguishes top-level entries and
    # package directories (depth 1 under the packages subdirectory).
    stack: List[Tuple[str, int]] = [(repo.root, 0)]
    while stack:
        path, depth = stack.pop()
        try:
            entries = os.scandir(path)
        except OSError:
            continue
        # close the scandir handle deterministically instead of leaking it
        with entries:
            for entry in entries:
                rel_path = entry.path[prefix_len:]
                # skip the destination tree and the config file at the top level
                if depth == 0 and entry.name in ("spack_repo", "repo.yaml"):
                    continue
                ino_to_relpath[entry.inode()] = entry.path[prefix_len:]
                if entry.is_symlink():
                    symlink_to_ino[rel_path] = entry.stat(follow_symlinks=True).st_ino
                    continue
                elif entry.is_dir(follow_symlinks=False):
                    if entry.name == "__pycache__":
                        continue
                    # check if this is a package
                    if (
                        depth == 1
                        and rel_path.startswith(f"{subdirectory}{os.sep}")
                        and os.path.exists(os.path.join(entry.path, "package.py"))
                    ):
                        if "_" in entry.name:
                            print(
                                f"Invalid package name '{entry.name}': underscores are not "
                                "allowed in package names, rename the package with hyphens "
                                "as separators",
                                file=err,
                            )
                            errors = True
                            continue
                        pkg_dir = spack.util.naming.pkg_name_to_pkg_dir(
                            entry.name, package_api=(2, 0)
                        )
                        if pkg_dir != entry.name:
                            rename[f"{subdirectory}{os.sep}{entry.name}"] = (
                                f"{subdirectory}{os.sep}{pkg_dir}"
                            )
                    dirs_to_create.append(rel_path)
                    stack.append((entry.path, depth + 1))
                    continue
                files_to_copy.append(rel_path)

    if errors:
        return False, None

    # Regex matching any old package-dir prefix that must be renamed. Guard against an
    # empty rename table: "^()" would match the empty string and then raise KeyError on
    # the rename lookup in _relocate.
    rename_regex = (
        re.compile("^(" + "|".join(re.escape(k) for k in rename.keys()) + ")") if rename else None
    )

    if fix:
        os.makedirs(new_root, exist_ok=True)

    def _relocate(rel_path: str) -> Tuple[str, str]:
        """Map an old-root-relative path to its (old absolute, new absolute) pair."""
        if rename_regex is None:
            new_rel = rel_path
        else:
            new_rel = rename_regex.sub(lambda m: rename[m.group(0)], rel_path)
        return os.path.join(repo.root, rel_path), os.path.join(new_root, new_rel)

    if not fix:
        print("The following directories, files and symlinks will be created:\n", file=out)

    for rel_path in dirs_to_create:
        _, new_path = _relocate(rel_path)
        if fix:
            try:
                os.mkdir(new_path)
            except FileExistsError:  # not an error if the directory already exists
                continue
        else:
            print(f"create directory {new_path}", file=out)

    for rel_path in files_to_copy:
        old_path, new_path = _relocate(rel_path)
        if os.path.lexists(new_path):
            # if we already copied this file, don't error.
            if not _same_contents(old_path, new_path):
                print(
                    f"Cannot upgrade from v1 to v2, because the file '{new_path}' already exists",
                    file=err,
                )
                return False, None
            continue
        if fix:
            shutil.copy2(old_path, new_path)
        else:
            print(f"copy {old_path} -> {new_path}", file=out)

    for rel_path, ino in symlink_to_ino.items():
        old_path, new_path = _relocate(rel_path)
        if ino in ino_to_relpath:
            # link by path relative to the new root
            _, new_target = _relocate(ino_to_relpath[ino])
            # NOTE(review): relpath is computed against new_path itself rather than its
            # parent directory — confirm this is intended for the symlink target.
            tgt = os.path.relpath(new_target, new_path)
        else:
            tgt = os.path.realpath(old_path)

        # no-op if the same, error if different
        if os.path.lexists(new_path):
            if not os.path.islink(new_path) or os.readlink(new_path) != tgt:
                print(
                    f"Cannot upgrade from v1 to v2, because the file '{new_path}' already exists",
                    file=err,
                )
                return False, None
            continue

        if fix:
            os.symlink(tgt, new_path)
        else:
            print(f"create symlink {new_path} -> {tgt}", file=out)

    if fix:
        with open(os.path.join(new_root, "repo.yaml"), "w", encoding="utf-8") as f:
            spack.util.spack_yaml.dump(updated_config, f)
        updated_repo = spack.repo.from_path(new_root)
    else:
        print(file=out)
        updated_repo = repo  # compute the import diff on the v1 repo since v2 doesn't exist yet

    result = migrate_v2_imports(
        updated_repo.packages_path, updated_repo.root, fix=fix, out=out, err=err
    )

    return result, (updated_repo if fix else None)
def migrate_v2_imports(
    packages_dir: str, root: str, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
) -> bool:
    """In Package API v2.0, packages need to explicitly import package classes and a few other
    symbols from the build_systems module. This function automatically adds the missing imports
    to each package.py file in the repository.

    Args:
        packages_dir: directory containing one subdirectory per package
        root: repository root, used to print paths relative to it in diffs
        fix: when True rewrite package.py files in place; when False only print a diff
        err: stream for warnings/errors; out: stream for the printed diffs

    Returns:
        True when nothing needed fixing (or everything was fixed), False otherwise.
    """
    # Mapping of every legacy Package API symbol to the v2 module that now provides it.
    symbol_to_module = {
        "AspellDictPackage": "spack_repo.builtin.build_systems.aspell_dict",
        "AutotoolsPackage": "spack_repo.builtin.build_systems.autotools",
        "BundlePackage": "spack_repo.builtin.build_systems.bundle",
        "CachedCMakePackage": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_filepath": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_option": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_path": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_string": "spack_repo.builtin.build_systems.cached_cmake",
        "CargoPackage": "spack_repo.builtin.build_systems.cargo",
        "CMakePackage": "spack_repo.builtin.build_systems.cmake",
        "generator": "spack_repo.builtin.build_systems.cmake",
        "CompilerPackage": "spack_repo.builtin.build_systems.compiler",
        "CudaPackage": "spack_repo.builtin.build_systems.cuda",
        "Package": "spack_repo.builtin.build_systems.generic",
        "GNUMirrorPackage": "spack_repo.builtin.build_systems.gnu",
        "GoPackage": "spack_repo.builtin.build_systems.go",
        "IntelPackage": "spack_repo.builtin.build_systems.intel",
        "LuaPackage": "spack_repo.builtin.build_systems.lua",
        "MakefilePackage": "spack_repo.builtin.build_systems.makefile",
        "MavenPackage": "spack_repo.builtin.build_systems.maven",
        "MesonPackage": "spack_repo.builtin.build_systems.meson",
        "MSBuildPackage": "spack_repo.builtin.build_systems.msbuild",
        "NMakePackage": "spack_repo.builtin.build_systems.nmake",
        "OctavePackage": "spack_repo.builtin.build_systems.octave",
        "INTEL_MATH_LIBRARIES": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiLibraryPackage": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiLibraryPackageWithSdk": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiPackage": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiStaticLibraryList": "spack_repo.builtin.build_systems.oneapi",
        "PerlPackage": "spack_repo.builtin.build_systems.perl",
        "PythonExtension": "spack_repo.builtin.build_systems.python",
        "PythonPackage": "spack_repo.builtin.build_systems.python",
        "QMakePackage": "spack_repo.builtin.build_systems.qmake",
        "RPackage": "spack_repo.builtin.build_systems.r",
        "RacketPackage": "spack_repo.builtin.build_systems.racket",
        "ROCmPackage": "spack_repo.builtin.build_systems.rocm",
        "RubyPackage": "spack_repo.builtin.build_systems.ruby",
        "SConsPackage": "spack_repo.builtin.build_systems.scons",
        "SIPPackage": "spack_repo.builtin.build_systems.sip",
        "SourceforgePackage": "spack_repo.builtin.build_systems.sourceforge",
        "SourcewarePackage": "spack_repo.builtin.build_systems.sourceware",
        "WafPackage": "spack_repo.builtin.build_systems.waf",
        "XorgPackage": "spack_repo.builtin.build_systems.xorg",
    }

    success = True

    for f in os.scandir(packages_dir):
        pkg_path = os.path.join(f.path, "package.py")
        # only plain package directories with a real (non-symlinked) package.py qualify
        if (
            f.name in ("__init__.py", "__pycache__")
            or not f.is_dir(follow_symlinks=False)
            or os.path.islink(pkg_path)
        ):
            print(f"Skipping {f.path}", file=err)
            continue
        try:
            with open(pkg_path, "rb") as file:
                tree = ast.parse(file.read())
        except (OSError, SyntaxError) as e:
            print(f"Skipping {pkg_path}: {e}", file=err)
            continue

        #: Symbols that are referenced in the package and may need to be imported.
        referenced_symbols: Set[str] = set()

        #: Set of symbols of interest that are already defined through imports, assignments, or
        #: function definitions.
        defined_symbols: Set[str] = set()

        # 1-based line number where new import lines should be inserted
        best_line: Optional[int] = None

        seen_import = False

        for node in ast.walk(tree):
            # Get the last import statement from the first block of top-level imports
            if isinstance(node, ast.Module):
                for child in ast.iter_child_nodes(node):
                    # if we never encounter an import statement, the best line to add is right
                    # before the first node under the module
                    if best_line is None and isinstance(child, ast.stmt):
                        best_line = child.lineno

                    # prefer adding right before `from spack.package import ...`
                    if isinstance(child, ast.ImportFrom) and child.module == "spack.package":
                        seen_import = True
                        best_line = child.lineno  # add it right before spack.package
                        break

                    # otherwise put it right after the last import statement
                    is_import = isinstance(child, (ast.Import, ast.ImportFrom))

                    if is_import:
                        if isinstance(child, (ast.stmt, ast.expr)):
                            best_line = (child.end_lineno or child.lineno) + 1

                    if not seen_import and is_import:
                        seen_import = True
                    elif seen_import and not is_import:
                        # first non-import after the import block: stop scanning children
                        break

            # Function definitions or assignments to variables whose name is a symbol of interest
            # are considered as redefinitions, so we skip them.
            elif isinstance(node, ast.FunctionDef):
                if node.name in symbol_to_module:
                    print(
                        f"{pkg_path}:{node.lineno}: redefinition of `{node.name}` skipped",
                        file=err,
                    )
                    defined_symbols.add(node.name)
            elif isinstance(node, ast.Assign):
                for target in node.targets:
                    if isinstance(target, ast.Name) and target.id in symbol_to_module:
                        print(
                            f"{pkg_path}:{target.lineno}: redefinition of `{target.id}` skipped",
                            file=err,
                        )
                        defined_symbols.add(target.id)

            # Register symbols that are not imported.
            elif isinstance(node, ast.Name) and node.id in symbol_to_module:
                referenced_symbols.add(node.id)

            # Register imported symbols to make this operation idempotent
            elif isinstance(node, ast.ImportFrom):
                for alias in node.names:
                    if alias.name in symbol_to_module:
                        defined_symbols.add(alias.name)
                        if node.module == "spack.package":
                            # the symbol moved out of spack.package: flag it for the user
                            success = False
                            print(
                                f"{pkg_path}:{node.lineno}: `{alias.name}` is imported from "
                                "`spack.package`, which no longer provides this symbol",
                                file=err,
                            )

                    if alias.asname and alias.asname in symbol_to_module:
                        defined_symbols.add(alias.asname)

        # Remove imported symbols from the referenced symbols
        referenced_symbols.difference_update(defined_symbols)

        # nothing to do for this package
        if not referenced_symbols:
            continue

        if best_line is None:
            print(f"{pkg_path}: failed to update imports", file=err)
            success = False
            continue

        # Add the missing imports right after the last import statement
        with open(pkg_path, "r", encoding="utf-8", newline="") as file:
            lines = file.readlines()

        # Group missing symbols by their module
        missing_imports_by_module: Dict[str, list] = {}
        for symbol in referenced_symbols:
            module = symbol_to_module[symbol]
            if module not in missing_imports_by_module:
                missing_imports_by_module[module] = []
            missing_imports_by_module[module].append(symbol)

        new_lines = [
            f"from {module} import {', '.join(sorted(symbols))}\n"
            for module, symbols in sorted(missing_imports_by_module.items())
        ]

        # no import block existed: separate the new imports from the code below
        if not seen_import:
            new_lines.extend(("\n", "\n"))

        if not fix:  # only print the diff
            success = False  # packages need to be fixed, but we didn't do it

            # emit a small unified-diff-style hunk with 3 lines of leading context
            diff_start, diff_end = max(1, best_line - 3), min(best_line + 2, len(lines))
            num_changed = diff_end - diff_start + 1
            num_added = num_changed + len(new_lines)
            rel_pkg_path = os.path.relpath(pkg_path, start=root)
            out.write(f"--- a/{rel_pkg_path}\n+++ b/{rel_pkg_path}\n")
            out.write(f"@@ -{diff_start},{num_changed} +{diff_start},{num_added} @@\n")
            for line in lines[diff_start - 1 : best_line - 1]:
                out.write(f" {line}")
            for line in new_lines:
                out.write(f"+{line}")
            for line in lines[best_line - 1 : diff_end]:
                out.write(f" {line}")

            continue

        # splice the new import lines in before best_line
        lines[best_line - 1 : best_line - 1] = new_lines

        # write atomically: temp file in the same directory, then replace
        tmp_file = pkg_path + ".tmp"

        with open(tmp_file, "w", encoding="utf-8", newline="") as file:
            file.writelines(lines)

        os.replace(tmp_file, pkg_path)

    return success

View File

@ -2564,7 +2564,7 @@ def _spec_clauses(
edges = spec.edges_from_dependents()
virtuals = [x for x in itertools.chain.from_iterable([edge.virtuals for edge in edges])]
if not body:
if not body and not spec.concrete:
for virtual in virtuals:
clauses.append(fn.attr("provider_set", spec.name, virtual))
clauses.append(fn.attr("virtual_node", virtual))

View File

@ -554,6 +554,32 @@ attr("concrete_variant_set", node(X, A1), Variant, Value, ID)
attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.
error(100, "Cannot satisfy the request on {0} to have {1}={2}", BuildDependency, Variant, Value)
:- attr("build_requirement", ParentNode, build_requirement("variant_set", BuildDependency, Variant, Value)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "variant_value", BuildDependency, Variant, Value).
error(100, "Cannot satisfy the request on {0} to have the target set to {1}", BuildDependency, Target)
:- attr("build_requirement", ParentNode, build_requirement("node_target_set", BuildDependency, Target)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "node_target", BuildDependency, Target).
error(100, "Cannot satisfy the request on {0} to have the os set to {1}", BuildDependency, NodeOS)
:- attr("build_requirement", ParentNode, build_requirement("node_os_set", BuildDependency, NodeOS)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "node_os", BuildDependency, NodeOS).
error(100, "Cannot satisfy the request on {0} to have the platform set to {1}", BuildDependency, Platform)
:- attr("build_requirement", ParentNode, build_requirement("node_platform_set", BuildDependency, Platform)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "node_platform", BuildDependency, Platform).
error(100, "Cannot satisfy the request on {0} to have the following hash {1}", BuildDependency, BuildHash)
:- attr("build_requirement", ParentNode, build_requirement("node_target_set", BuildDependency, Target)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
attr("build_requirement", ParentNode, build_requirement("hash", BuildDependency, BuildHash)),
BuildHash != BuildDependencyHash.
% External nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
external(ParentNode),
@ -600,6 +626,32 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-
attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
build_requirement(ParentNode, node(X, BuildDependency)).
% Account for properties on the build requirements
%
% root %gcc@12.0 <properties for gcc> ^dep
%
attr("variant_set", node(X, BuildDependency), Variant, Value) :-
attr("build_requirement", ParentNode, build_requirement("variant_set", BuildDependency, Variant, Value)),
build_requirement(ParentNode, node(X, BuildDependency)).
attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).
attr("node_target_set", node(X, BuildDependency), Target) :-
attr("build_requirement", ParentNode, build_requirement("node_target_set", BuildDependency, Target)),
build_requirement(ParentNode, node(X, BuildDependency)).
attr("node_os_set", node(X, BuildDependency), NodeOS) :-
attr("build_requirement", ParentNode, build_requirement("node_os_set", BuildDependency, NodeOS)),
build_requirement(ParentNode, node(X, BuildDependency)).
attr("node_platform_set", node(X, BuildDependency), NodePlatform) :-
attr("build_requirement", ParentNode, build_requirement("node_platform_set", BuildDependency, NodePlatform)),
build_requirement(ParentNode, node(X, BuildDependency)).
attr("hash", node(X, BuildDependency), BuildHash) :-
attr("build_requirement", ParentNode, build_requirement("hash", BuildDependency, BuildHash)),
build_requirement(ParentNode, node(X, BuildDependency)).
1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),

View File

@ -2233,15 +2233,21 @@ def lookup_hash(self):
spec._dup(self._lookup_hash())
return spec
# Get dependencies that need to be replaced
for node in self.traverse(root=False):
if node.abstract_hash:
spec._add_dependency(node._lookup_hash(), depflag=0, virtuals=())
# Map the dependencies that need to be replaced
node_lookup = {
id(node): node._lookup_hash()
for node in self.traverse(root=False)
if node.abstract_hash
}
# reattach nodes that were not otherwise satisfied by new dependencies
for node in self.traverse(root=False):
if not any(n.satisfies(node) for n in spec.traverse()):
spec._add_dependency(node.copy(), depflag=0, virtuals=())
# Reconstruct dependencies
for edge in self.traverse_edges(root=False):
key = edge.parent.name
current_node = spec if key == spec.name else spec[key]
child_node = node_lookup.get(id(edge.spec), edge.spec.copy())
current_node._add_dependency(
child_node, depflag=edge.depflag, virtuals=edge.virtuals, direct=edge.direct
)
return spec

View File

@ -101,9 +101,6 @@
SPLIT_KVP = re.compile(rf"^({NAME})(:?==?)(.*)$")
#: Regex with groups to use for splitting %[virtuals=...] tokens
SPLIT_COMPILER_TOKEN = re.compile(rf"^%\[virtuals=({VALUE}|{QUOTED_VALUE})]\s*(.*)$")
#: A filename starts either with a "." or a "/" or a "{name}/, or on Windows, a drive letter
#: followed by a colon and "\" or "." or {name}\
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
@ -124,9 +121,9 @@ class SpecTokens(TokenBase):
"""
# Dependency
START_EDGE_PROPERTIES = r"(?:\^\[)"
START_EDGE_PROPERTIES = r"(?:[\^%]\[)"
END_EDGE_PROPERTIES = r"(?:\])"
DEPENDENCY = r"(?:\^)"
DEPENDENCY = r"(?:[\^\%])"
# Version
VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))"
GIT_VERSION = rf"@(?:{GIT_VERSION_PATTERN})"
@ -136,14 +133,6 @@ class SpecTokens(TokenBase):
BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}:?==(?:{VALUE}|{QUOTED_VALUE}))"
KEY_VALUE_PAIR = rf"(?:{NAME}:?=(?:{VALUE}|{QUOTED_VALUE}))"
# Compilers
COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
COMPILER = rf"(?:%\s*(?:{NAME}))"
COMPILER_AND_VERSION_WITH_VIRTUALS = (
rf"(?:%\[virtuals=(?:{VALUE}|{QUOTED_VALUE})\]"
rf"\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
)
COMPILER_WITH_VIRTUALS = rf"(?:%\[virtuals=(?:{VALUE}|{QUOTED_VALUE})\]\s*(?:{NAME}))"
# FILENAME
FILENAME = rf"(?:{FILENAME})"
# Package name
@ -275,25 +264,58 @@ def next_spec(
def add_dependency(dep, **edge_properties):
"""wrapper around root_spec._add_dependency"""
try:
root_spec._add_dependency(dep, **edge_properties)
target_spec._add_dependency(dep, **edge_properties)
except spack.error.SpecError as e:
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e
initial_spec = initial_spec or spack.spec.Spec()
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
current_spec = root_spec
while True:
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
is_direct = self.ctx.current_token.value[0] == "%"
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
edge_properties.setdefault("depflag", 0)
edge_properties.setdefault("virtuals", ())
edge_properties["direct"] = is_direct
dependency, warnings = self._parse_node(root_spec)
if is_direct:
target_spec = current_spec
edge_properties.setdefault("depflag", spack.deptypes.BUILD)
if dependency.name in LEGACY_COMPILER_TO_BUILTIN:
dependency.name = LEGACY_COMPILER_TO_BUILTIN[dependency.name]
else:
current_spec = dependency
target_spec = root_spec
edge_properties.setdefault("depflag", 0)
# print(f"[{current_spec}], {target_spec}->{dependency} {is_direct}")
parser_warnings.extend(warnings)
add_dependency(dependency, **edge_properties)
elif self.ctx.accept(SpecTokens.DEPENDENCY):
is_direct = self.ctx.current_token.value[0] == "%"
dependency, warnings = self._parse_node(root_spec)
edge_properties = {}
edge_properties["direct"] = is_direct
edge_properties["virtuals"] = tuple()
if is_direct:
target_spec = current_spec
edge_properties.setdefault("depflag", spack.deptypes.BUILD)
if dependency.name in LEGACY_COMPILER_TO_BUILTIN:
dependency.name = LEGACY_COMPILER_TO_BUILTIN[dependency.name]
else:
current_spec = dependency
target_spec = root_spec
edge_properties.setdefault("depflag", 0)
# print(f"[{current_spec}], {target_spec}->{dependency} {is_direct}")
parser_warnings.extend(warnings)
add_dependency(dependency, depflag=0, virtuals=())
add_dependency(dependency, **edge_properties)
else:
break
@ -384,34 +406,6 @@ def warn_if_after_compiler(token: str):
while True:
if (
self.ctx.accept(SpecTokens.COMPILER)
or self.ctx.accept(SpecTokens.COMPILER_AND_VERSION)
or self.ctx.accept(SpecTokens.COMPILER_WITH_VIRTUALS)
or self.ctx.accept(SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS)
):
current_token = self.ctx.current_token
if current_token.kind in (
SpecTokens.COMPILER_WITH_VIRTUALS,
SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS,
):
m = SPLIT_COMPILER_TOKEN.match(current_token.value)
assert m, "SPLIT_COMPILER_TOKEN and COMPILER_* do not agree."
virtuals_str, compiler_str = m.groups()
virtuals = tuple(virtuals_str.strip("'\" ").split(","))
else:
virtuals = tuple()
compiler_str = current_token.value[1:]
build_dependency = spack.spec.Spec(compiler_str)
if build_dependency.name in LEGACY_COMPILER_TO_BUILTIN:
build_dependency.name = LEGACY_COMPILER_TO_BUILTIN[build_dependency.name]
initial_spec._add_dependency(
build_dependency, depflag=spack.deptypes.BUILD, virtuals=virtuals, direct=True
)
last_compiler = self.ctx.current_token.value
elif (
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
or self.ctx.accept(SpecTokens.GIT_VERSION)
or self.ctx.accept(SpecTokens.VERSION)

View File

@ -28,9 +28,15 @@
(["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has a stand-alone test method in build-time callbacks
(["fail-test-audit"], ["PKG-PROPERTIES"]),
# This package implements and uses several deprecated stand-alone
# test methods
(["fail-test-audit-deprecated"], ["PKG-DEPRECATED-ATTRIBUTES"]),
# This package implements and uses several deprecated stand-alone test methods
pytest.param(
["fail-test-audit-deprecated"],
["PKG-DEPRECATED-ATTRIBUTES"],
marks=pytest.mark.xfail(
reason="inspect.getsource() reads the source file, "
"which misses an injected import line"
),
),
# This package has stand-alone test methods without non-trivial docstrings
(["fail-test-audit-docstring"], ["PKG-PROPERTIES"]),
# This package has a stand-alone test method without an implementation

View File

@ -241,13 +241,13 @@ def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
uninstall_cmd("-y", "--dependents", gspec.name)
# Test installing from build caches
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)
# This gives warning that spec is already installed
buildcache_cmd("install", "-u", cspec.name)
buildcache_cmd("install", "-uo", cspec.name)
# Test overwrite install
buildcache_cmd("install", "-fu", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
buildcache_cmd("keys", "-f")
buildcache_cmd("list")
@ -273,10 +273,10 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
# Install some packages with dependent packages
# test install in non-default install path scheme
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)
# Test force install in non-default install path scheme
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
@pytest.mark.requires_executables(*required_executables)
@ -298,19 +298,19 @@ def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
cspec = spack.concretize.concretize_one("corge")
# Install buildcache created with relativized rpaths
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
# This gives warning that spec is already installed
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)
# Install build cache
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
# Test overwrite install
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
@pytest.mark.requires_executables(*required_executables)
@ -327,7 +327,7 @@ def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
cspec = spack.concretize.concretize_one("corge")
# Test install in non-default install path scheme and relative path
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-ufo", cspec.name)
def test_push_and_fetch_keys(mock_gnupghome, tmp_path):

View File

@ -691,36 +691,6 @@ def test_clear_compiler_related_runtime_variables_of_build_deps(default_mock_con
assert result["ANOTHER_VAR"] == "this-should-be-present"
@pytest.mark.parametrize("context", [Context.BUILD, Context.RUN])
def test_build_system_globals_only_set_on_root_during_build(default_mock_concretization, context):
    """Test whether when setting up a build environment, the build related globals are set only
    in the top level spec.

    TODO: Since module instances are globals themselves, and Spack defines properties on them, they
    persist across tests. In principle this is not terrible, cause the variables are mostly static.
    But obviously it can lead to very hard to find bugs... We should get rid of those globals and
    define them instead as a property on the package instance.
    """
    root = spack.concretize.concretize_one("mpileaks")
    # Globals that the build environment setup may inject into package modules.
    build_variables = ("std_cmake_args", "std_meson_args", "std_pip_args")

    # See todo above, we clear out any properties that may have been set by the previous test.
    # Commenting this loop will make the test fail. I'm leaving it here as a reminder that those
    # globals were always a bad idea, and we should pass them to the package instance.
    for spec in root.traverse():
        for variable in build_variables:
            spec.package.module.__dict__.pop(variable, None)

    spack.build_environment.SetupContext(root, context=context).set_all_package_py_globals()

    # Expect the globals to be set at the root in a build context only.
    should_be_set = lambda depth: context == Context.BUILD and depth == 0
    for depth, spec in root.traverse(depth=True, root=True):
        for variable in build_variables:
            assert hasattr(spec.package.module, variable) == should_be_set(depth)
def test_rpath_with_duplicate_link_deps():
"""If we have two instances of one package in the same link sub-dag, only the newest version is
rpath'ed. This is for runtime support without splicing."""

View File

@ -15,6 +15,8 @@
compiler = spack.main.SpackCommand("compiler")
pytestmark = [pytest.mark.usefixtures("mock_packages")]
@pytest.fixture
def compilers_dir(mock_executable):
@ -80,7 +82,7 @@ def test_compiler_find_without_paths(no_packages_yaml, working_env, mock_executa
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
def test_compiler_remove(mutable_config):
"""Tests that we can remove a compiler from configuration."""
assert any(
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()
@ -93,7 +95,7 @@ def test_compiler_remove(mutable_config, mock_packages):
@pytest.mark.regression("37996")
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
def test_removing_compilers_from_multiple_scopes(mutable_config):
# Duplicate "site" scope into "user" scope
site_config = spack.config.get("packages", scope="site")
spack.config.set("packages", site_config, scope="user")
@ -189,12 +191,12 @@ def test_compiler_find_path_order(no_packages_yaml, working_env, compilers_dir):
}
def test_compiler_list_empty(no_packages_yaml, working_env, compilers_dir):
def test_compiler_list_empty(no_packages_yaml, compilers_dir, monkeypatch):
"""Spack should not automatically search for compilers when listing them and none are
available. And when stdout is not a tty like in tests, there should be no output and
no error exit code.
"""
os.environ["PATH"] = str(compilers_dir)
monkeypatch.setenv("PATH", str(compilers_dir), prepend=":")
out = compiler("list")
assert not out
assert compiler.returncode == 0

View File

@ -4286,7 +4286,7 @@ def test_env_include_packages_url(
"""Test inclusion of a (GitHub) URL."""
develop_url = "https://github.com/fake/fake/blob/develop/"
default_packages = develop_url + "etc/fake/defaults/packages.yaml"
sha256 = "8b69d9c6e983dfb8bac2ddc3910a86265cffdd9c85f905c716d426ec5b0d9847"
sha256 = "6a1b26c857ca7e5bcd7342092e2f218da43d64b78bd72771f603027ea3c8b4af"
spack_yaml = tmpdir.join("spack.yaml")
with spack_yaml.open("w") as f:
f.write(

View File

@ -18,6 +18,8 @@
from spack.main import SpackCommand
from spack.spec import Spec
pytestmark = [pytest.mark.usefixtures("mock_packages")]
@pytest.fixture
def executables_found(monkeypatch):
@ -36,40 +38,6 @@ def define_plat_exe(exe):
return exe
def test_find_external_single_package(mock_executable):
    """A single fake cmake on the search path is detected as ``cmake@1.foo``."""
    fake_cmake = mock_executable("cmake", output="echo cmake version 1.foo")
    prefix = fake_cmake.parent.parent

    detected = spack.detection.by_path(["cmake"], path_hints=[str(prefix)])

    # Exactly one package was found, and it is cmake with the advertised version.
    assert len(detected) == 1 and "cmake" in detected
    candidates = detected["cmake"]
    assert len(candidates) == 1 and candidates[0] == Spec("cmake@1.foo")
def test_find_external_two_instances_same_package(mock_executable):
    """Two cmake installations in different prefixes are both detected, each paired
    with the external prefix that contains its own executable."""
    # Each of these cmake instances is created in a different prefix
    # In Windows, quoted strings are echo'd with quotes includes
    # we need to avoid that for proper regex.
    first = mock_executable("cmake", output="echo cmake version 1.foo", subdir=("base1", "bin"))
    second = mock_executable("cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin"))

    finder = spack.detection.path.ExecutablesFinder()
    found = finder.find(
        pkg_name="cmake",
        initial_guess=[str(first.parent.parent), str(second.parent.parent)],
        repository=spack.repo.PATH,
    )

    assert len(found) == 2
    prefixes = {s: s.external_path for s in found}
    assert prefixes[Spec("cmake@1.foo")] == (
        spack.detection.executable_prefix(str(first.parent))
    ), prefixes
    assert prefixes[Spec("cmake@3.17.2")] == (
        spack.detection.executable_prefix(str(second.parent))
    )
def test_find_external_update_config(mutable_config):
entries = [
Spec.from_detection("cmake@1.foo", external_path="/x/y1"),
@ -101,13 +69,24 @@ def test_get_executables(working_env, mock_executable):
# TODO: this test should be made to work, but in the meantime it is
# causing intermittent (spurious) CI failures on all PRs
@pytest.mark.not_on_windows("Test fails intermittently on Windows")
def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
def test_find_external_cmd_not_buildable(
mutable_config, working_env, mock_executable, monkeypatch
):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
not buildable.
"""
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
version = "1.foo"
@classmethod
def _determine_version(cls, exe):
return version
cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
cmake_path = mock_executable("cmake", output=f"echo cmake version {version}")
os.environ["PATH"] = str(cmake_path.parent)
external("find", "--not-buildable", "cmake")
pkgs_cfg = spack.config.get("packages")
assert "cmake" in pkgs_cfg
@ -123,10 +102,12 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
["detectable"],
[],
[
"builtin.mock.cmake",
"builtin.mock.find-externals1",
"builtin.mock.gcc",
"builtin.mock.llvm",
"builtin.mock.intel-oneapi-compilers",
"builtin.mock.llvm",
"builtin.mock.mpich",
],
),
# find --all --exclude find-externals1
@ -134,26 +115,38 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
None,
["detectable"],
["builtin.mock.find-externals1"],
["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
[
"builtin.mock.cmake",
"builtin.mock.gcc",
"builtin.mock.intel-oneapi-compilers",
"builtin.mock.llvm",
"builtin.mock.mpich",
],
),
(
None,
["detectable"],
["find-externals1"],
["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
[
"builtin.mock.cmake",
"builtin.mock.gcc",
"builtin.mock.intel-oneapi-compilers",
"builtin.mock.llvm",
"builtin.mock.mpich",
],
),
# find cmake (and cmake is not detectable)
(["cmake"], ["detectable"], [], []),
# find hwloc (and mock hwloc is not detectable)
(["hwloc"], ["detectable"], [], []),
],
)
def test_package_selection(names, tags, exclude, expected, mutable_mock_repo):
def test_package_selection(names, tags, exclude, expected):
"""Tests various cases of selecting packages"""
# In the mock repo we only have 'find-externals1' that is detectable
result = spack.cmd.external.packages_to_search_for(names=names, tags=tags, exclude=exclude)
assert set(result) == set(expected)
def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_repo, monkeypatch):
def test_find_external_no_manifest(mutable_config, working_env, monkeypatch):
"""The user runs 'spack external find'; the default path for storing
manifest files does not exist. Ensure that the command does not
fail.
@ -166,7 +159,7 @@ def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_rep
def test_find_external_empty_default_manifest_dir(
mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
mutable_config, working_env, tmpdir, monkeypatch
):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but is empty. Ensure that the command does not
@ -181,7 +174,7 @@ def test_find_external_empty_default_manifest_dir(
@pytest.mark.not_on_windows("Can't chmod on Windows")
@pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_find_external_manifest_with_bad_permissions(
mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
mutable_config, working_env, tmpdir, monkeypatch
):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but with insufficient permissions. Check that
@ -201,7 +194,7 @@ def test_find_external_manifest_with_bad_permissions(
os.chmod(test_manifest_file_path, 0o700)
def test_find_external_manifest_failure(mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
def test_find_external_manifest_failure(mutable_config, tmpdir, monkeypatch):
"""The user runs 'spack external find'; the manifest parsing fails with
some exception. Ensure that the command still succeeds (i.e. moves on
to other external detection mechanisms).
@ -221,7 +214,7 @@ def fail():
assert "Skipping manifest and continuing" in output
def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
def test_find_external_merge(mutable_config, tmp_path):
"""Checks that 'spack find external' doesn't overwrite an existing spec in packages.yaml."""
pkgs_cfg_init = {
"find-externals1": {
@ -247,7 +240,7 @@ def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
assert {"spec": "find-externals1@1.2", "prefix": "/x/y2"} in pkg_externals
def test_list_detectable_packages(mutable_config, mutable_mock_repo):
def test_list_detectable_packages(mutable_config):
external("list")
assert external.returncode == 0
@ -293,13 +286,23 @@ def test_new_entries_are_reported_correctly(mock_executable, mutable_config, mon
@pytest.mark.parametrize("command_args", [("-t", "build-tools"), ("-t", "build-tools", "cmake")])
@pytest.mark.not_on_windows("the test uses bash scripts")
def test_use_tags_for_detection(command_args, mock_executable, mutable_config, monkeypatch):
versions = {"cmake": "3.19.1", "openssl": "2.8.3"}
@classmethod
def _determine_version(cls, exe):
return versions[os.path.basename(exe)]
cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
# Prepare an environment to detect a fake cmake
cmake_exe = mock_executable("cmake", output="echo cmake version 3.19.1")
cmake_exe = mock_executable("cmake", output=f"echo cmake version {versions['cmake']}")
prefix = os.path.dirname(cmake_exe)
monkeypatch.setenv("PATH", prefix)
openssl_exe = mock_executable("openssl", output="OpenSSL 2.8.3")
openssl_exe = mock_executable("openssl", output=f"OpenSSL {versions['openssl']}")
prefix = os.path.dirname(openssl_exe)
monkeypatch.setenv("PATH", prefix)
@ -316,6 +319,16 @@ def test_failures_in_scanning_do_not_result_in_an_error(
mock_executable, monkeypatch, mutable_config
):
"""Tests that scanning paths with wrong permissions, won't cause `external find` to error."""
versions = {"first": "3.19.1", "second": "3.23.3"}
@classmethod
def _determine_version(cls, exe):
bin_parent = os.path.dirname(exe).split(os.sep)[-2]
return versions[bin_parent]
cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
cmake_exe1 = mock_executable(
"cmake", output="echo cmake version 3.19.1", subdir=("first", "bin")
)
@ -333,21 +346,30 @@ def test_failures_in_scanning_do_not_result_in_an_error(
assert external.returncode == 0
assert "The following specs have been" in output
assert "cmake" in output
assert "3.23.3" in output
assert "3.19.1" not in output
for vers in versions.values():
assert vers in output
def test_detect_virtuals(mock_executable, mutable_config, monkeypatch):
"""Test whether external find --not-buildable sets virtuals as non-buildable (unless user
config sets them to buildable)"""
mpich = mock_executable("mpichversion", output="echo MPICH Version: 4.0.2")
version = "4.0.2"
@classmethod
def _determine_version(cls, exe):
return version
cmake_cls = spack.repo.PATH.get_pkg_class("mpich")
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
mpich = mock_executable("mpichversion", output=f"echo MPICH Version: {version}")
prefix = os.path.dirname(mpich)
external("find", "--path", prefix, "--not-buildable", "mpich")
# Check that mpich was correctly detected
mpich = mutable_config.get("packages:mpich")
assert mpich["buildable"] is False
assert Spec(mpich["externals"][0]["spec"]).satisfies("mpich@4.0.2")
assert Spec(mpich["externals"][0]["spec"]).satisfies(f"mpich@{version}")
# Check that the virtual package mpi was marked as non-buildable
assert mutable_config.get("packages:mpi:buildable") is False

View File

@ -111,6 +111,7 @@ def split(output):
pkg = spack.main.SpackCommand("pkg")
@pytest.mark.requires_builtin("builtin repository path must exist")
def test_builtin_repo():
    """``builtin_repo()`` returns the very object the global repo path holds for "builtin"."""
    via_helper = spack.repo.builtin_repo()
    via_global_path = spack.repo.PATH.get_repo("builtin")
    assert via_helper is via_global_path

View File

@ -1,15 +1,21 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import pathlib
import pytest
from llnl.util.filesystem import working_dir
import spack.config
import spack.environment as ev
import spack.main
import spack.repo
import spack.repo_migrate
from spack.main import SpackCommand
from spack.util.executable import Executable
repo = spack.main.SpackCommand("repo")
env = SpackCommand("env")
@ -68,3 +74,101 @@ def test_env_repo_path_vars_substitution(
with ev.read("test") as newenv:
repos_specs = spack.config.get("repos", default={}, scope=newenv.scope_name)
assert current_dir in repos_specs
#: Package API v1 file for "7zip": everything, including the ``Package`` base
#: class, comes from the ``spack.package`` star-import.
OLD_7ZIP = b"""\
# some comment
from spack.package import *
class _7zip(Package):
pass
"""

#: Expected result of migrating OLD_7ZIP: the build-system base class is now
#: imported explicitly from ``spack_repo.builtin.build_systems``.
NEW_7ZIP = b"""\
# some comment
from spack_repo.builtin.build_systems.generic import Package
from spack.package import *
class _7zip(Package):
pass
"""

#: Package API v1 file for "py-numpy", using ``CMakePackage`` and the
#: ``generator`` directive from the star-import.
OLD_NUMPY = b"""\
# some comment
from spack.package import *
class PyNumpy(CMakePackage):
generator("ninja")
"""

#: Expected result of migrating OLD_NUMPY: both ``CMakePackage`` and
#: ``generator`` are imported explicitly from the cmake build system module.
NEW_NUMPY = b"""\
# some comment
from spack_repo.builtin.build_systems.cmake import CMakePackage, generator
from spack.package import *
class PyNumpy(CMakePackage):
generator("ninja")
"""
def test_repo_migrate(tmp_path: pathlib.Path, config):
    """``spack repo migrate --fix`` copies v1 packages into the v2 layout with
    rewritten imports, leaving the v1 originals untouched."""
    v1_root, _ = spack.repo.create_repo(str(tmp_path), "org.repo", package_api=(1, 0))
    v1_packages = pathlib.Path(spack.repo.from_path(v1_root).packages_path)
    v2_root = pathlib.Path(v1_root) / "spack_repo" / "org" / "repo"

    # (v1 path, v1 contents, v2 path, expected v2 contents)
    cases = [
        (
            v1_packages / "7zip" / "package.py",
            OLD_7ZIP,
            v2_root / "packages" / "_7zip" / "package.py",
            NEW_7ZIP,
        ),
        (
            v1_packages / "py-numpy" / "package.py",
            OLD_NUMPY,
            v2_root / "packages" / "py_numpy" / "package.py",
            NEW_NUMPY,
        ),
    ]

    for v1_path, v1_bytes, _, _ in cases:
        v1_path.parent.mkdir(parents=True)
        v1_path.write_bytes(v1_bytes)

    repo("migrate", "--fix", v1_root)

    for v1_path, v1_bytes, v2_path, v2_bytes in cases:
        # old files are not touched since they are moved
        assert v1_path.read_bytes() == v1_bytes
        # new files are created and have updated contents
        assert v2_path.read_bytes() == v2_bytes
@pytest.mark.not_on_windows("Known failure on windows")
def test_migrate_diff(git: Executable, tmp_path: pathlib.Path):
    """With ``fix=False`` the import migration emits a unified diff that, once
    applied with ``git apply``, yields the same files an in-place fix would,
    while unparseable packages are reported and skipped."""
    root, _ = spack.repo.create_repo(str(tmp_path), "foo", package_api=(2, 0))
    r = pathlib.Path(root)
    pkg_7zip = r / "packages" / "_7zip" / "package.py"
    pkg_py_numpy_new = r / "packages" / "py_numpy" / "package.py"
    pkg_broken = r / "packages" / "broken" / "package.py"

    pkg_7zip.parent.mkdir(parents=True)
    pkg_py_numpy_new.parent.mkdir(parents=True)
    pkg_broken.parent.mkdir(parents=True)
    pkg_7zip.write_bytes(OLD_7ZIP)
    pkg_py_numpy_new.write_bytes(OLD_NUMPY)
    # deliberately invalid Python: migration must skip it rather than crash
    pkg_broken.write_bytes(b"syntax(error")

    stderr = io.StringIO()

    # fix=False writes a patch to the given stream instead of editing files in place
    with open(tmp_path / "imports.patch", "w", encoding="utf-8") as stdout:
        spack.repo_migrate.migrate_v2_imports(
            str(r / "packages"), str(r), fix=False, out=stdout, err=stderr
        )

    # the unparseable package is reported on the error stream
    assert f"Skipping {pkg_broken}" in stderr.getvalue()

    # apply the patch and verify the changes
    with working_dir(str(r)):
        git("apply", str(tmp_path / "imports.patch"))

    assert pkg_7zip.read_bytes() == NEW_7ZIP
    assert pkg_py_numpy_new.read_bytes() == NEW_NUMPY

View File

@ -6,6 +6,8 @@
from spack.compilers.config import CompilerFactory
pytestmark = [pytest.mark.usefixtures("config", "mock_packages")]
@pytest.fixture()
def mock_compiler(mock_executable):
@ -55,7 +57,7 @@ def test_compiler_conversion_with_flags(mock_compiler):
assert compiler_spec.extra_attributes["flags"]["cxxflags"] == "-O2 -g"
def tests_compiler_conversion_with_environment(mock_compiler):
def test_compiler_conversion_with_environment(mock_compiler):
"""Tests that custom environment modifications are converted appropriately
for external compilers
"""
@ -67,7 +69,7 @@ def tests_compiler_conversion_with_environment(mock_compiler):
assert compiler_spec.extra_attributes["environment"] == mods
def tests_compiler_conversion_extra_rpaths(mock_compiler):
def test_compiler_conversion_extra_rpaths(mock_compiler):
"""Tests that extra rpaths are converted appropriately for external compilers"""
mock_compiler["extra_rpaths"] = ["/foo/bar"]
compiler_spec = CompilerFactory.from_legacy_yaml(mock_compiler)[0]
@ -76,7 +78,7 @@ def tests_compiler_conversion_extra_rpaths(mock_compiler):
assert compiler_spec.extra_attributes["extra_rpaths"] == ["/foo/bar"]
def tests_compiler_conversion_modules(mock_compiler):
def test_compiler_conversion_modules(mock_compiler):
"""Tests that modules are converted appropriately for external compilers"""
modules = ["foo/4.1.2", "bar/5.1.4"]
mock_compiler["modules"] = modules
@ -86,7 +88,7 @@ def tests_compiler_conversion_modules(mock_compiler):
@pytest.mark.regression("49717")
def tests_compiler_conversion_corrupted_paths(mock_compiler):
def test_compiler_conversion_corrupted_paths(mock_compiler):
"""Tests that compiler entries with corrupted path do not raise"""
mock_compiler["paths"] = {"cc": "gcc", "cxx": "g++", "fc": "gfortran", "f77": "gfortran"}
# Test this call doesn't raise

View File

@ -28,11 +28,14 @@ def call_compiler(exe, *args, **kwargs):
@pytest.fixture()
def mock_gcc(config):
compilers = spack.compilers.config.all_compilers_from(configuration=config)
assert compilers, "No compilers available"
compilers.sort(key=lambda x: (x.name == "gcc", x.version))
# Deepcopy is used to avoid more boilerplate when changing the "extra_attributes"
return copy.deepcopy(compilers[-1])
@pytest.mark.usefixtures("mock_packages")
class TestCompilerPropertyDetector:
@pytest.mark.parametrize(
"language,flagname",

View File

@ -3,6 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import platform
import sys
import _vendoring.jinja2
@ -760,7 +761,7 @@ def test_virtual_is_fully_expanded_for_mpileaks(self):
@pytest.mark.parametrize(
"spec_str,expected,not_expected",
[
# clang only provides C, and C++ compilers, while gcc has also fortran
# clang (llvm~flang) only provides C, and C++ compilers, while gcc has also fortran
#
# If we ask mpileaks%clang, then %gcc must be used for fortran, and since
# %gcc is preferred to clang in config, it will be used for most nodes
@ -3675,3 +3676,144 @@ def test_concrete_multi_valued_variants_when_args(default_mock_concretization):
for c in ("foo:=a", "foo:=a,b,c", "foo:=a,b", "foo:=a,c"):
s = default_mock_concretization(f"mvdefaults {c}")
assert not s.satisfies("^pkg-b")
@pytest.mark.usefixtures("mock_packages")
@pytest.mark.parametrize(
"constraint_in_yaml,unsat_request,sat_request",
[
# Arch parts
pytest.param(
"target=x86_64",
"target=core2",
"target=x86_64",
marks=pytest.mark.skipif(
platform.machine() != "x86_64", reason="only valid for x86_64"
),
),
pytest.param(
"target=core2",
"target=x86_64",
"target=core2",
marks=pytest.mark.skipif(
platform.machine() != "x86_64", reason="only valid for x86_64"
),
),
("os=debian6", "os=redhat6", "os=debian6"),
("platform=test", "platform=linux", "platform=test"),
# Variants
("~lld", "+lld", "~lld"),
("+lld", "~lld", "+lld"),
],
)
def test_spec_parts_on_fresh_compilers(
constraint_in_yaml, unsat_request, sat_request, mutable_config, tmp_path
):
"""Tests that spec parts like targets and variants in `%<package> target=<target> <variants>`
are associated with `package` for `%` just as they would be for `^`, when we concretize
without reusing.
"""
packages_yaml = syaml.load_config(
f"""
packages:
llvm::
buildable: false
externals:
- spec: "llvm+clang@20 {constraint_in_yaml}"
prefix: {tmp_path / 'llvm-20'}
"""
)
mutable_config.set("packages", packages_yaml["packages"])
# Check the abstract spec is formed correctly
abstract_spec = Spec(f"pkg-a %llvm@20 +clang {unsat_request}")
assert abstract_spec["llvm"].satisfies(f"@20 +clang {unsat_request}")
# Check that we can't concretize the spec, since llvm is not buildable
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
spack.concretize.concretize_one(abstract_spec)
# Check we can instead concretize if we use the correct constraint
s = spack.concretize.concretize_one(f"pkg-a %llvm@20 +clang {sat_request}")
assert s["c"].external and s["c"].satisfies(f"@20 +clang {sat_request}")
@pytest.mark.usefixtures("mock_packages", "mutable_database")
@pytest.mark.parametrize(
"constraint_in_yaml,unsat_request,sat_request",
[
# Arch parts
pytest.param(
"target=x86_64",
"target=core2",
"target=x86_64",
marks=pytest.mark.skipif(
platform.machine() != "x86_64", reason="only valid for x86_64"
),
),
pytest.param(
"target=core2",
"target=x86_64",
"target=core2",
marks=pytest.mark.skipif(
platform.machine() != "x86_64", reason="only valid for x86_64"
),
),
("os=debian6", "os=redhat6", "os=debian6"),
("platform=test", "platform=linux", "platform=test"),
# Variants
("~lld", "+lld", "~lld"),
("+lld", "~lld", "+lld"),
],
)
def test_spec_parts_on_reused_compilers(
constraint_in_yaml, unsat_request, sat_request, mutable_config, tmp_path
):
"""Tests that requests of the form <package>%<compiler> <requests> are considered for reused
specs, even though build dependency are not part of the ASP problem.
"""
packages_yaml = syaml.load_config(
f"""
packages:
c:
require: llvm
cxx:
require: llvm
llvm::
buildable: false
externals:
- spec: "llvm+clang@20 {constraint_in_yaml}"
prefix: {tmp_path / 'llvm-20'}
mpileaks:
buildable: true
"""
)
mutable_config.set("packages", packages_yaml["packages"])
# Install the spec
installed_spec = spack.concretize.concretize_one(f"mpileaks %llvm@20 {sat_request}")
PackageInstaller([installed_spec.package], fake=True, explicit=True).install()
# Make mpileaks not buildable
mutable_config.set("packages:mpileaks:buildable", False)
# Check we can't concretize with the unsat request...
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
spack.concretize.concretize_one(f"mpileaks %llvm@20 {unsat_request}")
# ...but we can with the original constraint
with spack.config.override("concretizer:reuse", True):
s = spack.concretize.concretize_one(f"mpileaks %llvm@20 {sat_request}")
assert s.dag_hash() == installed_spec.dag_hash()
def test_use_compiler_by_hash(mock_packages, mutable_database, mutable_config):
"""Tests that we can reuse an installed compiler specifying its hash"""
installed_spec = spack.concretize.concretize_one("gcc@14.0")
PackageInstaller([installed_spec.package], fake=True, explicit=True).install()
with spack.config.override("concretizer:reuse", True):
s = spack.concretize.concretize_one(f"mpileaks %gcc/{installed_spec.dag_hash()}")
assert s["c"].dag_hash() == installed_spec.dag_hash()

View File

@ -163,16 +163,15 @@ def test_flag_order_and_grouping(
if cmp_flags:
compiler_spec = "%gcc@12.100.100"
cmd_flags_str = f'cflags="{cmd_flags}"' if cmd_flags else ""
if dflags:
spec_str = f"x+activatemultiflag {compiler_spec} ^y"
spec_str = f"x+activatemultiflag {compiler_spec} ^y {cmd_flags_str}"
expected_dflags = "-d1 -d2"
else:
spec_str = f"y {compiler_spec}"
spec_str = f"y {cmd_flags_str} {compiler_spec}"
expected_dflags = None
if cmd_flags:
spec_str += f' cflags="{cmd_flags}"'
root_spec = spack.concretize.concretize_one(spec_str)
spec = root_spec["y"]
satisfy_flags = " ".join(x for x in [cmd_flags, req_flags, cmp_flags, expected_dflags] if x)
@ -277,6 +276,6 @@ def test_flag_injection_different_compilers(mock_packages, mutable_config):
"""Tests that flag propagation is not activated on nodes with a compiler that is different
from the propagation source.
"""
s = spack.concretize.concretize_one('mpileaks %gcc cflags=="-O2" ^callpath %llvm')
s = spack.concretize.concretize_one('mpileaks cflags=="-O2" %gcc ^callpath %llvm')
assert s.satisfies('cflags="-O2"') and s["c"].name == "gcc"
assert not s["callpath"].satisfies('cflags="-O2"') and s["callpath"]["c"].name == "llvm"

View File

@ -892,6 +892,7 @@ def no_packages_yaml(mutable_config):
compilers_yaml = local_config.get_section_filename("packages")
if os.path.exists(compilers_yaml):
os.remove(compilers_yaml)
mutable_config.clear_caches()
return mutable_config
@ -2077,6 +2078,11 @@ def pytest_runtest_setup(item):
if only_windows_marker and sys.platform != "win32":
pytest.skip(*only_windows_marker.args)
# Skip tests marked "requires_builtin" if builtin repo is required
requires_builtin_marker = item.get_closest_marker(name="requires_builtin")
if requires_builtin_marker and not os.path.exists(spack.paths.packages_path):
pytest.skip(*requires_builtin_marker.args)
def _sequential_executor(*args, **kwargs):
return spack.util.parallel.SequentialExecutor()

View File

@ -81,7 +81,7 @@ packages:
fortran: /path/bin/gfortran-10
llvm:
externals:
- spec: "llvm@15.0.0 +clang os={linux_os.name}{linux_os.version} target={target}"
- spec: "llvm@15.0.0 +clang~flang os={linux_os.name}{linux_os.version} target={target}"
prefix: /path
extra_attributes:
compilers:

View File

@ -523,3 +523,10 @@ def test_subdir_in_v2():
with pytest.raises(spack.repo.BadRepoError, match="Must be a valid Python module name"):
spack.repo._validate_and_normalize_subdir(subdir="123", root="root", package_api=(2, 0))
def test_is_package_module():
assert spack.repo.is_package_module("spack.pkg.something.something")
assert spack.repo.is_package_module("spack_repo.foo.bar.baz.package")
assert not spack.repo.is_package_module("spack_repo.builtin.build_systems.cmake")
assert not spack.repo.is_package_module("spack.something.else")

View File

@ -137,7 +137,14 @@ def test_spec_list_nested_matrices(self, parser_and_speclist):
expected_components = itertools.product(
["zlib", "libelf"], ["%gcc", "%intel"], ["+shared", "~shared"]
)
expected = [Spec(" ".join(combo)) for combo in expected_components]
def _reduce(*, combo):
root = Spec(combo[0])
for x in combo[1:]:
root.constrain(x)
return root
expected = [_reduce(combo=combo) for combo in expected_components]
assert set(result.specs) == set(expected)
@pytest.mark.regression("16897")

View File

@ -51,10 +51,6 @@ def dependency_with_version(text):
)
def compiler_with_version_range(text):
return text, [Token(SpecTokens.COMPILER_AND_VERSION, value=text)], text
@pytest.fixture()
def specfile_for(default_mock_concretization):
def _specfile_for(spec_str, filename):
@ -84,7 +80,6 @@ def _specfile_for(spec_str, filename):
simple_package_name("3dtk"),
simple_package_name("ns-3-dev"),
# Single token anonymous specs
("%intel", [Token(SpecTokens.COMPILER, value="%intel")], "%intel"),
("@2.7", [Token(SpecTokens.VERSION, value="@2.7")], "@2.7"),
("@2.7:", [Token(SpecTokens.VERSION, value="@2.7:")], "@2.7:"),
("@:2.7", [Token(SpecTokens.VERSION, value="@:2.7")], "@:2.7"),
@ -97,6 +92,14 @@ def _specfile_for(spec_str, filename):
"arch=test-None-None",
),
# Multiple tokens anonymous specs
(
"%intel",
[
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "intel"),
],
"%intel",
),
(
"languages=go @4.2:",
[
@ -159,7 +162,9 @@ def _specfile_for(spec_str, filename):
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="foo"),
Token(SpecTokens.VERSION, value="@2.0"),
Token(SpecTokens.COMPILER_AND_VERSION, value="%bar@1.0"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="bar"),
Token(SpecTokens.VERSION, value="@1.0"),
],
"foo@2.0 %bar@1.0",
),
@ -178,7 +183,9 @@ def _specfile_for(spec_str, filename):
Token(SpecTokens.VERSION, value="@1.2:1.4,1.6"),
Token(SpecTokens.BOOL_VARIANT, value="+debug"),
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
Token(SpecTokens.VERSION, value="@12.1"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
Token(SpecTokens.VERSION, value="@8.1_1e"),
@ -194,7 +201,9 @@ def _specfile_for(spec_str, filename):
Token(SpecTokens.VERSION, value="@1.2:1.4,1.6"),
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
Token(SpecTokens.KEY_VALUE_PAIR, value="debug=2"),
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
Token(SpecTokens.VERSION, value="@12.1"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
Token(SpecTokens.VERSION, value="@8.1_1e"),
@ -212,7 +221,9 @@ def _specfile_for(spec_str, filename):
Token(SpecTokens.KEY_VALUE_PAIR, value="cppflags=-O3"),
Token(SpecTokens.BOOL_VARIANT, value="+debug"),
Token(SpecTokens.BOOL_VARIANT, value="~qt_4"),
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
Token(SpecTokens.VERSION, value="@12.1"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="stackwalker"),
Token(SpecTokens.VERSION, value="@8.1_1e"),
@ -226,7 +237,9 @@ def _specfile_for(spec_str, filename):
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="yaml-cpp"),
Token(SpecTokens.VERSION, value="@0.1.8"),
Token(SpecTokens.COMPILER_AND_VERSION, value="%intel@12.1"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
Token(SpecTokens.VERSION, value="@12.1"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="boost"),
Token(SpecTokens.VERSION, value="@3.1.4"),
@ -237,7 +250,8 @@ def _specfile_for(spec_str, filename):
r"builtin.yaml-cpp%gcc",
[
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
Token(SpecTokens.COMPILER, value="%gcc"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
],
"yaml-cpp %gcc",
),
@ -245,7 +259,8 @@ def _specfile_for(spec_str, filename):
r"testrepo.yaml-cpp%gcc",
[
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="testrepo.yaml-cpp"),
Token(SpecTokens.COMPILER, value="%gcc"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
],
"yaml-cpp %gcc",
),
@ -254,7 +269,9 @@ def _specfile_for(spec_str, filename):
[
Token(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME, value="builtin.yaml-cpp"),
Token(SpecTokens.VERSION, value="@0.1.8"),
Token(SpecTokens.COMPILER_AND_VERSION, value="%gcc@7.2.0"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@7.2.0"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="boost"),
Token(SpecTokens.VERSION, value="@3.1.4"),
@ -419,11 +436,51 @@ def _specfile_for(spec_str, filename):
f"develop-branch-version@git.{'a' * 40}=develop+var1+var2",
),
# Compiler with version ranges
compiler_with_version_range("%gcc@10.2.1:"),
compiler_with_version_range("%gcc@:10.2.1"),
compiler_with_version_range("%gcc@10.2.1:12.1.0"),
compiler_with_version_range("%gcc@10.1.0,12.2.1:"),
compiler_with_version_range("%gcc@:8.4.3,10.2.1:12.1.0"),
(
"%gcc@10.2.1:",
[
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@10.2.1:"),
],
"%gcc@10.2.1:",
),
(
"%gcc@:10.2.1",
[
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@:10.2.1"),
],
"%gcc@:10.2.1",
),
(
"%gcc@10.2.1:12.1.0",
[
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@10.2.1:12.1.0"),
],
"%gcc@10.2.1:12.1.0",
),
(
"%gcc@10.1.0,12.2.1:",
[
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@10.1.0,12.2.1:"),
],
"%gcc@10.1.0,12.2.1:",
),
(
"%gcc@:8.4.3,10.2.1:12.1.0",
[
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@:8.4.3,10.2.1:12.1.0"),
],
"%gcc@:8.4.3,10.2.1:12.1.0",
),
# Special key value arguments
("dev_path=*", [Token(SpecTokens.KEY_VALUE_PAIR, value="dev_path=*")], "dev_path='*'"),
(
@ -484,7 +541,9 @@ def _specfile_for(spec_str, filename):
"+ debug % intel @ 12.1:12.6",
[
Token(SpecTokens.BOOL_VARIANT, value="+ debug"),
Token(SpecTokens.COMPILER_AND_VERSION, value="% intel @ 12.1:12.6"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
Token(SpecTokens.VERSION, value="@ 12.1:12.6"),
],
"+debug %intel@12.1:12.6",
),
@ -509,7 +568,8 @@ def _specfile_for(spec_str, filename):
"@:0.4 % nvhpc",
[
Token(SpecTokens.VERSION, value="@:0.4"),
Token(SpecTokens.COMPILER, value="% nvhpc"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="nvhpc"),
],
"@:0.4 %nvhpc",
),
@ -602,7 +662,10 @@ def _specfile_for(spec_str, filename):
"zlib %[virtuals=c] gcc",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
Token(SpecTokens.COMPILER_WITH_VIRTUALS, "%[virtuals=c] gcc"),
Token(SpecTokens.START_EDGE_PROPERTIES, value="%["),
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=c"),
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
],
"zlib %[virtuals=c] gcc",
),
@ -610,7 +673,10 @@ def _specfile_for(spec_str, filename):
"zlib %[virtuals=c,cxx] gcc",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
Token(SpecTokens.COMPILER_WITH_VIRTUALS, "%[virtuals=c,cxx] gcc"),
Token(SpecTokens.START_EDGE_PROPERTIES, value="%["),
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=c,cxx"),
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
],
"zlib %[virtuals=c,cxx] gcc",
),
@ -618,7 +684,11 @@ def _specfile_for(spec_str, filename):
"zlib %[virtuals=c,cxx] gcc@14.1",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
Token(SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS, "%[virtuals=c,cxx] gcc@14.1"),
Token(SpecTokens.START_EDGE_PROPERTIES, value="%["),
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=c,cxx"),
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@14.1"),
],
"zlib %[virtuals=c,cxx] gcc@14.1",
),
@ -626,10 +696,15 @@ def _specfile_for(spec_str, filename):
"zlib %[virtuals=fortran] gcc@14.1 %[virtuals=c,cxx] clang",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "zlib"),
Token(
SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS, "%[virtuals=fortran] gcc@14.1"
),
Token(SpecTokens.COMPILER_WITH_VIRTUALS, "%[virtuals=c,cxx] clang"),
Token(SpecTokens.START_EDGE_PROPERTIES, value="%["),
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=fortran"),
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.VERSION, value="@14.1"),
Token(SpecTokens.START_EDGE_PROPERTIES, value="%["),
Token(SpecTokens.KEY_VALUE_PAIR, value="virtuals=c,cxx"),
Token(SpecTokens.END_EDGE_PROPERTIES, value="]"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="clang"),
],
"zlib %[virtuals=fortran] gcc@14.1 %[virtuals=c,cxx] clang",
),
@ -650,6 +725,18 @@ def _specfile_for(spec_str, filename):
],
"gcc languages:=='c,c++'",
),
# test <variants> etc. after %
(
"mvapich %gcc languages:=c,c++ target=x86_64",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "mvapich"),
Token(SpecTokens.DEPENDENCY, "%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, "gcc"),
Token(SpecTokens.KEY_VALUE_PAIR, "languages:=c,c++"),
Token(SpecTokens.KEY_VALUE_PAIR, "target=x86_64"),
],
"mvapich %gcc languages:='c,c++' arch=None-None-x86_64",
),
],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_package):
@ -694,7 +781,8 @@ def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_p
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
Token(SpecTokens.VERSION, value="@1.1.1"),
Token(SpecTokens.KEY_VALUE_PAIR, value="cflags=-O3"),
Token(SpecTokens.COMPILER, value="%intel"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
],
["mvapich", "emacs @1.1.1 cflags=-O3 %intel"],
),
@ -706,10 +794,27 @@ def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_p
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="ncurses"),
Token(SpecTokens.COMPILER, value="%intel"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="intel"),
],
['mvapich cflags="-O3 -fPIC"', "emacs ^ncurses%intel"],
),
(
"mvapich %gcc languages=c,c++ emacs ^ncurses%gcc languages:=c",
[
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="mvapich"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.KEY_VALUE_PAIR, value="languages=c,c++"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="emacs"),
Token(SpecTokens.DEPENDENCY, value="^"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="ncurses"),
Token(SpecTokens.DEPENDENCY, value="%"),
Token(SpecTokens.UNQUALIFIED_PACKAGE_NAME, value="gcc"),
Token(SpecTokens.KEY_VALUE_PAIR, value="languages:=c"),
],
["mvapich %gcc languages=c,c++", "emacs ^ncurses%gcc languages:=c"],
),
],
)
def test_parse_multiple_specs(text, tokens, expected_specs):

View File

@ -420,13 +420,15 @@ def test_load_json_specfiles(specfile, expected_hash, reader_cls):
openmpi_edges = s2.edges_to_dependencies(name="openmpi")
assert len(openmpi_edges) == 1
# Check that virtuals have been reconstructed
assert "mpi" in openmpi_edges[0].virtuals
# Check that virtuals have been reconstructed for specfiles conforming to
# version 4 on.
if reader_cls.SPEC_VERSION >= spack.spec.SpecfileV4.SPEC_VERSION:
assert "mpi" in openmpi_edges[0].virtuals
# The virtuals attribute must be a tuple, when read from a
# JSON or YAML file, not a list
for edge in s2.traverse_edges():
assert isinstance(edge.virtuals, tuple), edge
# The virtuals attribute must be a tuple, when read from a
# JSON or YAML file, not a list
for edge in s2.traverse_edges():
assert isinstance(edge.virtuals, tuple), edge
# Ensure we can format {compiler} tokens
assert s2.format("{compiler}") != "none"

View File

@ -52,9 +52,9 @@ def test_rfc_remote_local_path_no_dest():
packages_yaml_sha256 = (
"8b69d9c6e983dfb8bac2ddc3910a86265cffdd9c85f905c716d426ec5b0d9847"
"6a1b26c857ca7e5bcd7342092e2f218da43d64b78bd72771f603027ea3c8b4af"
if sys.platform != "win32"
else "182a5cdfdd88f50be23e55607b46285854c664c064e5a9f3f1e0200ebca6a1db"
else "ae3239d769f9e6dc137a998489b0d44c70b03e21de4ecd6a623a3463a1a5c3f4"
)

View File

@ -10,7 +10,7 @@ def get_spack_sys_paths(spack_prefix):
"""Given a spack prefix, return all the paths Spack needs to function."""
spack_libs = os.path.join(spack_prefix, "lib", "spack")
external_libs = os.path.join(spack_libs, "external")
# spack externals take precedence, then vendored packages, then spack itself
# spack externals take precedence, then spack itself
return [external_libs, spack_libs]

View File

@ -14,3 +14,4 @@ markers =
disable_clean_stage_check: avoid failing tests if there are leftover files in the stage area
not_on_windows: mark tests that are skipped on Windows
only_windows: mark tests that are skipped everywhere but Windows
requires_builtin: tests that require the builtin repository

View File

@ -1784,7 +1784,7 @@ _spack_repo() {
then
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY="create list add remove rm"
SPACK_COMPREPLY="create list add remove rm migrate"
fi
}
@ -1828,6 +1828,15 @@ _spack_repo_rm() {
fi
}
_spack_repo_migrate() {
if $list_options
then
SPACK_COMPREPLY="-h --help --fix"
else
_repos
fi
}
_spack_resource() {
if $list_options
then

View File

@ -2749,6 +2749,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a list -d 'show
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a add -d 'add a package source to Spack'"'"'s configuration'
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a remove -d 'remove a repository from Spack'"'"'s configuration'
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a rm -d 'remove a repository from Spack'"'"'s configuration'
complete -c spack -n '__fish_spack_using_command_pos 0 repo' -f -a migrate -d 'migrate a package repository to the latest Package API'
complete -c spack -n '__fish_spack_using_command repo' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command repo' -s h -l help -d 'show this help message and exit'
@ -2791,6 +2792,14 @@ complete -c spack -n '__fish_spack_using_command repo rm' -s h -l help -d 'show
complete -c spack -n '__fish_spack_using_command repo rm' -l scope -r -f -a '_builtin defaults system site user command_line'
complete -c spack -n '__fish_spack_using_command repo rm' -l scope -r -d 'configuration scope to modify'
# spack repo migrate
set -g __fish_spack_optspecs_spack_repo_migrate h/help fix
complete -c spack -n '__fish_spack_using_command_pos 0 repo migrate' $__fish_spack_force_files -a '(__fish_spack_repos)'
complete -c spack -n '__fish_spack_using_command repo migrate' -s h -l help -f -a help
complete -c spack -n '__fish_spack_using_command repo migrate' -s h -l help -d 'show this help message and exit'
complete -c spack -n '__fish_spack_using_command repo migrate' -l fix -f -a fix
complete -c spack -n '__fish_spack_using_command repo migrate' -l fix -d 'automatically fix the imports in the package files'
# spack resource
set -g __fish_spack_optspecs_spack_resource h/help
complete -c spack -n '__fish_spack_using_command_pos 0 resource' -f -a list -d 'list all resources known to spack (currently just patches)'
@ -2935,7 +2944,7 @@ complete -c spack -n '__fish_spack_using_command style' -s t -l tool -r -d 'spec
complete -c spack -n '__fish_spack_using_command style' -s s -l skip -r -f -a skip
complete -c spack -n '__fish_spack_using_command style' -s s -l skip -r -d 'specify tools to skip (choose from import, isort, black, flake8, mypy)'
complete -c spack -n '__fish_spack_using_command style' -l spec-strings -f -a spec_strings
complete -c spack -n '__fish_spack_using_command style' -l spec-strings -d 'upgrade spec strings in Python, JSON and YAML files for compatibility with Spack v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag will be removed in Spack v1.0.'
complete -c spack -n '__fish_spack_using_command style' -l spec-strings -d 'upgrade spec strings in Python, JSON and YAML files for compatibility with Spack v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: must be used only on specs from spack v0.X.'
# spack tags
set -g __fish_spack_optspecs_spack_tags h/help i/installed a/all

View File

@ -38,6 +38,7 @@ class Apptainer(SingularityBase):
)
version("main", branch="main", get_full_repo=True) # apptainer version uses git describe
version("1.4.1", sha256="77f25c756397a0886baf462ffdde0e21fe528063505c67a51460c165094d166d")
version("1.4.0", sha256="204cded54046547cb3eb4c7874bdf45892fedc58b0d104195c59d2972cba51d3")
version("1.3.6", sha256="b5343369e7fdf67572f887d81f8d2b938f099fb39c876d96430d747935960d51")
version("1.3.5", sha256="fe1c977da952edf1056915b2df67ae2203ef06065d4e4901a237c902329306b2")
@ -64,6 +65,7 @@ class Apptainer(SingularityBase):
depends_on("go@1.19:", when="@1.2:")
depends_on("go@1.20:", when="@1.3:")
depends_on("go@1.22.7:", when="@1.4:")
depends_on("go@1.23.6:", when="@1.4.1:")
depends_on("gocryptfs@2.4:", type="run", when="@1.3:")
depends_on("squashfuse", type="run")
depends_on("squashfuse@0.5.1:", type="run", when="@1.3:")

View File

@ -137,6 +137,8 @@ class Caliper(CachedCMakePackage, CudaPackage, ROCmPackage):
conflicts("+libdw", "@:2.4")
conflicts("+rocm", "@:2.7")
conflicts("+rocm+cuda")
# Legacy nvtx is only supported until cuda@12.8, newer cuda only provides nvtx3.
conflicts("^cuda@12.9:", "@:2.12.1")
patch("for_aarch64.patch", when="@:2.11 target=aarch64:")
patch(

View File

@ -2,6 +2,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob
import os
from spack.package import *
@ -40,6 +43,7 @@ class Crtm(CMakePackage):
depends_on("crtm-fix@2.3.0_emc", when="@2.3.0 +fix")
depends_on("crtm-fix@2.4.0_emc", when="@=2.4.0 +fix")
depends_on("crtm-fix@2.4.0.1_emc", when="@2.4.0.1 +fix")
depends_on("crtm-fix@3.1.1", when="@3.1.1 +fix")
depends_on("ecbuild", type=("build"), when="@v2.3-jedi.4")
depends_on("ecbuild", type=("build"), when="@v2.4-jedi.1")
@ -51,7 +55,7 @@ class Crtm(CMakePackage):
license("CC0-1.0")
version(
"v3.1.1-build1", sha256="1ed49e594da5d3769cbaa52cc7fc19c1bb0325ee6324f6057227c31e2d95ca67"
"3.1.1-build1", sha256="1ed49e594da5d3769cbaa52cc7fc19c1bb0325ee6324f6057227c31e2d95ca67"
)
version(
"v3.1.0-skylabv8",
@ -85,6 +89,8 @@ class Crtm(CMakePackage):
def url_for_version(self, version):
if version > Version("v3") or version >= Version("3"):
fmtversion = str(version).replace("-build", "+build")
if not fmtversion.startswith("v"):
fmtversion = f"v{fmtversion}"
return f"https://github.com/JCSDA/CRTMv3/archive/refs/tags/{fmtversion}.tar.gz"
else:
return f"https://github.com/JCSDA/crtm/archive/refs/tags/{version}.tar.gz"
@ -101,3 +107,10 @@ def patch(self):
)
if not self.run_tests:
filter_file(r"add_subdirectory\(test\)", "# disable testing", "CMakeLists.txt")
@when("@3.1.1-build1")
@run_after("install")
def cmake_config_softlinks(self):
cmake_config_files = glob.glob(join_path(self.prefix, "cmake/crtm/*"))
for srcpath in cmake_config_files:
os.symlink(srcpath, join_path(self.prefix, "cmake", os.path.basename(srcpath)))

View File

@ -21,6 +21,16 @@
# format returned by platform.system() and 'arch' by platform.machine()
_versions = {
"12.9.0": {
"Linux-aarch64": (
"f3b7ae71f95d11de0a03ccfa1c0aff7be336d2199b50b1a15b03695fd15a6409",
"https://developer.download.nvidia.com/compute/cuda/12.9.0/local_installers/cuda_12.9.0_575.51.03_linux_sbsa.run",
),
"Linux-x86_64": (
"bbce2b760fe2096ca1c86f729e03bf377c1519add7b2755ecc4e9b0a9e07ee43",
"https://developer.download.nvidia.com/compute/cuda/12.9.0/local_installers/cuda_12.9.0_575.51.03_linux.run",
),
},
"12.8.1": {
"Linux-aarch64": (
"353cbab1b57282a1001071796efd95c1e40ec27a3375e854d12637eaa1c6107c",

View File

@ -15,6 +15,7 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
license("BSD-3-Clause")
version("0.10.0", sha256="cdee4e4fe5c5c08c5a7a5a3848175daa62884793988b4284c40df81cc2339c74")
version("0.9.0", sha256="0297afb46285745413fd4536d8d7fe123e3045d4899cc91eed501bcd4b588ea6")
version("0.8.0", sha256="4c30c33ee22417514d839a75d99ae4c24860078fb595ee24ce4ebf45fbce5e69")
version("0.7.3", sha256="8c829b72f4ea9c924abdb6fe2ac7489304be4056ab76b8eba226c33ce7b7dc0e")
@ -168,6 +169,8 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
depends_on(f"pika cuda_arch={arch}", when=f"cuda_arch={arch}")
depends_on(f"umpire cuda_arch={arch}", when=f"cuda_arch={arch}")
conflicts("cuda_arch=none")
patch(
"https://github.com/eth-cscs/DLA-Future/commit/efc9c176a7a8c512b3f37d079dec8c25ac1b7389.patch?full_index=1",
sha256="f40e4a734650f56c39379717a682d00d6400a7a102d90821542652824a8f64cd",

View File

@ -119,3 +119,50 @@ paths:
extra_attributes:
compilers:
c: ".*/bin/gcc-14$"
# Tests having two versions of gcc in the same folder
- layout:
- executables:
- "bin/gcc"
- "bin/g++"
script: |
if [ "$1" = "-dumpversion" ] ; then
echo "9"
elif [ "$1" = "-dumpfullversion" ] ; then
echo "9.4.0"
elif [ "$1" = "--version" ] ; then
echo "gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0"
echo "Copyright (C) 2019 Free Software Foundation, Inc."
echo "This is free software; see the source for copying conditions. There is NO"
echo "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
else
echo "mock executable got an unexpected flag: $1"
exit 1
fi
- executables:
- "bin/gcc-10"
script: |
if [ "$1" = "-dumpversion" ] ; then
echo "10"
elif [ "$1" = "-dumpfullversion" ] ; then
echo "10.5.0"
elif [ "$1" = "--version" ] ; then
echo "gcc-10 (Ubuntu 10.5.0-1ubuntu1~20.04) 10.5.0"
echo "Copyright (C) 2020 Free Software Foundation, Inc."
echo "This is free software; see the source for copying conditions. There is NO"
echo "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
else
echo "mock executable got an unexpected flag: $1"
exit 1
fi
platforms: ["darwin", "linux"]
results:
- spec: "gcc@9.4.0 languages:=c,c++"
extra_attributes:
compilers:
c: ".*/bin/gcc"
cxx: ".*/bin/g\\+\\+"
- spec: "gcc@10.5.0 languages:=c"
extra_attributes:
compilers:
c: ".*/bin/gcc-10"

View File

@ -15,6 +15,7 @@ class Gh(GoPackage):
license("MIT")
version("2.72.0", sha256="5a2cd4f2601d254d11a55dab463849ccccb5fa4bdcaa72b792ea9c3bf8c67d23")
version("2.70.0", sha256="9e2247e5b31131fd4ac63916b9483a065fcfb861ebb93588cf2ff42952ae08c5")
version("2.69.0", sha256="e2deb3759bbe4da8ad4f071ca604fda5c2fc803fef8b3b89896013e4b1c1fe65")
version("2.63.2", sha256="2578a8b1f00cb292a8094793515743f2a86e02b8d0b18d6b95959ddbeebd6b8d")

View File

@ -15,6 +15,7 @@ class Glab(GoPackage):
license("MIT")
version("1.57.0", sha256="b0c3333c7160a9d95ec820ceadd4636fe7760b19a23bc8957337812b191c03dd")
version("1.55.0", sha256="21f58698b92035461e8e8ba9040429f4b5a0f6d528d8333834ef522a973384c8")
version("1.54.0", sha256="99f5dd785041ad26c8463ae8630e98a657aa542a2bb02333d50243dd5cfdf9cb")
version("1.53.0", sha256="2930aa5dd76030cc6edcc33483bb49dd6a328eb531d0685733ca7be7b906e915")
@ -37,6 +38,7 @@ class Glab(GoPackage):
version("1.20.0", sha256="6beb0186fa50d0dea3b05fcfe6e4bc1f9be0c07aa5fa15b37ca2047b16980412")
with default_args(type="build"):
depends_on("go@1.24.2:", when="@1.56:")
depends_on("go@1.24.1:", when="@1.54:")
depends_on("go@1.23.4:", when="@1.52:")
depends_on("go@1.23.2:", when="@1.48:")

View File

@ -0,0 +1,57 @@
diff -r -u a/src/glew.c b/src/glew.c
--- a/src/glew.c 2020-03-15 04:53:59.000000000 -0700
+++ b/src/glew.c 2025-05-10 09:30:10.610259000 -0700
@@ -38,7 +38,15 @@
#if defined(GLEW_OSMESA)
# define GLAPI extern
+# ifndef APIENTRY
+# define APIENTRY
+# define GLEW_APIENTRY_DEFINED
+# endif
# include <GL/osmesa.h>
+# ifdef GLEW_APIENTRY_DEFINED
+# undef APIENTRY
+# undef GLEW_APIENTRY_DEFINED
+# endif
#elif defined(GLEW_EGL)
# include <GL/eglew.h>
#elif defined(_WIN32)
diff -r -u a/src/glewinfo.c b/src/glewinfo.c
--- a/src/glewinfo.c 2020-03-15 04:53:59.000000000 -0700
+++ b/src/glewinfo.c 2025-05-10 09:45:02.853885000 -0700
@@ -38,7 +38,15 @@
#include <GL/eglew.h>
#elif defined(GLEW_OSMESA)
#define GLAPI extern
+#ifndef APIENTRY
+# define APIENTRY
+# define GLEW_APIENTRY_DEFINED
+#endif
#include <GL/osmesa.h>
+#ifdef GLEW_APIENTRY_DEFINED
+# undef APIENTRY
+# undef GLEW_APIENTRY_DEFINED
+#endif
#elif defined(_WIN32)
#include <GL/wglew.h>
#elif !defined(__APPLE__) && !defined(__HAIKU__) || defined(GLEW_APPLE_GLX)
diff -r -u a/src/visualinfo.c b/src/visualinfo.c
--- a/src/visualinfo.c 2020-03-15 04:53:59.000000000 -0700
+++ b/src/visualinfo.c 2025-05-10 09:45:38.136185000 -0700
@@ -36,7 +36,15 @@
#include <GL/glew.h>
#if defined(GLEW_OSMESA)
#define GLAPI extern
+#ifndef APIENTRY
+# define APIENTRY
+# define GLEW_APIENTRY_DEFINED
+#endif
#include <GL/osmesa.h>
+#ifdef GLEW_APIENTRY_DEFINED
+# undef APIENTRY
+# undef GLEW_APIENTRY_DEFINED
+#endif
#elif defined(GLEW_EGL)
#include <GL/eglew.h>
#elif defined(_WIN32)

View File

@ -28,6 +28,9 @@ class Glew(CMakePackage):
# glu is already forcibly disabled in the CMakeLists.txt. This prevents
# it from showing up in the .pc file
patch("remove-pkgconfig-glu-dep.patch")
# Define APIENTRY in osmesa build if not defined, see
# https://github.com/nigels-com/glew/pull/407
patch("mesa-24.0.0-osmesa.patch", when="^mesa@24.0.0:")
def cmake_args(self):
spec = self.spec

View File

@ -25,7 +25,7 @@ class Grads(AutotoolsPackage):
variant("geotiff", default=True, description="Enable GeoTIFF support")
variant("shapefile", default=True, description="Enable Shapefile support")
variant("grib2", default=True, description="Enable GRIB2 support")
variant("grib2", default=True, description="Enable GRIB2 support with the g2c library.")
variant("dap", default=False, description="Enable DAP support")
# TODO: This variant depends on the "simple X" library, which is no longer available
@ -43,7 +43,7 @@ class Grads(AutotoolsPackage):
depends_on("hdf5", when="+hdf5")
depends_on("hdf", when="+hdf4")
depends_on("netcdf-c", when="+netcdf")
depends_on("g2c", when="+grib2")
depends_on("g2c+pic", when="+grib2")
depends_on("libgeotiff", when="+geotiff")
depends_on("shapelib", when="+shapefile")
depends_on("gadap", when="+dap")

View File

@ -96,7 +96,7 @@ class Hdf(AutotoolsPackage):
sha256="49733dd6143be7b30a28d386701df64a72507974274f7e4c0a9e74205510ea72",
when="@4.2.15:",
)
# https://github.com/NOAA-EMC/spack-stack/issues/317
# https://github.com/jcsda/spack-stack/issues/317
patch("hdfi_h_apple_m1.patch", when="@4.2.15: target=aarch64: platform=darwin")
@property
@ -160,6 +160,7 @@ def flag_handler(self, name, flags):
if (
self.spec.satisfies("%clang@16:")
or self.spec.satisfies("%apple-clang@15:")
or self.spec.satisfies("%oneapi")
or self.spec.satisfies("%gcc@14:")
):
flags.append("-Wno-error=implicit-int")
@ -218,6 +219,15 @@ def cached_tests_work_dir(self):
"""The working directory for cached test sources."""
return join_path(self.test_suite.current_test_cache_dir, self.extra_install_tests)
@run_after("install")
def remove_ncgen_ncdump(self):
"""Remove binaries ncdump and ncgen. These get built and
installed even if the netCDF API is turned off (known bug)."""
if self.spec.satisfies("~netcdf"):
exes_to_remove = ["ncdump", "ncgen"]
for exe in exes_to_remove:
os.remove(os.path.join(self.prefix.bin, exe))
@run_after("install")
def setup_build_tests(self):
"""Copy the build test files after the package is installed to an

View File

@ -1,81 +0,0 @@
--- hpx/cmake/GitExternal.cmake.orig 2019-08-16 17:27:04.856315000 +0200
+++ hpx/cmake/GitExternal.cmake 2019-08-16 17:27:47.362709000 +0200
@@ -19,12 +19,12 @@
#
# [optional] Flags which control behaviour
# NO_UPDATE
-# When set, GitExternal will not change a repo that has already been checked out.
-# The purpose of this is to allow one to set a default branch to be checked out,
-# but stop GitExternal from changing back to that branch if the user has checked
+# When set, GitExternal will not change a repo that has already been checked out.
+# The purpose of this is to allow one to set a default branch to be checked out,
+# but stop GitExternal from changing back to that branch if the user has checked
# out and is working on another.
-# VERBOSE
-# When set, displays information about git commands that are executed
+# VERBOSE
+# When set, displays information about git commands that are executed
#
find_package(Git)
@@ -55,17 +55,21 @@
if(nok)
message(FATAL_ERROR "${DIR} git clone failed: ${error}\n")
endif()
- endif()
- if(IS_DIRECTORY "${DIR}/.git")
+ # checkout requested tag
+ GIT_EXTERNAL_MESSAGE("git checkout -q ${TAG}")
+ execute_process(
+ COMMAND "${GIT_EXECUTABLE}" checkout -q "${TAG}"
+ RESULT_VARIABLE nok ERROR_VARIABLE error
+ WORKING_DIRECTORY "${DIR}"
+ )
+ if(nok)
+ message(STATUS "${DIR} git checkout ${TAG} failed: ${error}\n")
+ endif()
+ elseif(IS_DIRECTORY "${DIR}/.git")
if (${GIT_EXTERNAL_NO_UPDATE})
GIT_EXTERNAL_MESSAGE("Update branch disabled by user")
else()
- GIT_EXTERNAL_MESSAGE("current ref is \"${currentref}\" and tag is \"${TAG}\"")
- if(currentref STREQUAL TAG) # nothing to do
- return()
- endif()
-
# reset generated files
foreach(GIT_EXTERNAL_RESET_FILE ${GIT_EXTERNAL_RESET})
GIT_EXTERNAL_MESSAGE("git reset -q ${GIT_EXTERNAL_RESET_FILE}")
@@ -100,15 +104,24 @@
message(STATUS "${DIR} git checkout ${TAG} failed: ${error}\n")
endif()
- # update tag
- GIT_EXTERNAL_MESSAGE("git rebase FETCH_HEAD")
- execute_process(COMMAND ${GIT_EXECUTABLE} rebase FETCH_HEAD
- RESULT_VARIABLE RESULT OUTPUT_VARIABLE OUTPUT ERROR_VARIABLE OUTPUT
+ # check if this is a branch
+ GIT_EXTERNAL_MESSAGE("git symbolic-ref -q HEAD")
+ execute_process(COMMAND "${GIT_EXECUTABLE}" symbolic-ref -q HEAD
+ RESULT_VARIABLE nok ERROR_VARIABLE error
WORKING_DIRECTORY "${DIR}")
- if(RESULT)
- message(STATUS "git rebase failed, aborting ${DIR} merge")
- execute_process(COMMAND ${GIT_EXECUTABLE} rebase --abort
+ if(nok)
+ message(STATUS "${TAG} is not a branch")
+ else()
+ # update tag
+ GIT_EXTERNAL_MESSAGE("git rebase FETCH_HEAD")
+ execute_process(COMMAND ${GIT_EXECUTABLE} rebase FETCH_HEAD
+ RESULT_VARIABLE RESULT OUTPUT_VARIABLE OUTPUT ERROR_VARIABLE OUTPUT
WORKING_DIRECTORY "${DIR}")
+ if(RESULT)
+ message(STATUS "git rebase failed, aborting ${DIR} merge")
+ execute_process(COMMAND ${GIT_EXECUTABLE} rebase --abort
+ WORKING_DIRECTORY "${DIR}")
+ endif()
endif()
endif()
else()

View File

@ -352,7 +352,7 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None:
if spec.satisfies("+mpi"):
env.set("CC", spec["mpi"].mpicc)
env.set("CXX", spec["mpi"].mpicxx)
if spec.satisfies("+fortan"):
if spec.satisfies("+fortran"):
env.set("F77", spec["mpi"].mpif77)
if spec.satisfies("+cuda"):
@ -398,8 +398,9 @@ def cache_test_sources(self):
makefile = join_path(install_test_root(self), self.extra_install_tests, "Makefile")
filter_file(r"^HYPRE_DIR\s* =.*", f"HYPRE_DIR = {self.prefix}", makefile)
filter_file(r"^CC\s*=.*", f"CC = {os.environ['CC']}", makefile)
filter_file(r"^F77\s*=.*", f"F77 = {os.environ['F77']}", makefile)
filter_file(r"^CXX\s*=.*", f"CXX = {os.environ['CXX']}", makefile)
if self.spec.satisfies("+fortran"):
filter_file(r"^F77\s*=.*", f"F77 = {os.environ['F77']}", makefile)
@property
def _cached_tests_work_dir(self):

View File

@ -26,6 +26,12 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage):
depends_on("intel-oneapi-mpi")
version(
"2021.15.2",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/211d0333-b11d-4273-b63b-0e71e08a392a/intel-oneccl-2021.15.2.7_offline.sh",
sha256="0dd3b9e5085fc0d40edf435c41c3a9c058a175aa5ef795a0f94205b42bcf8c75",
expand=False,
)
version(
"2021.15.1",
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/422b4c09-4f3b-4e4d-b74e-502775398c9a/intel-oneccl-2021.15.1.5_offline.sh",

View File

@ -17,6 +17,8 @@ class Ip(CMakePackage):
maintainers("AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA")
version("develop", branch="develop")
version("5.3.0", sha256="17dfcb52bab58d3f1bcbbdda5e76430020d963097139e1ba240bfc5fb5c5a5d1")
version("5.2.0", sha256="2f7b44abcf24e448855f57d107db55d3d58cbc271164ba083491d0c07a7ea3d0")
version("5.1.0", sha256="5279f11f4c12db68ece74cec392b7a2a6b5166bc505877289f34cc3149779619")
version("5.0.0", sha256="54b2987bd4f94adc1f7595d2a384e646019c22d163bcd30840a916a6abd7df71")
version("4.4.0", sha256="858d9201ce0bc4d16b83581ef94a4a0262f498ed1ea1b0535de2e575da7a8b8c")

View File

@ -19,6 +19,7 @@ class JacamarCi(GoPackage):
license("Apache-2.0 OR MIT")
version("develop", branch="develop")
version("0.26.0", sha256="da63c396726af313804da5ec3704ce3754ba3eef5ca267746b594422f542dbea")
version("0.25.0", sha256="20626ed931f5bf6ba1d5a2dd56af5793efa69a4f355bdac9b8bf742aaf806653")
version("0.24.2", sha256="d2b8be464b88a92df0ad2ba1e846226b993c4162779432cb8366fb9bca5c40db")
version("0.24.1", sha256="fe1036fee2e97e38457212bf1246895803eeb6e1a6aa1ecd24eba1d3ea994029")
@ -26,6 +27,7 @@ class JacamarCi(GoPackage):
conflicts("platform=darwin", msg="Jacamar CI does not support MacOS")
depends_on("go@1.23:", type="build", when="@0.26.0:")
depends_on("go@1.22.7:", type="build", when="@0.23.0:")
depends_on("gmake", type="build")
depends_on("libc", type="link")

View File

@ -121,10 +121,10 @@ class Legion(CMakePackage, ROCmPackage):
depends_on("mpi", when="conduit=mpi")
depends_on("cuda@10.0:11.9", when="+cuda_unsupported_compiler @21.03.0:23.03.0")
depends_on("cuda@10.0:11.9", when="+cuda @21.03.0:23.03.0")
depends_on("cuda@11.7:", when="+cuda_unsupported_compiler @23.06.0:")
depends_on("cuda@11.7:", when="+cuda @23.06.0:")
depends_on("cuda@11.7:", when="+cuda_unsupported_compiler @stable")
depends_on("cuda@11.7:", when="+cuda @stable")
depends_on("cuda@11.7:12.8", when="+cuda_unsupported_compiler @23.06.0:")
depends_on("cuda@11.7:12.8", when="+cuda @23.06.0:")
depends_on("cuda@11.7:12.8", when="+cuda_unsupported_compiler @stable")
depends_on("cuda@11.7:12.8", when="+cuda @stable")
depends_on("hip@5.1:5.7", when="+rocm @23.03.0:23.12.0")
depends_on("hip@5.1:", when="+rocm")
depends_on("hdf5", when="+hdf5")

View File

@ -16,7 +16,7 @@ class Met(AutotoolsPackage):
url = "https://github.com/dtcenter/MET/archive/refs/tags/v11.0.1.tar.gz"
git = "https://github.com/dtcenter/MET"
maintainers("AlexanderRichert-NOAA")
maintainers("AlexanderRichert-NOAA", "climbfuji")
version("develop", branch="develop")
version("12.0.1", sha256="ef396a99ca6c2248855848cd194f9ceaf3b051fb5e8c01a0b0b2a00110b1fcfb")
@ -67,10 +67,8 @@ class Met(AutotoolsPackage):
patch("openmp_shape_patch.patch", when="@10.1.0")
# https://github.com/JCSDA/spack-stack/issues/615
# TODO(srherbener) Apple clang 14.x is getting pickier! When these updates are
# merged into the MET code base, the following two patches can be removed.
patch("apple-clang-string-cast-operator.patch", when="@10.1.1: %apple-clang@14:")
patch("apple-clang-no-register.patch", when="@10.1.1: %apple-clang@14:")
patch("apple-clang-string-cast-operator.patch", when="@10.1.1:11.0 %apple-clang@14:")
patch("apple-clang-no-register.patch", when="@10.1.1:11.0 %apple-clang@14:")
def url_for_version(self, version):
if version < Version("11"):

View File

@ -15,7 +15,7 @@ class Metplus(Package):
url = "https://github.com/dtcenter/METplus/archive/refs/tags/v4.1.0.tar.gz"
git = "https://github.com/dtcenter/METplus"
maintainers("AlexanderRichert-NOAA")
maintainers("AlexanderRichert-NOAA", "climbfuji")
version("develop", branch="develop")
version("6.0.0", sha256="e9358aede2fd2abecd81806227de7b165d68fdf2fc9defcbba24df229461b155")

View File

@ -35,10 +35,12 @@ class Mgis(CMakePackage):
# released version
version(
"3.0",
sha256="dae915201fd20848b69745dabda1a334eb242d823af600825b8b010ddc597640",
"3.0.1",
sha256="fb9a7f5008a43c70bdb1c4b80f32f7fd3e4274c912b93c36af7011d3c4f93039",
preferred=True,
)
version("3.0", sha256="dae915201fd20848b69745dabda1a334eb242d823af600825b8b010ddc597640")
version("2.2.1", sha256="a0e6af65f5fd2237f39306354ef786eadb0c6bc6868c23e2681e04a83e629ad2")
version("2.2", sha256="b3776d7b3a534ca626525a42b97665f7660ae2b28ea57b3f53fd7e8538da1ceb")
version("2.1", sha256="f5b556aab130da0c423f395fe4c35d6bf509dd8fc958242f2e37ea788464aea9")
version("2.0", sha256="cb427d77f2c79423e969815b948a8b44da33a4370d1760e8c1e22a569f3585e2")
@ -60,8 +62,10 @@ class Mgis(CMakePackage):
depends_on("cxx", type="build") # generated
depends_on("fortran", type="build") # generated
depends_on("tfel@5.0.0", when="@3.0")
depends_on("tfel@4.2.0", when="@2.2")
depends_on("tfel@5.0.1", when="@3.0.1")
depends_on("tfel@5.0.0", when="@3.0.0")
depends_on("tfel@4.2.3", when="@2.2.1")
depends_on("tfel@4.2.0", when="@2.2.0")
depends_on("tfel@4.1.0", when="@2.1")
depends_on("tfel@4.0.0", when="@2.0")
depends_on("tfel@3.4.3", when="@1.2.2")
@ -112,6 +116,8 @@ def cmake_args(self):
args = []
args.append("-DUSE_EXTERNAL_COMPILER_FLAGS=ON")
args.append("-Denable-website=OFF")
args.append("-Denable-doxygen-doc=OFF")
for i in ["c", "fortran", "python"]:
if "+" + i in self.spec:

View File

@ -139,6 +139,7 @@ class NetcdfC(CMakePackage, AutotoolsPackage):
variant("zstd", default=True, description="Enable Zstandard compression plugin")
depends_on("c", type="build")
depends_on("cxx", type="build", when="build_system=cmake")
with when("build_system=cmake"):
# Based on the versions required by the root CMakeLists.txt:

View File

@ -1,47 +1,47 @@
import re
import urllib.request
from html.parser import HTMLParser
class my_html_parser(HTMLParser):
    """Scrape process names from the OpenLoops process-library HTML page.

    A small state machine walks the document:
        0 -- waiting for the literal data chunk "Categories"
        1 -- "Categories" seen; waiting for that table to close
        2 -- scanning for the next process-table row
        3 -- inside a row; waiting for its first cell
        4 -- inside a cell; the next data chunk is a candidate name
    Accepted names are accumulated in ``self.processes``.
    """

    def error(self, message):
        # Parse errors are deliberately ignored.
        pass

    def __init__(self):
        super().__init__()
        self.state = 0
        self.processes = []

    def handle_starttag(self, tag, attrs):
        if self.state == 2 and tag == "tr":
            # Rows carrying an "id" attribute are not process rows: skip.
            if any(name == "id" for name, _value in attrs):
                return
            self.state = 3
        elif self.state == 3 and tag == "td":
            self.state = 4

    def handle_endtag(self, tag):
        if self.state == 1 and tag == "table":
            self.state = 2

    def handle_data(self, data):
        if self.state == 0 and data == "Categories":
            self.state = 1
        elif self.state == 4:
            # Accept only names built from lowercase letters, digits and "_".
            if not re.findall("[^a-z0-9_]", data):
                self.processes.append(data)
                self.state = 2
if __name__ == "__main__":
    # Fetch the public OpenLoops process-library page and print every
    # process name found in its table.
    url = "https://openloops.hepforge.org/process_library.php?repo=public"
    data = urllib.request.urlopen(url).read()
    parser = my_html_parser()
    parser.feed(data.decode("UTF8"))
    print(parser.processes)
import re
import urllib.request
from html.parser import HTMLParser
class my_html_parser(HTMLParser):
    """Scrape process names from the OpenLoops process-library HTML page.

    ``self.state`` tracks progress through the document:
        0 -- waiting for the literal data chunk "Categories"
        1 -- "Categories" seen; waiting for that table to close
        2 -- scanning for the next process-table row
        3 -- inside a row; waiting for its first cell
        4 -- inside a cell; the next data chunk is a candidate name
    """

    def error(self, message):
        # Parse errors are deliberately ignored.
        pass

    def __init__(self):
        super().__init__()
        self.state = 0  # state-machine position (see class docstring)
        self.processes = []  # accepted process names

    def handle_starttag(self, tag, attrs):
        if tag == "tr" and self.state == 2:
            # Rows carrying an "id" attribute are not process rows: skip.
            for att in attrs:
                if att[0] == "id":
                    return
            self.state = 3
        if tag == "td" and self.state == 3:
            self.state = 4
        pass

    def handle_endtag(self, tag):
        if tag == "table" and self.state == 1:
            self.state = 2
        pass

    def handle_data(self, data):
        # print("Encountered some data :", data)
        if data == "Categories" and self.state == 0:
            self.state = 1
        if self.state == 4:
            # Accept only names built from lowercase letters, digits and "_";
            # anything else leaves the cell contents unrecorded.
            if re.findall("[^a-z0-9_]", data):
                return
            self.processes.append(data)
            self.state = 2
if __name__ == "__main__":
    # Fetch the public OpenLoops process-library page and print every
    # process name found in its table.
    url = "https://openloops.hepforge.org/process_library.php?repo=public"
    data = urllib.request.urlopen(url).read()
    parser = my_html_parser()
    parser.feed(data.decode("UTF8"))
    print(parser.processes)

View File

@ -750,6 +750,13 @@ def revert_kokkos_nvcc_wrapper(self):
env["MPICXX_CXX"] = env["CXX"]
def configure(self, spec, prefix):
if spec.satisfies("@:3.23.1 +cuda ^cuda@12.9:"):
filter_file(
"libnvToolsExt.a",
"libnvtx3interop.a",
"config/BuildSystem/config/packages/cuda.py",
string=True,
)
self.revert_kokkos_nvcc_wrapper()
python("configure", "--prefix=%s" % prefix, *self.configure_options())

View File

@ -1,25 +1,25 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyCairosvg(PythonPackage):
    """
    CairoSVG is an SVG converter based on Cairo.
    It can export SVG files to PDF, EPS, PS, and PNG files.
    """

    homepage = "https://cairosvg.org/"
    pypi = "CairoSVG/CairoSVG-2.7.1.tar.gz"

    version("2.7.1", sha256="432531d72347291b9a9ebfb6777026b607563fd8719c46ee742db0aef7271ba0")

    # Interpreter and build-backend requirements.
    depends_on("python@3.5:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    # Library dependencies needed at both build and run time.
    depends_on("py-cairocffi", type=("build", "run"))
    depends_on("py-cssselect2", type=("build", "run"))
    depends_on("py-defusedxml", type=("build", "run"))
    depends_on("py-pillow", type=("build", "run"))
    depends_on("py-tinycss2", type=("build", "run"))
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyCairosvg(PythonPackage):
    """
    CairoSVG is an SVG converter based on Cairo.
    It can export SVG files to PDF, EPS, PS, and PNG files.
    """

    homepage = "https://cairosvg.org/"
    pypi = "CairoSVG/CairoSVG-2.7.1.tar.gz"

    version("2.7.1", sha256="432531d72347291b9a9ebfb6777026b607563fd8719c46ee742db0aef7271ba0")

    # Interpreter and build-backend requirements.
    depends_on("python@3.5:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    # Library dependencies needed at both build and run time.
    depends_on("py-cairocffi", type=("build", "run"))
    depends_on("py-cssselect2", type=("build", "run"))
    depends_on("py-defusedxml", type=("build", "run"))
    depends_on("py-pillow", type=("build", "run"))
    depends_on("py-tinycss2", type=("build", "run"))

View File

@ -1,22 +1,22 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyCssselect2(PythonPackage):
    """
    cssselect2 is a straightforward implementation of CSS4 Selectors for markup
    documents (HTML, XML, etc.) that can be read by ElementTree-like parsers
    (including cElementTree, lxml, html5lib, etc.)
    """

    homepage = "https://github.com/Kozea/cssselect2"
    pypi = "cssselect2/cssselect2-0.7.0.tar.gz"

    version("0.7.0", sha256="1ccd984dab89fc68955043aca4e1b03e0cf29cad9880f6e28e3ba7a74b14aa5a")

    # Interpreter and build-backend requirements.
    depends_on("python@3.7:", type=("build", "run"))
    depends_on("py-flit-core@3.2:3", type="build")
    # Library dependencies needed at both build and run time.
    depends_on("py-tinycss2", type=("build", "run"))
    depends_on("py-webencodings", type=("build", "run"))
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyCssselect2(PythonPackage):
    """
    cssselect2 is a straightforward implementation of CSS4 Selectors for markup
    documents (HTML, XML, etc.) that can be read by ElementTree-like parsers
    (including cElementTree, lxml, html5lib, etc.)
    """

    homepage = "https://github.com/Kozea/cssselect2"
    pypi = "cssselect2/cssselect2-0.7.0.tar.gz"

    version("0.7.0", sha256="1ccd984dab89fc68955043aca4e1b03e0cf29cad9880f6e28e3ba7a74b14aa5a")

    # Interpreter and build-backend requirements.
    depends_on("python@3.7:", type=("build", "run"))
    depends_on("py-flit-core@3.2:3", type="build")
    # Library dependencies needed at both build and run time.
    depends_on("py-tinycss2", type=("build", "run"))
    depends_on("py-webencodings", type=("build", "run"))

View File

@ -16,6 +16,7 @@ class PyJsonargparse(PythonPackage):
license("MIT")
version("4.39.0", sha256="d72f9e84c251aeac6cd3cd5d91212f4a7c47be9a58e642dd3bce4b990543b360")
version("4.35.0", sha256="815ecd190e4004d2ce69b184fe16915f8f81fd32ae3e479fa37fbb9b89130446")
version("4.28.0", sha256="ac835a290ef18cc2a5309e6bfa8ada9c5d63f46ff18701583fc8f3e95314679c")
version("4.27.5", sha256="88ad908387ea5c8285a48e7d94bfd025e6f536b0dbae616d755d701248ab85d9")

View File

@ -0,0 +1,15 @@
--- a/setup.py 2022-04-19 20:56:49.000000000 -0600
+++ b/setup.py 2022-04-19 20:58:52.000000000 -0600
@@ -54,9 +54,9 @@
for ext in self.extensions:
ext.include_dirs.insert(0, cppy.get_include())
ext.extra_compile_args = opts
- if sys.platform == 'darwin':
- ext.extra_compile_args += ['-stdlib=libc++']
- ext.extra_link_args += ['-stdlib=libc++']
+ #if sys.platform == 'darwin':
+ # ext.extra_compile_args += ['-stdlib=libc++']
+ # ext.extra_link_args += ['-stdlib=libc++']
if (ct == 'msvc' and os.environ.get('KIWI_DISABLE_FH4')):
# Disable FH4 Exception Handling implementation so that we don't
# require VCRUNTIME140_1.dll. For more details, see:

View File

@ -38,3 +38,7 @@ class PyKiwisolver(PythonPackage):
depends_on("py-cppy@1.2.0:", type="build", when="@1.4.4:")
depends_on("py-cppy@1.3.0:", type="build", when="@1.4.8:")
depends_on("py-typing-extensions", when="@1.4.4: ^python@:3.7", type=("build", "run"))
# https://github.com/spack/spack/issues/28522
# https://github.com/nucleic/kiwi/issues/126
patch("macos-gcc.patch", when="@:1.3.2 platform=darwin %gcc")

View File

@ -22,6 +22,7 @@ class PyKornia(PythonPackage):
"adamjstewart",
)
version("0.8.1", sha256="9ce5a54a11df661794934a293f89f8b8d49e83dd09b0b9419f6082ab07afe433")
version("0.8.0", sha256="a0ffc31106e8d777a8df693572ad5ea11f7236b8bc1d452754f5e57de012ea9a")
version("0.7.4", sha256="1f8dd6268ca5a2f2ec04b13c48da4dfb90ba2cfae7e31e0cc80d37f6520fa3f1")
version("0.7.3", sha256="0eb861ea5d7e6c3891ae699a8b7103a5783af0a7c41888ca482420dd3d055306")
@ -47,6 +48,7 @@ class PyKornia(PythonPackage):
depends_on("py-setuptools", type="build")
# requirements/requirements.txt
depends_on("py-kornia-rs@0.1.9:", when="@0.8.1:", type=("build", "run"))
depends_on("py-kornia-rs@0.1:", when="@0.7.2:", type=("build", "run"))
depends_on("py-packaging", when="@0.6:", type=("build", "run"))
depends_on("py-torch@1.9.1:", when="@0.6.9:", type=("build", "run"))

View File

@ -22,12 +22,12 @@ class PyKorniaRs(PythonPackage):
"adamjstewart",
)
version("0.1.9", sha256="a9b8a6afa00d80c9b1b1e3e5ff650762dac9605829a4f768ff5aedf47649efc2")
version("0.1.1", sha256="b9ac327fae6e982e6d7df9faeadd1d4f6453e65521819ae9ae5b90e9da0ed1a5")
version("0.1.0", sha256="0fca64f901dddff49b72e51fc92a25f0a7606e9a1a72ef283606245ea6b4f90d")
build_directory = "py-kornia"
depends_on("py-maturin@1.3.2:", type="build")
depends_on("py-maturin@1", when="@0.1.6:", type="build")
depends_on("py-maturin@1.3.2:", when="@:0.1.5", type="build")
# rav1e needs rustdoc
depends_on("rust+dev", type="build")
@ -40,3 +40,7 @@ class PyKorniaRs(PythonPackage):
# dlpack-rs needs libclang
depends_on("llvm+clang")
@property
def build_directory(self):
return "kornia-py" if self.spec.satisfies("@0.1.3:") else "py-kornia"

View File

@ -1,17 +1,17 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyNarwhals(PythonPackage):
    """Extremely lightweight compatibility layer between dataframe libraries"""

    homepage = "https://github.com/narwhals-dev/narwhals"
    pypi = "narwhals/narwhals-1.8.1.tar.gz"

    version("1.8.1", sha256="97527778e11f39a1e5e2113b8fbb9ead788be41c0337f21852e684e378f583e8")

    # Interpreter requirement and hatchling build backend.
    depends_on("python@3.8:", type=("build", "run"))
    depends_on("py-hatchling", type=("build"))
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyNarwhals(PythonPackage):
    """Extremely lightweight compatibility layer between dataframe libraries"""

    homepage = "https://github.com/narwhals-dev/narwhals"
    pypi = "narwhals/narwhals-1.8.1.tar.gz"

    version("1.8.1", sha256="97527778e11f39a1e5e2113b8fbb9ead788be41c0337f21852e684e378f583e8")

    # Interpreter requirement and hatchling build backend.
    depends_on("python@3.8:", type=("build", "run"))
    depends_on("py-hatchling", type=("build"))

View File

@ -14,6 +14,8 @@ class PyPreCommit(PythonPackage):
license("MIT")
version("4.2.0", sha256="601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146")
version("4.1.0", sha256="ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4")
version("4.0.1", sha256="80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2")
version("3.8.0", sha256="8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af")
version("3.7.1", sha256="8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a")

View File

@ -46,7 +46,6 @@ class PyPsyclone(PythonPackage):
depends_on("py-graphviz", type=("build", "run"))
depends_on("py-configparser", type=("build", "run"))
depends_on("py-jinja2", type="build")
depends_on("py-jsonschema", type=("build", "run"), when="@2.5.0:")
depends_on("py-sympy", type=("build", "run"), when="@2.2.0:")
depends_on("py-termcolor", type=("build", "run"))

View File

@ -19,6 +19,8 @@ class PyPyogrio(PythonPackage):
version("0.9.0", sha256="6a6fa2e8cf95b3d4a7c0fac48bce6e5037579e28d3eb33b53349d6e11f15e5a8")
depends_on("python@3.8:", type=("build", "run"))
depends_on("py-setuptools", type="build")
depends_on("py-wheel", type="build")
depends_on("gdal@2.4:", type=("build", "link", "run"))
depends_on("py-cython@0.29:", type="build")
depends_on("py-versioneer@0.28 +toml", type="build")

View File

@ -17,6 +17,7 @@ class PyPyscf(PythonPackage):
license("Apache-2.0")
version("2.9.0", sha256="821dc882f3a5485d8f202abb093330cf1497b26767ba2a1a63b7fa600ddc58a3")
version("2.8.0", sha256="db720372e7f7d1aa2df0fb90c07f483da363730197c937a6378952d34b6abf3d")
version("2.7.0", sha256="ca8efc2f28d72c3130f26a967e7fa8d0bbc4a6b47d16a7c4c732ec85a31b7eec")
version("2.6.2", sha256="744c89a8e4d38c4b5562f75fa68f9d079faeb23602d255fba0eb6d1bac97bca2")
@ -33,7 +34,8 @@ class PyPyscf(PythonPackage):
version("1.7.5", sha256="52856b39f0ada2f6340757caa65dc5c1d9a3cdfceea2a6615ad8af92664a6c69")
version("1.7.3", sha256="62a26146a222140395b276ea33182f87809a21989ddcf78e2dcb8e35ebc57af2")
depends_on("c", type="build") # generated
depends_on("c", type="build")
depends_on("cxx", type="build")
# dependencies
depends_on("cmake@3.10:", type="build", when="@2.1:")
@ -41,6 +43,7 @@ class PyPyscf(PythonPackage):
depends_on("python@3.6:", type=("build", "run"), when="@2.1:")
depends_on("python@2.6:", type=("build", "run"))
depends_on("py-setuptools", type="build")
depends_on("py-setuptools@61.0:", type="build", when="@2.9.0:")
depends_on("py-numpy@1.8.0:", type=("build", "run"))
depends_on("py-numpy@1.13.0:", type=("build", "run"), when="@2:")
depends_on("py-numpy@1", type=("build", "run"), when="@:2.6.0")

View File

@ -16,6 +16,7 @@ class PyRuamelYamlClib(PythonPackage):
version("0.2.12", sha256="6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f")
version("0.2.7", sha256="1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497")
version("0.2.4", sha256="f997f13fd94e37e8b7d7dbe759088bb428adc6570da06b64a913d932d891ac8d")
with default_args(deprecated=True):
version("0.2.0", sha256="b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c")

View File

@ -0,0 +1,22 @@
From 809f3658b4d7d8147f6c2f79f8aa23b50a232247 Mon Sep 17 00:00:00 2001
From: Alexander Grund <alexander.grund@tu-dresden.de>
Date: Mon, 31 May 2021 16:31:59 +0200
Subject: [PATCH] Pass WITH_BLAS option from environment to CMake
Allows to choose the BLAS backend with Eigen
---
tools/setup_helpers/cmake.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/tools/setup_helpers/cmake.py b/tools/setup_helpers/cmake.py
index 2309ad3bdc52a5c728f41d21eb5ff37daf5d1dd6..60b7bbd47ff222ef9c41604ffed1e452860aebc6 100644
--- a/tools/setup_helpers/cmake.py
+++ b/tools/setup_helpers/cmake.py
@@ -235,6 +235,7 @@ def generate(self, version, cmake_python_library, build_python, build_test, my_e
# CMakeLists.txt.
var: var for var in
('BLAS',
+ 'WITH_BLAS',
'BUILDING_WITH_TORCH_LIBS',
'CUDA_HOST_COMPILER',
'CUDA_NVCC_EXECUTABLE',

View File

@ -0,0 +1,115 @@
From e9bfe6f07faeaeba252cc426c2539b4b50326796 Mon Sep 17 00:00:00 2001
From: Nathan Brown <nathan.brown@arm.com>
Date: Tue, 4 Feb 2025 15:51:24 +0000
Subject: [PATCH] gloo: fix building system gloo with CUDA/HIP
Fix incorrect linking of Gloo's libraries when building with system
Gloo. Previously, either Gloo's native library or Gloo's CUDA library
were linked. However, Gloo had changed such that all users of Gloo must
link the native library, and can optionally link the CUDA or HIP
library for Gloo + CUDA/HIP support.
This had been updated when building/linking with vendored Gloo, but not
when using system Gloo.
Fixes: #146239
Reported-by: Adam J Stewart <ajstewart426@gmail.com>
Signed-off-by: Nathan Brown <nathan.brown@arm.com>
---
cmake/Dependencies.cmake | 11 ++++++++--
cmake/Modules/FindGloo.cmake | 39 +++++++++++++++---------------------
2 files changed, 25 insertions(+), 25 deletions(-)
diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake
index 9342555d9bc7e40086c87fa7c199da18031ce808..c680e4995fb67000f6e545fe09190643dcf7ee25 100644
--- a/cmake/Dependencies.cmake
+++ b/cmake/Dependencies.cmake
@@ -1192,10 +1192,17 @@ if(USE_GLOO)
if(NOT Gloo_FOUND)
message(FATAL_ERROR "Cannot find gloo")
endif()
- message("Found gloo: ${Gloo_LIBRARY}")
+ message("Found gloo: ${Gloo_NATIVE_LIBRARY}, cuda lib: ${Gloo_CUDA_LIBRARY}, hip lib: ${Gloo_HIP_LIBRARY}")
message("Found gloo include directories: ${Gloo_INCLUDE_DIRS}")
add_library(gloo SHARED IMPORTED)
- set_target_properties(gloo PROPERTIES IMPORTED_LOCATION ${Gloo_LIBRARY})
+ set_target_properties(gloo PROPERTIES IMPORTED_LOCATION ${Gloo_NATIVE_LIBRARY})
+ if(USE_CUDA)
+ add_library(gloo_cuda SHARED IMPORTED)
+ set_target_properties(gloo_cuda PROPERTIES IMPORTED_LOCATION ${Gloo_CUDA_LIBRARY})
+ elseif(USE_ROCM)
+ add_library(gloo_hip SHARED IMPORTED)
+ set_target_properties(gloo_hip PROPERTIES IMPORTED_LOCATION ${Gloo_HIP_LIBRARY})
+ endif()
# need to use Gloo_INCLUDE_DIRS over third_party/gloo to find Gloo's auto-generated config.h
include_directories(BEFORE SYSTEM ${Gloo_INCLUDE_DIRS})
endif()
diff --git a/cmake/Modules/FindGloo.cmake b/cmake/Modules/FindGloo.cmake
index e965326e2e8a0ab006bfe79243a66292ba262b62..944cd4d8d25738125c0f85b4dd9fee2850029339 100644
--- a/cmake/Modules/FindGloo.cmake
+++ b/cmake/Modules/FindGloo.cmake
@@ -1,7 +1,8 @@
# Try to find the Gloo library and headers.
# Gloo_FOUND - system has Gloo lib
# Gloo_INCLUDE_DIRS - the Gloo include directory
-# Gloo_LIBRARY/Gloo_NATIVE_LIBRARY - libraries needed to use Gloo
+# Gloo_NATIVE_LIBRARY - base gloo library, needs to be linked
+# Gloo_CUDA_LIBRARY/Gloo_HIP_LIBRARY - CUDA/HIP support library in Gloo
find_path(Gloo_INCLUDE_DIR
NAMES gloo/common/common.h
@@ -10,40 +11,32 @@ find_path(Gloo_INCLUDE_DIR
find_library(Gloo_NATIVE_LIBRARY
NAMES gloo
- DOC "The Gloo library (without CUDA)"
+ DOC "The Gloo library"
)
+# Gloo has optional CUDA support
+# if Gloo + CUDA is desired, Gloo_CUDA_LIBRARY
+# needs to be linked into desired target
find_library(Gloo_CUDA_LIBRARY
NAMES gloo_cuda
- DOC "The Gloo library (with CUDA)"
+ DOC "Gloo's CUDA support/code"
+)
+
+# Gloo has optional HIP support
+# if Gloo + HIP is desired, Gloo_HIP_LIBRARY
+# needs to be linked to desired target
+find_library(Gloo_HIP_LIBRARY
+ NAMES gloo_hiop
+ DOC "Gloo's HIP support/code"
)
set(Gloo_INCLUDE_DIRS ${Gloo_INCLUDE_DIR})
-# use the CUDA library depending on the Gloo_USE_CUDA variable
-if (DEFINED Gloo_USE_CUDA)
- if (${Gloo_USE_CUDA})
- set(Gloo_LIBRARY ${Gloo_CUDA_LIBRARY})
- set(Gloo_NATIVE_LIBRARY ${Gloo_NATIVE_LIBRARY})
- else()
- set(Gloo_LIBRARY ${Gloo_NATIVE_LIBRARY})
- set(Gloo_NATIVE_LIBRARY ${Gloo_NATIVE_LIBRARY})
- endif()
-else()
- # else try to use the CUDA library if found
- if (${Gloo_CUDA_LIBRARY} STREQUAL "Gloo_CUDA_LIBRARY-NOTFOUND")
- set(Gloo_LIBRARY ${Gloo_NATIVE_LIBRARY})
- set(Gloo_NATIVE_LIBRARY ${Gloo_NATIVE_LIBRARY})
- else()
- set(Gloo_LIBRARY ${Gloo_CUDA_LIBRARY})
- set(Gloo_NATIVE_LIBRARY ${Gloo_NATIVE_LIBRARY})
- endif()
-endif()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Gloo
FOUND_VAR Gloo_FOUND
- REQUIRED_VARS Gloo_INCLUDE_DIR Gloo_LIBRARY
+ REQUIRED_VARS Gloo_INCLUDE_DIR Gloo_NATIVE_LIBRARY
)
mark_as_advanced(Gloo_FOUND)

View File

@ -333,45 +333,45 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
# https://github.com/pytorch/pytorch/issues/151316
patch(
"https://github.com/pytorch/pytorch/pull/151344.patch?full_index=1",
sha256="edaadfd5f8acee67fee1c77b34145640a1239c9546d77420f3887af24889799e",
"https://github.com/pytorch/pytorch/commit/331423e5c24170b218e743b3392acbad4480340d.patch?full_index=1",
sha256="493cde279804346e13cf21862fddc48040a4d7da65d4e5d3de5f717a15e0aa62",
when="@2.7.0",
)
patch("apple_clang_17.patch", when="@1.12:2.6")
# https://github.com/pytorch/pytorch/issues/146239
patch(
"https://github.com/pytorch/pytorch/pull/146637.patch?full_index=1",
"gloo_cuda.patch",
sha256="f93aa66e2cf9c0febdbcf72f44213a213e570e5f860186e81c92c8d2af0857c0",
when="@2.6:",
)
# Fixes 'FindBLAS.cmake' error: unknown command check_function_exists
patch(
"https://github.com/pytorch/pytorch/pull/145849.patch?full_index=1",
sha256="5675ab543b786b8f360de451b27b3eb5d3ce8499d6c1a207f4a854f0c473ab03",
"https://github.com/pytorch/pytorch/commit/8d91bfd9654589c41b3bbb589bcb0bf95443c53e.patch?full_index=1",
sha256="2c9e0c8986c388f38288dacfb3208b457b2eec340963b8c8c8779e9f487adc07",
when="@:2.6",
)
# https://github.com/pytorch/pytorch/issues/90448
patch(
"https://github.com/pytorch/pytorch/pull/97270.patch?full_index=1",
sha256="beb3fb57746cf8443f5caa6e08b2f8f4d4822c1e11e0c912134bd166c6a0ade7",
"https://github.com/pytorch/pytorch/commit/9a18968253e28ba8d8bdf646731087000c7876b7.patch?full_index=1",
sha256="b4f299f6751e03fcf5d9a318541156edbc49c00e8a9c78785031e438e38f5533",
when="@1.10:2.0",
)
# Fix BLAS being overridden by MKL
# https://github.com/pytorch/pytorch/issues/60328
patch(
"https://github.com/pytorch/pytorch/pull/59220.patch?full_index=1",
"59220.patch",
sha256="6d5717267f901e8ee493dfacd08734d9bcc48ad29a76ca9ef702368e96bee675",
when="@:1.11",
)
# Fixes build on older systems with glibc <2.12
patch(
"https://github.com/pytorch/pytorch/pull/55063.patch?full_index=1",
sha256="2229bcbf20fbe88aa9f7318f89c126ec7f527875ffe689a763c78abfa127a65c",
"https://github.com/pytorch/pytorch/commit/13c975684a220ec096216ec6468ccd0dc90ff50a.patch?full_index=1",
sha256="a999e9376a69bbb8620ab358b485d1529c8e1c23a09ca34c5d287f6b77d2f5d9",
when="@:1.8.1",
)
@ -505,21 +505,21 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage):
# Some missing includes
# See: https://github.com/pytorch/pytorch/pull/100036
patch(
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/100036.patch?full_index=1",
sha256="65060b54c31196b26dcff29bbb178fd17d5677e8481a2a06002c0ca4dd37b3d0",
"https://github.com/pytorch/pytorch/commit/9d99d8879cb8a7a5ec94b04e933305b8d24ad6ac.patch?full_index=1",
sha256="8c3a5b22d0dbda2ee45cfc2ae1da446fc20898e498003579490d4efe9241f9ee",
when="@2.0.0:2.0.1",
)
# See: https://github.com/pytorch/pytorch/pull/100049
patch(
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/100049.patch?full_index=1",
sha256="673056141c0ea6ff4411f65a26f1a9d7a7c49ad8fe034a01ef0d56ba8a7a9386",
"https://github.com/pytorch/pytorch/commit/aaa3eb059a0294cc01c71f8e74abcebc33404e17.patch?full_index=1",
sha256="8dcbc5cd24b4c0e4a051e2161700b485c6c598b66347e7e90a263d9319c76374",
when="@2.0.0:2.0.1",
)
# Use correct OpenBLAS include path under prefix
patch(
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/110063.patch?full_index=1",
sha256="23fb4009f7337051fc5303927ff977186a5af960245e7212895406477d8b2f66",
"https://github.com/pytorch/pytorch/commit/21d77bcf808d076f81b5e885a8ce6ca20a08dbed.patch?full_index=1",
sha256="c61a6bd8cb9f021bfa122945a332cec223a2c7c6843ac911f9dc23e6facfb0ac",
when="@:2.1",
)

View File

@ -12,6 +12,7 @@ class PyTwine(PythonPackage):
pypi = "twine/twine-2.0.0.tar.gz"
git = "https://github.com/pypa/twine.git"
version("6.1.0", sha256="be324f6272eff91d07ee93f251edf232fc647935dd585ac003539b42404a8dbd")
version("6.0.1", sha256="36158b09df5406e1c9c1fb8edb24fc2be387709443e7376689b938531582ee27")
version("4.0.2", sha256="9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8")
version("4.0.1", sha256="96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0")
@ -24,8 +25,8 @@ class PyTwine(PythonPackage):
depends_on("py-setuptools-scm+toml@6:", when="@3.4.2:")
with default_args(type=("build", "run")):
depends_on("py-pkginfo@1.8.1:", when="@3.7:")
depends_on("py-pkginfo@1.4.2:")
depends_on("py-pkginfo@1.8.1:", when="@3.7:6.0.1")
depends_on("py-pkginfo@1.4.2:", when="@:3.6")
depends_on("py-readme-renderer@35:", when="@4.0.1:")
depends_on("py-readme-renderer@21.0:")
depends_on("py-requests@2.20:")
@ -36,7 +37,13 @@ class PyTwine(PythonPackage):
depends_on("py-keyring@15.1:", when="@3:")
depends_on("py-rfc3986@1.4:", when="@3.2:")
depends_on("py-rich@12:", when="@4:")
depends_on("py-packaging", when="@6:")
depends_on("py-packaging@24:", when="@6:")
depends_on("py-packaging", when="@6.1:")
depends_on("py-id", when="@6.1:")
depends_on("python@3.8:", when="@5:")
depends_on("python@3.7:", when="@4:")
depends_on("python@3.6:", when="@2:")
# Historical Dependencies
depends_on("py-tqdm@4.14:", when="@:3")

View File

@ -1,33 +1,33 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyVlConvertPython(PythonPackage):
"""Convert Vega-Lite chart specifications to SVG, PNG, PDF, or Vega"""
homepage = "https://github.com/vega/vl-convert"
pypi = "vl_convert_python/vl_convert_python-1.4.0.tar.gz"
version("1.4.0", sha256="264d6f2338c7d3474e60c6907cca016b880b0c1c9be302bb84abc6690188a7e9")
version(
"1.3.0",
sha256="de1462151dfbba7b2a17881dac1d2269662012c252f1e9d1537a4daed5e36067",
deprecated=True,
)
version(
"0.13.1",
sha256="d70a608257dd6b5b782d96cccebfe7289992e522e47a8bebb7d928253ca8b396",
deprecated=True,
)
depends_on("python@3.7:", type=("build", "run"))
# TODO: This package currently requires internet access to install.
depends_on("py-maturin@1.1:1", type="build")
depends_on("cmake", type="build") # some rust dependencies need this
depends_on("protobuf", type="build") # rust dependency prost need this
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyVlConvertPython(PythonPackage):
"""Convert Vega-Lite chart specifications to SVG, PNG, PDF, or Vega"""
homepage = "https://github.com/vega/vl-convert"
pypi = "vl_convert_python/vl_convert_python-1.4.0.tar.gz"
version("1.4.0", sha256="264d6f2338c7d3474e60c6907cca016b880b0c1c9be302bb84abc6690188a7e9")
version(
"1.3.0",
sha256="de1462151dfbba7b2a17881dac1d2269662012c252f1e9d1537a4daed5e36067",
deprecated=True,
)
version(
"0.13.1",
sha256="d70a608257dd6b5b782d96cccebfe7289992e522e47a8bebb7d928253ca8b396",
deprecated=True,
)
depends_on("python@3.7:", type=("build", "run"))
# TODO: This package currently requires internet access to install.
depends_on("py-maturin@1.1:1", type="build")
depends_on("cmake", type="build") # some rust dependencies need this
depends_on("protobuf", type="build") # rust dependency prost need this

View File

@ -0,0 +1,54 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Spatter(CMakePackage, CudaPackage):
"""A microbenchmark for timing Gather/Scatter kernels on CPUs and GPUs."""
homepage = "https://github.com/hpcgarage/spatter"
git = "https://github.com/hpcgarage/spatter.git"
maintainers("plavin", "jyoung3131")
license("MIT", checked_by="plavin")
version("develop", branch="spatter-devel")
version("main", branch="main", preferred=True)
version("2.1", tag="v2.1", commit="ec8923711f8dc21eedff7189f12b02eb06845d2f")
variant(
"backend",
default="openmp",
values=["serial", "openmp", "cuda"],
description="Configuration string",
)
variant("mpi", default=False, description="Enable MPI support")
variant("cuda_arch", default="none", multi=True, description="CUDA architecture")
depends_on("cmake@3.25:", type="build")
depends_on("mpi", when="+mpi")
depends_on("cuda", when="backend=cuda")
conflicts(
"backend=cuda",
when="cuda_arch=none",
msg="CUDA architecture must be specified when CUDA support is enabled.",
)
def cmake_args(self):
args = []
if self.spec.satisfies("backend=openmp"):
args.append(self.define("USE_OPENMP", "On"))
elif self.spec.satisfies("backend=cuda"):
args.append(self.define("USE_CUDA", "On"))
args.append(
self.define("CMAKE_CUDA_ARCHITECTURES", self.spec.variants["cuda_arch"].value)
)
args.append(self.define_from_variant("USE_MPI", "mpi"))
return args

View File

@ -17,6 +17,7 @@ class Tamaas(SConsPackage):
maintainers("prs513rosewood")
version("master", branch="master")
version("2.8.1", sha256="c5fc294ae3ccdee50beeeb23b8c6621a82c2b8642cbe40cd366f751a29b19225")
version("2.8.0", sha256="8ec49bf484a622c0554452416d1804eefbd545da79ced352f2ea63bbd17c83f0")
version("2.7.1", sha256="d7de6db3f5532bb9c8ab7e8cca1cdb5c133050dd5720249dde07027b0d41641f")
version("2.7.0", sha256="bc5717c1ead621cb9c18a073fdafbe8778fd160ad23d80c98283445d79066579")

View File

@ -159,7 +159,8 @@ class Tau(Package):
depends_on("python@:3.10", when="@:2.32.1")
depends_on("libunwind", when="+libunwind")
depends_on("mpi", when="+mpi", type=("build", "run", "link"))
depends_on("cuda", when="+cuda")
# Legacy NVTX is only supported up to cuda@12.8; newer CUDA releases only provide NVTX3.
depends_on("cuda@:12.8", when="+cuda")
depends_on("gasnet", when="+gasnet")
depends_on("adios2", when="+adios2")
depends_on("sqlite", when="+sqlite")

View File

@ -49,22 +49,27 @@ class Tfel(CMakePackage):
# released versions
version(
"5.0.0",
sha256="fe1ec39eba7f23571c2b0c773dab1cc274fee4512c5b2f2fc54b231da4502e87",
"5.0.1",
sha256="820b2f9d54e237b2c2d9d6f06aaf7d4a1d3f34fb373e6549bee4fd8b55ecfad1",
preferred=True,
)
version("5.0.0", sha256="fe1ec39eba7f23571c2b0c773dab1cc274fee4512c5b2f2fc54b231da4502e87")
version("4.2.3", sha256="a4f636ff9aeb6146a2d4f4bfd261092d6aa6c89be1ca725cefa8c02a5ec5183a")
version("4.2.2", sha256="021864ad5b27ffce1915bcacc8f39f3e8a72ce6bd32e80a61ea0998a060180e5")
version("4.2.1", sha256="14f27257014a992a4e511f35390e4b9a086f6a5ed74087f891f8c00306f1758f")
version("4.2.0", sha256="cf8a309c4d19a8e36232f8540ff28aa0d6285645f8dfb1ac57dd481ba3453e02")
version("4.1.4", sha256="a7db8e997a8d8cdf0551afc0b4370c698f97def3d9afc6939fed55ff9aadcaed")
version("4.1.3", sha256="c1d47345194fcffe98ff15b454ba721432e58b1943427362af8653ee8663875a")
version("4.1.2", sha256="e9e7c2aeef7d19f92ffd83b2a7fc54186e648d25e42696b5cba7c4bfa194276a")
version("4.1.1", sha256="e0f229094e88a2d6c6a78ae60fa77d2f4b8294e9d810c21fd7df61004bf29a33")
version("4.1.0", sha256="7505c41da9df5fb3c281651ff29b58a18fd4d91b92f839322f0267269c5f1375")
version("4.0.5", sha256="e0c0f937d6b826d90897ec28d1a6f03ae8dd2e2383f0ed986771e3cc49fa8921")
version("4.0.4", sha256="cc2926387c1e948866f9e0e8f0f09b699d072cc6fd1d345631cb0038d8292817")
version("4.0.3", sha256="c21c13fbd5ad8f52e9874a7931c619b9b7e69d69a2ab003e09628a1e9945542d")
version("4.0.2", sha256="f5c8a285e00f334fd3e1a95f9a393fed393990ee827dae3766da1decfaa1074e")
version("4.0.1", sha256="f54741b7e654cb12511ca68c6494a4789ba41b5ada4cd345ad2bc7da631309d1")
version("4.0.0", sha256="7a0c32c8a9cd2fd65cbcb54fff802f303665d7cba5d46f92ff3d55f057c92845")
version("3.4.8", sha256="cd6b1493accb251378e082b305a51601b7df48a428d03f1970fbb78a900f37fd")
version("3.4.7", sha256="5b79b58b9f234169eb47358f9717e8ae7401533e4e645f442194fcefdb4bcb98")
version("3.4.6", sha256="88c3d076ca360ffbadb6ffeb6cbc1267a9da0e098e7c182407501820ba2bf6e7")
version("3.4.5", sha256="064d6926106e0052829182087a025f58fc3e98dfb69967e0795d9cdb4e1500b9")
@ -73,6 +78,7 @@ class Tfel(CMakePackage):
version("3.4.2", sha256="f39e65b2282fd3b108081388f161ba662407b192fed68fafe324c7528026a202")
version("3.4.1", sha256="04cd4257e39e1b05e02b12ad941106fff4d439934bdfe6e950c08bab23e2a4ba")
version("3.4.0", sha256="176feb4c1726d0f21f4c656b20620dce6f99ab7f5f09a66905aeb643a316bbc1")
version("3.3.7", sha256="6bb99af2a218fd712693367d3dfe62f04f78ac5cc13b7f46d9176321230cf06d")
version("3.3.6", sha256="e56e999675fe08b0efdcbdd8b4bde8ab45d249098ec8d9641104819eb930bedf")
version("3.3.5", sha256="4319a7a6363f69f7f0c78abb0741bc90b49dc777831c2886b13aca61c79bae04")
version("3.3.4", sha256="3829e0b07520a14b17a8e75f879683a0d97b04b897aeb3ad0dd96dc94c0fcd6b")
@ -80,6 +86,7 @@ class Tfel(CMakePackage):
version("3.3.2", sha256="17127ffdf92367c10041258f70a88ac3dcb0a7d89c1766a6aa1ebaeb4d03d55d")
version("3.3.1", sha256="ad07329c25874832fbacc999b5f88d9b9ab84415bc897a6f3cae5b4afcd7661f")
version("3.3.0", sha256="884ad68b0fbbededc3a602d559433c24114ae4534dc9f0a759d31ca3589dace0")
version("3.2.12", sha256="7c0fecbf2ee603c8415ae64282b9cd2a6739d19c73fc79ab72beca29f7072dca")
version("3.2.11", sha256="3f00343e5cd66d3a95903cbd08f078d48cea75b64b444e3b48dddf3aa5a6aa02")
version("3.2.10", sha256="3fe24a2811811d68ce5735f601d12fae7b1da465ac5b2917bd0887782218f2bd")
version("3.2.9", sha256="4ee26f2b5db24dc10113100ae0165cbbe8c7960c99c0e64ec96410788774aa54")
@ -92,6 +99,7 @@ class Tfel(CMakePackage):
version("3.2.2", sha256="69b01ae0d1f9140b619aaa9135948284ff40d4654672c335e55ab4934c02eb43")
version("3.2.1", sha256="12786480524a7fe86889120fb334fa00211dfd44ad5ec71e2279e7adf1ddc807")
version("3.2.0", sha256="089d79745e9f267a2bd03dcd8841d484e668bd27f5cc2ff7453634cb39016848")
version("3.1.15", sha256="5fcf7b56cbc01892c43361055575fcb3a464e4115c71252f6025363082cb6df6")
version("3.1.14", sha256="04a11c146dede67777b3311e838305c9f5856d56154b263dc8b23168226b51f1")
version("3.1.13", sha256="f0e5dddb5d32931dcab2d060029da31aacb47cd3251297d701b86d93c8fa0255")
version("3.1.12", sha256="770aa4680063ddd7be4f735ed1ec9402e83502d1ceb688c79cdba27490b7bf98")
@ -107,6 +115,7 @@ class Tfel(CMakePackage):
version("3.1.2", sha256="2eaa191f0699031786d8845ac769320a42c7e035991d82b3738289886006bfba")
version("3.1.1", sha256="a4c0c21c6c22752cc90c82295a6bafe637b3395736c66fcdfcfe4aeccb5be7af")
version("3.1.0", sha256="dd67b400b5f157aef503aa3615b9bf6b52333876a29e75966f94ee3f79ab37ad")
version("3.0.15", sha256="ff5b51f7665ec639ed10fa1aaf3370cf21d85bc3a81439ab482a82957e94eeca")
version("3.0.14", sha256="5422031c63ffbd43436bd1471e107b6821649b8f2da5db77fce363a72c1d752c")
version("3.0.13", sha256="04987d318c46294853481fa987dd09e8ca38493b8994a363d20623f9b8f009ff")
version("3.0.12", sha256="f7dae9e5a00c721445b3167ec7bc71747bab047ddb36103f232b72d3e4d3cd00")
@ -152,27 +161,22 @@ class Tfel(CMakePackage):
depends_on("java", when="+java")
depends_on("python", when="+python", type=("build", "link", "run"))
depends_on("python", when="+python_bindings", type=("build", "link", "run"))
depends_on("py-numpy", when="+python_bindings", type=("build", "link", "run"))
with when("@5.1:"):
depends_on("py-pybind11", when="+python_bindings", type=("build", "link", "run"))
with when("+python_bindings"):
with when("@2.0.4:5.0.99"):
depends_on(
"boost+python+numpy+exception+container",
when="+python_bindings",
type=("build", "link", "run"),
)
depends_on("python", type=("build", "link", "run"))
depends_on("py-numpy", type=("build", "link", "run"))
with when("@rliv1.2:rliv5.0"):
depends_on(
"boost+python+numpy+exception+container",
when="+python_bindings",
type=("build", "link", "run"),
)
with when("@5.1:"):
depends_on("py-pybind11", type=("build", "link", "run"))
extends("python", when="+python_bindings")
with when("@2.0.4:5.0.99"):
depends_on("boost+python+numpy+exception+container", type=("build", "link", "run"))
with when("@rliv1.2:rliv5.0"):
depends_on("boost+python+numpy+exception+container", type=("build", "link", "run"))
extends("python", when="+python_bindings")
conflicts("%gcc@:7", when="@4:")
@ -180,6 +184,9 @@ def cmake_args(self):
args = []
args.append("-DUSE_EXTERNAL_COMPILER_FLAGS=ON")
args.append("-Denable-website=OFF")
args.append("-Denable-doxygen-doc=OFF")
args.append("-Denable-reference-doc=OFF")
for i in [
"fortran",

View File

@ -57,6 +57,7 @@ class Wgrib2(MakefilePackage, CMakePackage):
)
version("develop", branch="develop")
version("3.6.0", sha256="55913cb58f2b329759de17f5a84dd97ad1844d7a93956d245ec94f4264d802be")
version("3.5.0", sha256="b27b48228442a08bddc3d511d0c6335afca47252ae9f0e41ef6948f804afa3a1")
version("3.4.0", sha256="ecbce2209c09bd63f1bca824f58a60aa89db6762603bda7d7d3fa2148b4a0536")
version("3.3.0", sha256="010827fba9c31f05807e02375240950927e9e51379e1444388153284f08f58e2")
@ -192,7 +193,7 @@ def patch(self):
# Use Spack compiler wrapper flags
def inject_flags(self, name, flags):
if name == "cflags":
if self.spec.compiler.name == "apple-clang":
if self.spec.compiler.name in ["apple-clang", "clang"]:
flags.append("-Wno-error=implicit-function-declaration")
# When mixing Clang/gfortran need to link to -lgfortran

View File

@ -21,6 +21,7 @@ class Cmake(Package):
url = "https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz"
tags = ["build-tools"]
executables = ["^cmake[0-9]*$"]
depends_on("c", type="build")
depends_on("cxx", type="build")
@ -36,6 +37,12 @@ class Cmake(Package):
url="https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz",
)
@classmethod
def determine_version(cls, exe):
output = Executable(exe)("--version", output=str, error=str)
match = re.search(r"cmake.*version\s+(\S+)", output)
return match.group(1) if match else None
def setup_build_environment(self, env: EnvironmentModifications) -> None:
spack_cc # Ensure spack module-scope variable is available
env.set("for_install", "for_install")

View File

@ -31,6 +31,7 @@ class Gcc(CompilerPackage, Package):
provides("fortran", when="languages=fortran")
depends_on("c", type="build")
depends_on("cxx", type="build")
c_names = ["gcc"]
cxx_names = ["g++"]
@ -48,6 +49,19 @@ class Gcc(CompilerPackage, Package):
"fortran": os.path.join("gcc", "gfortran"),
}
implicit_rpath_libs = ["libgcc", "libgfortran"]
@classmethod
def determine_variants(cls, exes, version_str):
compilers = cls.determine_compiler_paths(exes=exes)
languages = set()
translation = {"cxx": "c++"}
for lang, compiler in compilers.items():
languages.add(translation.get(lang, lang))
variant_str = "languages={0}".format(",".join(languages))
return variant_str, {"compilers": compilers}
def install(self, spec, prefix):
# Create the minimal compiler that will fool `spack compiler find`
mkdirp(prefix.bin)

View File

@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
from spack.package import *
@ -12,19 +13,75 @@ class Llvm(Package, CompilerPackage):
homepage = "http://www.example.com"
url = "http://www.example.com/gcc-1.0.tar.gz"
tags = ["compiler"]
version("18.1.8", md5="0123456789abcdef0123456789abcdef")
variant(
"clang", default=True, description="Build the LLVM C/C++/Objective-C compiler frontend"
)
variant(
"flang",
default=False,
description="Build the LLVM Fortran compiler frontend "
"(experimental - parser only, needs GCC)",
)
variant("lld", default=True, description="Build the LLVM linker")
provides("c", "cxx", when="+clang")
provides("fortran", when="+flang")
depends_on("c")
compiler_version_argument = "--version"
c_names = ["clang"]
cxx_names = ["clang++"]
fortran_names = ["flang"]
clang_and_friends = "(?:clang|flang|flang-new)"
compiler_version_regex = (
# Normal clang compiler versions are left as-is
rf"{clang_and_friends} version ([^ )\n]+)-svn[~.\w\d-]*|"
# Don't include hyphenated patch numbers in the version
# (see https://github.com/spack/spack/pull/14365 for details)
rf"{clang_and_friends} version ([^ )\n]+?)-[~.\w\d-]*|"
rf"{clang_and_friends} version ([^ )\n]+)|"
# LLDB
r"lldb version ([^ )\n]+)|"
# LLD
r"LLD ([^ )\n]+) \(compatible with GNU linkers\)"
)
fortran_names = ["flang", "flang-new"]
@classmethod
def determine_version(cls, exe):
try:
compiler = Executable(exe)
output = compiler(cls.compiler_version_argument, output=str, error=str)
if "Apple" in output:
return None
if "AMD" in output:
return None
match = re.search(cls.compiler_version_regex, output)
if match:
return match.group(match.lastindex)
except ProcessError:
pass
except Exception as e:
tty.debug(e)
return None
@classmethod
def filter_detected_exes(cls, prefix, exes_in_prefix):
# Executables like lldb-vscode-X are daemon listening on some port and would hang Spack
# during detection. clang-cl, clang-cpp, etc. are dev tools that we don't need to test
reject = re.compile(
r"-(vscode|cpp|cl|ocl|gpu|tidy|rename|scan-deps|format|refactor|offload|"
r"check|query|doc|move|extdef|apply|reorder|change-namespace|"
r"include-fixer|import-test|dap|server|PerfectShuffle)"
)
return [x for x in exes_in_prefix if not reject.search(x)]
def install(self, spec, prefix):
# Create the minimal compiler that will fool `spack compiler find`

View File

@ -12,6 +12,7 @@ class Mpich(Package):
list_depth = 2
tags = ["tag1", "tag2"]
executables = ["^mpichversion$"]
variant("debug", default=False, description="Compile MPICH with debug flags.")
@ -30,6 +31,12 @@ class Mpich(Package):
depends_on("cxx", type="build")
depends_on("fortran", type="build")
@classmethod
def determine_version(cls, exe):
output = Executable(exe)(output=str, error=str)
match = re.search(r"MPICH Version:\s+(\S+)", output)
return match.group(1) if match else None
def install(self, spec, prefix):
touch(prefix.mpich)