Merge branch 'develop' into bugfix/compiler-flag-propagation

Richarda Butler 2023-08-31 09:20:48 -07:00 committed by GitHub
commit 90393b77d9
16 changed files with 566 additions and 259 deletions

View File

@@ -1,4 +1,4 @@
-sphinx==7.2.4
+sphinx==7.2.5
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
 sphinx-rtd-theme==1.3.0

View File

@@ -29,6 +29,90 @@
 }


+class CmdCall:
+    """Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
+
+    def __init__(self, *cmds):
+        if not cmds:
+            raise RuntimeError(
+                """Attempting to run commands from CMD without specifying commands.
+                Please add commands to be run."""
+            )
+        self._cmds = cmds
+
+    def __call__(self):
+        out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT)  # novermin
+        return out.decode("utf-16le", errors="replace")  # novermin
+
+    @property
+    def cmd_line(self):
+        base_call = "cmd /u /c "
+        commands = " && ".join([x.command_str() for x in self._cmds])
+        # If multiple commands are being invoked by a single subshell
+        # they must be encapsulated by a double quote. Always double
+        # quote to be sure of proper handling.
+        # cmd will properly resolve nested double quotes as needed.
+        #
+        # `set` writes out the active env to the subshell stdout,
+        # and in this context we are always trying to obtain env
+        # state, so it should always be appended
+        return base_call + f'"{commands} && set"'
+
+
+class VarsInvocation:
+    def __init__(self, script):
+        self._script = script
+
+    def command_str(self):
+        return f'"{self._script}"'
+
+    @property
+    def script(self):
+        return self._script
+
+
+class VCVarsInvocation(VarsInvocation):
+    def __init__(self, script, arch, msvc_version):
+        super(VCVarsInvocation, self).__init__(script)
+        self._arch = arch
+        self._msvc_version = msvc_version
+
+    @property
+    def sdk_ver(self):
+        """Accessor for the Windows SDK version property.
+
+        Note: this property may not be set by the calling context,
+        and in that case it will return an empty string.
+
+        This property will ONLY be set if the SDK package
+        is a dependency somewhere in the Spack DAG of the package
+        for which we are constructing an MSVC compiler env.
+        Otherwise this property should be unset to allow the VCVARS
+        script to use its internal heuristics to determine the
+        appropriate SDK version.
+        """
+        if getattr(self, "_sdk_ver", None):
+            return self._sdk_ver + ".0"
+        return ""
+
+    @sdk_ver.setter
+    def sdk_ver(self, val):
+        self._sdk_ver = val
+
+    @property
+    def arch(self):
+        return self._arch
+
+    @property
+    def vcvars_ver(self):
+        return f"-vcvars_ver={self._msvc_version}"
+
+    def command_str(self):
+        script = super(VCVarsInvocation, self).command_str()
+        return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
+
+
 def get_valid_fortran_pth(comp_ver):
     cl_ver = str(comp_ver)
     sort_fn = lambda fc_ver: StrictVersion(fc_ver)
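
To see how the classes added above fit together, here is a minimal, hypothetical sketch; the script names, architecture, and version are illustrative placeholders, not values from this commit:

    # Hypothetical composition of the invocation helpers: CmdCall joins each
    # command_str() with "&&" and always appends `set` so the subshell dumps
    # its environment to stdout.
    vcvars = VCVarsInvocation("vcvars64.bat", "amd64", "14.29")
    oneapi = VarsInvocation("setvars.bat")
    env_dump = CmdCall(vcvars, oneapi)()  # cmd /u /c "... && ... && set"
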
@@ -75,13 +159,15 @@ class Msvc(Compiler):
     # file based on compiler executable path.
     def __init__(self, *args, **kwargs):
-        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
-        args[3][:] = new_pth
+        # This positional argument "paths" is later parsed and processed by the base class
+        # via the call to `super` later in this method
+        paths = args[3]
+        # This positional argument "cspec" is also parsed and handled by the base class
+        # constructor
+        cspec = args[0]
+        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
+        paths[:] = new_pth
         super().__init__(*args, **kwargs)
-        if os.getenv("ONEAPI_ROOT"):
-            # If this is found, it sets all the vars
-            self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
-        else:
         # To use the MSVC compilers, VCVARS must be invoked.
         # VCVARS is located at a fixed location, referencable
         # idiomatically by the following relative path from the
@@ -89,8 +175,32 @@ def __init__(self, *args, **kwargs):
         # Spack first finds the compilers via VSWHERE
         # and stores their path, but their respective VCVARS
         # file must be invoked before usage.
-        self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
-        self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
+        env_cmds = []
+        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
+        # get current platform architecture and format for vcvars argument
+        arch = spack.platforms.real_host().default.lower()
+        arch = arch.replace("-", "_")
+        self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
+        env_cmds.append(self.vcvars_call)
+        # Below is a check for a valid fortran path:
+        # paths has the c, cxx, fc, and f77 paths in that order;
+        # paths[2] refers to the fc path and is a generic check
+        # for a fortran compiler
+        if paths[2]:
+            # If this is found, it sets all the vars
+            oneapi_root = os.getenv("ONEAPI_ROOT")
+            oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            oneapi_version_setvars = os.path.join(
+                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+            )
+            # order matters here: the specific version env must be invoked first,
+            # otherwise it will be ignored if the root setvars sets up the oneapi
+            # env first
+            env_cmds.extend(
+                [VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
+            )
+        self.msvc_compiler_environment = CmdCall(*env_cmds)

     @property
     def msvc_version(self):
@@ -119,16 +229,30 @@ def platform_toolset_ver(self):
         """
         return self.msvc_version[:2].joined.string[:3]

-    @property
-    def cl_version(self):
-        """Cl toolset version"""
+    def _compiler_version(self, compiler):
+        """Returns the version object for a given compiler"""
+        # ignore_errors below is true here due to ifx's
+        # non-zero return code if it is not provided
+        # an input file
         return Version(
             re.search(
                 Msvc.version_regex,
-                spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
+                spack.compiler.get_compiler_version_output(
+                    compiler, version_arg=None, ignore_errors=True
+                ),
             ).group(1)
         )

+    @property
+    def cl_version(self):
+        """Cl toolset version"""
+        return self._compiler_version(self.cc)
+
+    @property
+    def ifx_version(self):
+        """Ifx compiler version associated with this version of MSVC"""
+        return self._compiler_version(self.fc)
+
     @property
     def vs_root(self):
         # The MSVC install root is located at a fixed level above the compiler
@@ -146,27 +270,12 @@ def setup_custom_environment(self, pkg, env):
         # output, sort into dictionary, use that to make the build
         # environment.
-        # get current platform architecture and format for vcvars argument
-        arch = spack.platforms.real_host().default.lower()
-        arch = arch.replace("-", "_")

         # vcvars can target specific sdk versions, force it to pick up concretized sdk
         # version, if needed by spec
-        sdk_ver = (
-            ""
-            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
-            else pkg.spec["win-sdk"].version.string + ".0"
-        )
-        # provide vcvars with msvc version selected by concretization,
-        # not whatever it happens to pick up on the system (highest available version)
-        out = subprocess.check_output(  # novermin
-            'cmd /u /c "{}" {} {} {} && set'.format(
-                self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
-            ),
-            stderr=subprocess.STDOUT,
-        )
-        if sys.version_info[0] >= 3:
-            out = out.decode("utf-16le", errors="replace")  # novermin
+        if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
+            self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string

+        out = self.msvc_compiler_environment()
         int_env = dict(
             (key, value)
             for key, _, value in (line.partition("=") for line in out.splitlines())
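
For reference, a minimal sketch of the parsing step above with made-up `set` output; `int_env` keeps the text before the first "=" as the key and the rest as the value:

    out = "PATH=C:\\bin\nINCLUDE=C:\\inc"  # made-up `set` output
    int_env = dict(
        (key, value)
        for key, _, value in (line.partition("=") for line in out.splitlines())
    )
    assert int_env == {"PATH": "C:\\bin", "INCLUDE": "C:\\inc"}
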

View File

@@ -42,6 +42,7 @@ class OpenMpi(Package):
 import spack.patch
 import spack.spec
 import spack.url
+import spack.util.crypto
 import spack.variant
 from spack.dependency import Dependency, canonical_deptype, default_deptype
 from spack.fetch_strategy import from_kwargs
@@ -407,10 +408,7 @@ def version(
 def _execute_version(pkg, ver, **kwargs):
     if (
-        any(
-            s in kwargs
-            for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
-        )
+        (any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
         and hasattr(pkg, "has_code")
         and not pkg.has_code
     ):

View File

@@ -2664,6 +2664,26 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
         self.yaml_content = with_defaults_added
         self.changed = False

+    def _all_matches(self, user_spec: str) -> List[str]:
+        """Returns all the user specs in the manifest that are equivalent
+        to the input string.
+
+        Args:
+            user_spec: user spec to be found
+
+        Raises:
+            ValueError: if no equivalent match is found
+        """
+        result = []
+        for yaml_spec_str in self.pristine_configuration["specs"]:
+            if Spec(yaml_spec_str) == Spec(user_spec):
+                result.append(yaml_spec_str)
+        if not result:
+            raise ValueError(f"cannot find a spec equivalent to {user_spec}")
+        return result
+
     def add_user_spec(self, user_spec: str) -> None:
         """Appends the user spec passed as input to the list of root specs.
@@ -2684,8 +2704,9 @@ def remove_user_spec(self, user_spec: str) -> None:
             SpackEnvironmentError: when the user spec is not in the list
         """
         try:
-            self.pristine_configuration["specs"].remove(user_spec)
-            self.configuration["specs"].remove(user_spec)
+            for key in self._all_matches(user_spec):
+                self.pristine_configuration["specs"].remove(key)
+                self.configuration["specs"].remove(key)
         except ValueError as e:
             msg = f"cannot remove {user_spec} from {self}, no such spec exists"
             raise SpackEnvironmentError(msg) from e
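
The fix relies on spec equality being independent of how the string is spelled; a small illustration with an arbitrary package name:

    # Two differently spelled strings parse to equal Spec objects, which is
    # why remove_user_spec must look up the manifest's own spelling first.
    from spack.spec import Spec

    assert Spec("mpileaks +opt ~shared") == Spec("mpileaks ~shared +opt")
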

View File

@@ -13,7 +13,7 @@
 import re
 import types
 import warnings
-from typing import List, NamedTuple
+from typing import List, NamedTuple, Tuple, Union

 import archspec.cpu
@@ -44,15 +44,18 @@
 import spack.repo
 import spack.spec
 import spack.store
-import spack.traverse
+import spack.util.crypto
 import spack.util.path
 import spack.util.timer
 import spack.variant
 import spack.version as vn
 import spack.version.git_ref_lookup
+from spack import traverse

 from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter

+GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
+
 # these are from clingo.ast and bootstrapped later
 ASTType = None
 parse_files = None
@@ -569,6 +572,41 @@ def keyfn(x):
     return normalized_yaml


+def _is_checksummed_git_version(v):
+    return isinstance(v, vn.GitVersion) and v.is_commit
+
+
+def _is_checksummed_version(version_info: Tuple[GitOrStandardVersion, dict]):
+    """Returns true iff the version is not a moving target"""
+    version, info = version_info
+    if isinstance(version, spack.version.StandardVersion):
+        if any(h in info for h in spack.util.crypto.hashes.keys()) or "checksum" in info:
+            return True
+        return "commit" in info and len(info["commit"]) == 40
+    return _is_checksummed_git_version(version)
+
+
+def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
+    """Version order key for concretization, where preferred > not preferred,
+    not deprecated > deprecated, finite > any infinite component; only if all are
+    the same, do we use default version ordering."""
+    version, info = version_info
+    return (
+        info.get("preferred", False),
+        not info.get("deprecated", False),
+        not version.isdevelop(),
+        version,
+    )
+
+
+def _spec_with_default_name(spec_str, name):
+    """Return a spec with a default name if none is provided, used for requirement specs"""
+    spec = spack.spec.Spec(spec_str)
+    if not spec.name:
+        spec.name = name
+    return spec
+
+
 def bootstrap_clingo():
     global clingo, ASTType, parse_files
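
A quick, hypothetical illustration of the ordering key above (version strings are arbitrary):

    # Sorting with _concretization_version_order: the preferred 1.0 outranks
    # the newer 2.0, and "develop" sorts below both finite versions.
    infos = [
        (vn.Version("2.0"), {}),
        (vn.Version("develop"), {}),
        (vn.Version("1.0"), {"preferred": True}),
    ]
    infos.sort(key=_concretization_version_order, reverse=True)
    # -> 1.0 (preferred), 2.0, develop
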
@@ -1857,30 +1895,27 @@ class Body:
         return clauses

-    def build_version_dict(self, possible_pkgs):
+    def define_package_versions_and_validate_preferences(
+        self, possible_pkgs, require_checksum: bool
+    ):
         """Declare any versions in specs not declared in packages."""
         packages_yaml = spack.config.get("packages")
-        packages_yaml = _normalize_packages_yaml(packages_yaml)
         for pkg_name in possible_pkgs:
             pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

             # All the versions from the corresponding package.py file. Since concepts
             # like being a "develop" version or being preferred exist only at a
             # package.py level, sort them in this partial list here
-            def key_fn(item):
-                version, info = item
-                # When COMPARING VERSIONS, the '@develop' version is always
-                # larger than other versions. BUT when CONCRETIZING, the largest
-                # NON-develop version is selected by default.
-                return (
-                    info.get("preferred", False),
-                    not info.get("deprecated", False),
-                    not version.isdevelop(),
-                    version,
-                )
-
-            for idx, item in enumerate(sorted(pkg_cls.versions.items(), key=key_fn, reverse=True)):
-                v, version_info = item
+            package_py_versions = sorted(
+                pkg_cls.versions.items(), key=_concretization_version_order, reverse=True
+            )
+
+            if require_checksum and pkg_cls.has_code:
+                package_py_versions = [
+                    x for x in package_py_versions if _is_checksummed_version(x)
+                ]
+
+            for idx, (v, version_info) in enumerate(package_py_versions):
                 self.possible_versions[pkg_name].add(v)
                 self.declared_versions[pkg_name].append(
                     DeclaredVersion(version=v, idx=idx, origin=Provenance.PACKAGE_PY)
@@ -1889,22 +1924,26 @@ def key_fn(item):
                 if deprecated:
                     self.deprecated_versions[pkg_name].add(v)

-            # All the preferred versions from packages.yaml, versions in external
-            # specs will be computed later
-            version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
+            if pkg_name not in packages_yaml or "version" not in packages_yaml[pkg_name]:
+                continue

             version_defs = []
-            pkg_class = spack.repo.PATH.get_pkg_class(pkg_name)
-            for vstr in version_preferences:
+
+            for vstr in packages_yaml[pkg_name]["version"]:
                 v = vn.ver(vstr)

                 if isinstance(v, vn.GitVersion):
-                    version_defs.append(v)
+                    if not require_checksum or v.is_commit:
+                        version_defs.append(v)
                 else:
-                    satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
-                    # Amongst all defined versions satisfying this specific
-                    # preference, the highest-numbered version is the
-                    # most-preferred: therefore sort satisfying versions
-                    # from greatest to least
-                    version_defs.extend(sorted(satisfying_versions, reverse=True))
+                    matches = [x for x in self.possible_versions[pkg_name] if x.satisfies(v)]
+                    matches.sort(reverse=True)
+                    if not matches:
+                        raise spack.config.ConfigError(
+                            f"Preference for version {v} does not match any known "
+                            f"version of {pkg_name} (in its package.py or any external)"
+                        )
+                    version_defs.extend(matches)

             for weight, vdef in enumerate(llnl.util.lang.dedupe(version_defs)):
                 self.declared_versions[pkg_name].append(
@@ -1912,31 +1951,9 @@ def key_fn(item):
                 )
                 self.possible_versions[pkg_name].add(vdef)

-    def _check_for_defined_matching_versions(self, pkg_class, v):
-        """Given a version specification (which may be a concrete version,
-        range, etc.), determine if any package.py version declarations
-        or externals define a version which satisfies it.
-
-        This is primarily for determining whether a version request (e.g.
-        version preferences, which should not themselves define versions)
-        refers to a defined version.
-
-        This function raises an exception if no satisfying versions are
-        found.
-        """
-        pkg_name = pkg_class.name
-        satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
-        satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
-        if not satisfying_versions:
-            raise spack.config.ConfigError(
-                "Preference for version {0} does not match any version"
-                " defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
-            )
-        return satisfying_versions
-
-    def add_concrete_versions_from_specs(self, specs, origin):
+    def define_ad_hoc_versions_from_specs(self, specs, origin, require_checksum: bool):
         """Add concrete versions to possible versions from lists of CLI/dev specs."""
-        for s in spack.traverse.traverse_nodes(specs):
+        for s in traverse.traverse_nodes(specs):
             # If there is a concrete version on the CLI *that we know nothing
             # about*, add it to the known versions. Use idx=0, which is the
             # best possible, so they're guaranteed to be used preferentially.
@@ -1945,9 +1962,13 @@ def add_concrete_versions_from_specs(self, specs, origin):
             if version is None or any(v == version for v in self.possible_versions[s.name]):
                 continue

-            self.declared_versions[s.name].append(
-                DeclaredVersion(version=version, idx=0, origin=origin)
-            )
+            if require_checksum and not _is_checksummed_git_version(version):
+                raise UnsatisfiableSpecError(
+                    s.format("No matching version for constraint {name}{@versions}")
+                )
+
+            declared = DeclaredVersion(version=version, idx=0, origin=origin)
+            self.declared_versions[s.name].append(declared)
             self.possible_versions[s.name].add(version)

     def _supported_targets(self, compiler_name, compiler_version, targets):
@@ -2144,7 +2165,7 @@ def generate_possible_compilers(self, specs):
         # add compiler specs from the input line to possibilities if we
         # don't require compilers to exist.
         strict = spack.concretize.Concretizer().check_for_compiler_existence
-        for s in spack.traverse.traverse_nodes(specs):
+        for s in traverse.traverse_nodes(specs):
             # we don't need to validate compilers for already-built specs
             if s.concrete or not s.compiler:
                 continue
@@ -2394,13 +2415,12 @@ def setup(self, driver, specs, reuse=None):
         self.provider_requirements()
         self.external_packages()

-        # traverse all specs and packages to build dict of possible versions
-        self.build_version_dict(self.pkgs)
-        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
-        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
-
-        req_version_specs = self._get_versioned_specs_from_pkg_requirements()
-        self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
+        # TODO: make a config option for this undocumented feature
+        require_checksum = "SPACK_CONCRETIZER_REQUIRE_CHECKSUM" in os.environ
+        self.define_package_versions_and_validate_preferences(self.pkgs, require_checksum)
+        self.define_ad_hoc_versions_from_specs(specs, Provenance.SPEC, require_checksum)
+        self.define_ad_hoc_versions_from_specs(dev_specs, Provenance.DEV_SPEC, require_checksum)
+        self.validate_and_define_versions_from_requirements(require_checksum)

         self.gen.h1("Package Constraints")
         for pkg in sorted(self.pkgs):
@@ -2449,78 +2469,68 @@ def literal_specs(self, specs):
         if self.concretize_everything:
             self.gen.fact(fn.solve_literal(idx))

-    def _get_versioned_specs_from_pkg_requirements(self):
-        """If package requirements mention versions that are not mentioned
-        elsewhere, then we need to collect those to mark them as possible
-        versions.
-        """
-        req_version_specs = list()
-        config = spack.config.get("packages")
-        for pkg_name, d in config.items():
-            if pkg_name == "all":
-                continue
-            if "require" in d:
-                req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
-        return req_version_specs
+    def validate_and_define_versions_from_requirements(self, require_checksum: bool):
+        """If package requirements mention concrete versions that are not mentioned
+        elsewhere, then we need to collect those to mark them as possible
+        versions. If they are abstract and statically have no match, then we
+        need to throw an error. This function assumes all possible versions are already
+        registered in self.possible_versions."""
+        for pkg_name, d in spack.config.get("packages").items():
+            if pkg_name == "all" or "require" not in d:
+                continue
+
+            for s in traverse.traverse_nodes(self._specs_from_requires(pkg_name, d["require"])):
+                name, versions = s.name, s.versions
+
+                if name not in self.pkgs or versions == spack.version.any_version:
+                    continue
+
+                s.attach_git_version_lookup()
+                v = versions.concrete
+
+                if not v:
+                    # If the version is not concrete, check it's statically concretizable. If
+                    # not, throw an error, which is just so that users know they need to change
+                    # their config, instead of getting a hard-to-decipher concretization error.
+                    if not any(x for x in self.possible_versions[name] if x.satisfies(versions)):
+                        raise spack.config.ConfigError(
+                            f"Version requirement {versions} on {pkg_name} for {name} "
+                            f"cannot match any known version from package.py or externals"
+                        )
+                    continue
+
+                if v in self.possible_versions[name]:
+                    continue
+
+                # If concrete and not yet defined, conditionally define it, like we do for
+                # specs from the command line.
+                if not require_checksum or _is_checksummed_git_version(v):
+                    self.declared_versions[name].append(
+                        DeclaredVersion(version=v, idx=0, origin=Provenance.PACKAGE_REQUIREMENT)
+                    )
+                    self.possible_versions[name].add(v)
     def _specs_from_requires(self, pkg_name, section):
-        """Collect specs from requirements which define versions (i.e. those that
-        have a concrete version). Requirements can define *new* versions if
-        they are included as part of an equivalence (hash=number) but not
-        otherwise.
-        """
+        """Collect specs from a requirement rule"""
         if isinstance(section, str):
-            spec = spack.spec.Spec(section)
-            if not spec.name:
-                spec.name = pkg_name
-            extracted_specs = [spec]
-        else:
-            spec_strs = []
-            for spec_group in section:
-                if isinstance(spec_group, str):
-                    spec_strs.append(spec_group)
-                else:
-                    # Otherwise it is an object. The object can contain a single
-                    # "spec" constraint, or a list of them with "any_of" or
-                    # "one_of" policy.
-                    if "spec" in spec_group:
-                        new_constraints = [spec_group["spec"]]
-                    else:
-                        key = "one_of" if "one_of" in spec_group else "any_of"
-                        new_constraints = spec_group[key]
-                    spec_strs.extend(new_constraints)
-
-            extracted_specs = []
-            for spec_str in spec_strs:
-                spec = spack.spec.Spec(spec_str)
-                if not spec.name:
-                    spec.name = pkg_name
-                extracted_specs.append(spec)
-
-        version_specs = []
-        for spec in extracted_specs:
-            if spec.versions.concrete:
-                # Note: this includes git versions
-                version_specs.append(spec)
-                continue
-
-            # Prefer spec's name if it exists, in case the spec is
-            # requiring a specific implementation inside of a virtual section
-            # e.g. packages:mpi:require:openmpi@4.0.1
-            pkg_class = spack.repo.PATH.get_pkg_class(spec.name or pkg_name)
-            satisfying_versions = self._check_for_defined_matching_versions(
-                pkg_class, spec.versions
-            )
-
-            # Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
-            # will end up here
-            ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
-            vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
-            version_specs.extend(vspecs)
-
-        for spec in version_specs:
-            spec.attach_git_version_lookup()
-        return version_specs
+            yield _spec_with_default_name(section, pkg_name)
+            return
+
+        for spec_group in section:
+            if isinstance(spec_group, str):
+                yield _spec_with_default_name(spec_group, pkg_name)
+                continue
+
+            # Otherwise it is an object. The object can contain a single
+            # "spec" constraint, or a list of them with "any_of" or
+            # "one_of" policy.
+            if "spec" in spec_group:
+                yield _spec_with_default_name(spec_group["spec"], pkg_name)
+                continue
+
+            key = "one_of" if "one_of" in spec_group else "any_of"
+            for s in spec_group[key]:
+                yield _spec_with_default_name(s, pkg_name)
 class SpecBuilder:
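
As an illustration, for a hypothetical requirement section like the one below, the generator yields one named spec per constraint:

    # Hypothetical packages.yaml requirement section for a package "mylib":
    section = ["@1.2:", {"spec": "+mpi"}, {"any_of": ["+cuda", "+rocm"]}]
    # _specs_from_requires("mylib", section) yields specs equivalent to:
    #   Spec("mylib@1.2:"), Spec("mylib+mpi"), Spec("mylib+cuda"), Spec("mylib+rocm")
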

View File

@@ -97,8 +97,10 @@ def remove(self, spec):
             msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
             msg += "expanded from a matrix and cannot be removed directly."
             raise SpecListError(msg)
-        assert len(remove) == 1
-        self.yaml_list.remove(remove[0])
+
+        # Remove may contain more than one string representation of the same spec
+        for item in remove:
+            self.yaml_list.remove(item)

         # invalidate cache variables when we change the list
         self._expanded_list = None

View File

@@ -21,10 +21,11 @@
 import spack.hash_types as ht
 import spack.platforms
 import spack.repo
+import spack.solver.asp
 import spack.variant as vt
 from spack.concretize import find_spec
 from spack.spec import CompilerSpec, Spec
-from spack.version import ver
+from spack.version import Version, ver


 def check_spec(abstract, concrete):
@@ -1610,8 +1611,6 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, monkeypatch):
     )
     @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
     def test_best_effort_coconcretize(self, specs, expected):
-        import spack.solver.asp
-
         specs = [Spec(s) for s in specs]
         solver = spack.solver.asp.Solver()
         solver.reuse = False
@@ -1655,8 +1654,6 @@ def test_best_effort_coconcretize(self, specs, expected):
     @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
     def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
         """Test package preferences during coconcretization."""
-        import spack.solver.asp
-
         specs = [Spec(s) for s in specs]
         solver = spack.solver.asp.Solver()
         solver.reuse = False
@@ -1672,8 +1669,6 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
     @pytest.mark.only_clingo("Use case not supported by the original concretizer")
     def test_coconcretize_reuse_and_virtuals(self):
-        import spack.solver.asp
-
         reusable_specs = []
         for s in ["mpileaks ^mpich", "zmpi"]:
             reusable_specs.extend(Spec(s).concretized().traverse(root=True))
@@ -1694,8 +1689,6 @@ def test_misleading_error_message_on_version(self, mutable_database):
         # For this bug to be triggered we need a reusable dependency
         # that is not optimal in terms of optimization scores.
         # We pick an old version of "b"
-        import spack.solver.asp
-
         reusable_specs = [Spec("non-existing-conditional-dep@1.0").concretized()]
         root_spec = Spec("non-existing-conditional-dep@2.0")
@@ -1711,8 +1704,6 @@ def test_misleading_error_message_on_version(self, mutable_database):
     @pytest.mark.only_clingo("Use case not supported by the original concretizer")
     def test_version_weight_and_provenance(self):
         """Test package preferences during coconcretization."""
-        import spack.solver.asp
-
         reusable_specs = [Spec(spec_str).concretized() for spec_str in ("b@0.9", "b@1.0")]
         root_spec = Spec("a foobar=bar")
@@ -1744,8 +1735,6 @@ def test_version_weight_and_provenance(self):
     @pytest.mark.regression("31169")
     @pytest.mark.only_clingo("Use case not supported by the original concretizer")
     def test_not_reusing_incompatible_os_or_compiler(self):
-        import spack.solver.asp
-
         root_spec = Spec("b")
         s = root_spec.concretized()
         wrong_compiler, wrong_os = s.copy(), s.copy()
@@ -2126,6 +2115,14 @@ def test_virtuals_are_reconstructed_on_reuse(self, spec_str, mpi_name, database):
         assert len(mpi_edges) == 1
         assert "mpi" in mpi_edges[0].virtuals

+    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
+    def test_dont_define_new_version_from_input_if_checksum_required(self, working_env):
+        os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
+        with pytest.raises(spack.error.UnsatisfiableSpecError):
+            # normally spack concretizes to @=3.0 if it's not defined in package.py, except
+            # when checksums are required
+            Spec("a@=3.0").concretized()
+

 @pytest.fixture()
 def duplicates_test_repository():
@@ -2220,3 +2217,39 @@ def test_solution_without_cycles(self):
         s = Spec("cycle-b").concretized()
         assert s["cycle-a"].satisfies("~cycle")
         assert s["cycle-b"].satisfies("+cycle")
+
+
+@pytest.mark.parametrize(
+    "v_str,v_opts,checksummed",
+    [
+        ("1.2.3", {"sha256": f"{1:064x}"}, True),
+        # it's not about the version being "infinite",
+        # but whether it has a digest
+        ("develop", {"sha256": f"{1:064x}"}, True),
+        # other hash types
+        ("1.2.3", {"checksum": f"{1:064x}"}, True),
+        ("1.2.3", {"md5": f"{1:032x}"}, True),
+        ("1.2.3", {"sha1": f"{1:040x}"}, True),
+        ("1.2.3", {"sha224": f"{1:056x}"}, True),
+        ("1.2.3", {"sha384": f"{1:096x}"}, True),
+        ("1.2.3", {"sha512": f"{1:0128x}"}, True),
+        # no digest key
+        ("1.2.3", {"bogus": f"{1:064x}"}, False),
+        # git version with full commit sha
+        ("1.2.3", {"commit": f"{1:040x}"}, True),
+        (f"{1:040x}=1.2.3", {}, True),
+        # git version with short commit sha
+        ("1.2.3", {"commit": f"{1:07x}"}, False),
+        (f"{1:07x}=1.2.3", {}, False),
+        # git tag is a moving target
+        ("1.2.3", {"tag": "v1.2.3"}, False),
+        ("1.2.3", {"tag": "v1.2.3", "commit": f"{1:07x}"}, False),
+        # git branch is a moving target
+        ("1.2.3", {"branch": "releases/1.2"}, False),
+        # git ref is a moving target
+        ("git.branch=1.2.3", {}, False),
+    ],
+)
+def test_drop_moving_targets(v_str, v_opts, checksummed):
+    v = Version(v_str)
+    assert spack.solver.asp._is_checksummed_version((v, v_opts)) == checksummed

View File

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
 import pathlib

 import pytest
@@ -299,9 +300,14 @@ def test_requirement_adds_version_satisfies(
     assert s1.satisfies("@2.2")


+@pytest.mark.parametrize("require_checksum", (True, False))
 def test_requirement_adds_git_hash_version(
-    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+    require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
 ):
+    # A full commit sha is a checksummed version, so this test should pass in both cases
+    if require_checksum:
+        os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
+
     repo_path, filename, commits = mock_git_version_info
     monkeypatch.setattr(
         spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False

View File

@@ -13,7 +13,11 @@
 import spack.environment as ev
 import spack.spec
-from spack.environment.environment import SpackEnvironmentViewError, _error_on_nonempty_view_dir
+from spack.environment.environment import (
+    EnvironmentManifestFile,
+    SpackEnvironmentViewError,
+    _error_on_nonempty_view_dir,
+)

 pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows")
@@ -623,3 +627,66 @@ def test_requires_on_virtual_and_potential_providers(
     assert mpileaks.satisfies("^mpich2")
     assert mpileaks["mpi"].satisfies("mpich2")
     assert not mpileaks.satisfies(f"^{possible_mpi_spec}")
@pytest.mark.regression("39387")
@pytest.mark.parametrize(
"spec_str", ["mpileaks +opt", "mpileaks +opt ~shared", "mpileaks ~shared +opt"]
)
def test_manifest_file_removal_works_if_spec_is_not_normalized(tmp_path, spec_str):
"""Tests that we can remove a spec from a manifest file even if its string
representation is not normalized.
"""
manifest = tmp_path / "spack.yaml"
manifest.write_text(
f"""\
spack:
specs:
- {spec_str}
"""
)
s = spack.spec.Spec(spec_str)
spack_yaml = EnvironmentManifestFile(tmp_path)
# Doing a round trip str -> Spec -> str normalizes the representation
spack_yaml.remove_user_spec(str(s))
spack_yaml.flush()
assert spec_str not in manifest.read_text()
@pytest.mark.regression("39387")
@pytest.mark.parametrize(
"duplicate_specs,expected_number",
[
# Swap variants, versions, etc. add spaces
(["foo +bar ~baz", "foo ~baz +bar"], 3),
(["foo @1.0 ~baz %gcc", "foo ~baz @1.0%gcc"], 3),
# Item 1 and 3 are exactly the same
(["zlib +shared", "zlib +shared", "zlib +shared"], 4),
],
)
def test_removing_spec_from_manifest_with_exact_duplicates(
duplicate_specs, expected_number, tmp_path
):
"""Tests that we can remove exact duplicates from a manifest file.
Note that we can't get in a state with duplicates using only CLI, but this might happen
on user edited spack.yaml files.
"""
manifest = tmp_path / "spack.yaml"
manifest.write_text(
f"""\
spack:
specs: [{", ".join(duplicate_specs)} , "zlib"]
"""
)
with ev.Environment(tmp_path) as env:
assert len(env.user_specs) == expected_number
env.remove(duplicate_specs[0])
env.write()
assert "+shared" not in manifest.read_text()
assert "zlib" in manifest.read_text()
with ev.Environment(tmp_path) as env:
assert len(env.user_specs) == 1

View File

@@ -9,7 +9,8 @@
 import llnl.util.tty as tty

 #: Set of hash algorithms that Spack can use, mapped to digest size in bytes
-hashes = {"md5": 16, "sha1": 20, "sha224": 28, "sha256": 32, "sha384": 48, "sha512": 64}
+hashes = {"sha256": 32, "md5": 16, "sha1": 20, "sha224": 28, "sha384": 48, "sha512": 64}
+# Note: keys are ordered by popularity for earliest return in ``hash_key in version_dict`` checks.

 #: size of hash digests in bytes, mapped to algorithm names
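
A short sketch of the membership test this ordering speeds up (the version dict is a made-up example):

    # `any(...)` probes the keys of `hashes` in insertion order, so listing
    # sha256 first returns on the first iteration for the most common digest.
    version_dict = {"sha256": 64 * "0"}  # made-up entry
    has_digest = any(hash_key in version_dict for hash_key in hashes)
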

View File

@@ -136,6 +136,8 @@ default:
   variables:
     KUBERNETES_CPU_REQUEST: 4000m
     KUBERNETES_MEMORY_REQUEST: 16G
+    # avoid moving targets like branches and tags
+    SPACK_CONCRETIZER_REQUIRE_CHECKSUM: 1
   interruptible: true
   timeout: 60 minutes
   retry:

View File

@@ -14,20 +14,36 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
     git = "https://github.com/eth-cscs/DLA-Future.git"
     maintainers = ["rasolca", "albestro", "msimberg", "aurianer"]

+    version("0.2.0", sha256="da73cbd1b88287c86d84b1045a05406b742be924e65c52588bbff200abd81a10")
     version("0.1.0", sha256="f7ffcde22edabb3dc24a624e2888f98829ee526da384cd752b2b271c731ca9b1")
     version("master", branch="master")

     variant("shared", default=True, description="Build shared libraries.")

+    variant(
+        "hdf5",
+        default=False,
+        when="@0.2.0:",
+        description="HDF5 support for dealing with matrices on disk.",
+    )
+
     variant("doc", default=False, description="Build documentation.")

     variant("miniapps", default=False, description="Build miniapps.")

+    variant(
+        "scalapack",
+        default=False,
+        when="@0.2.0:",
+        description="Build C API compatible with ScaLAPACK",
+    )
+
     depends_on("cmake@3.22:", type="build")
     depends_on("doxygen", type="build", when="+doc")
     depends_on("mpi")

     depends_on("blaspp@2022.05.00:")
     depends_on("lapackpp@2022.05.00:")

+    depends_on("scalapack", when="+scalapack")
+
     depends_on("umpire~examples")
     depends_on("umpire~cuda", when="~cuda")
@@ -36,7 +52,8 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
     depends_on("umpire+rocm~shared", when="+rocm")
     depends_on("umpire@4.1.0:")

-    depends_on("pika@0.15.1:")
+    depends_on("pika@0.15.1:", when="@0.1")
+    depends_on("pika@0.16:", when="@0.2.0:")
     depends_on("pika-algorithms@0.1:")
     depends_on("pika +mpi")
     depends_on("pika +cuda", when="+cuda")
@@ -52,6 +69,8 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
     depends_on("rocsolver", when="+rocm")
     depends_on("rocthrust", when="+rocm")

+    depends_on("hdf5 +cxx+mpi+threadsafe+shared", when="+hdf5")
+
     conflicts("+cuda", when="+rocm")

     with when("+rocm"):
@@ -92,16 +111,29 @@ def cmake_args(self):
                 "openmp": "omp",
                 "tbb": "tbb",
             }  # Map MKL variants to LAPACK target name
+            mkl_threads = vmap[spec["intel-mkl"].variants["threads"].value]
             # TODO: Generalise for intel-oneapi-mkl
             args += [
                 self.define("DLAF_WITH_MKL", True),
-                self.define(
-                    "MKL_LAPACK_TARGET",
-                    "mkl::mkl_intel_32bit_{0}_dyn".format(
-                        vmap[spec["intel-mkl"].variants["threads"].value]
-                    ),
-                ),
+                self.define("MKL_LAPACK_TARGET", f"mkl::mkl_intel_32bit_{mkl_threads}_dyn"),
             ]
+            if "+scalapack" in spec:
+                if (
+                    "^mpich" in spec
+                    or "^cray-mpich" in spec
+                    or "^intel-mpi" in spec
+                    or "^mvapich" in spec
+                    or "^mvapich2" in spec
+                ):
+                    mkl_mpi = "mpich"
+                elif "^openmpi" in spec:
+                    mkl_mpi = "ompi"
+                args.append(
+                    self.define(
+                        "MKL_SCALAPACK_TARGET",
+                        f"mkl::scalapack_{mkl_mpi}_intel_32bit_{mkl_threads}_dyn",
+                    )
+                )
         else:
             args.append(self.define("DLAF_WITH_MKL", False))
             args.append(
@@ -110,6 +142,11 @@ def cmake_args(self):
                     " ".join([spec[dep].libs.ld_flags for dep in ["blas", "lapack"]]),
                 )
             )
+            if "+scalapack" in spec:
+                args.append(self.define("SCALAPACK_LIBRARY", spec["scalapack"].libs.ld_flags))
+
+        if "+scalapack" in spec:
+            args.append(self.define_from_variant("DLAF_WITH_SCALAPACK", "scalapack"))

         # CUDA/HIP
         args.append(self.define_from_variant("DLAF_WITH_CUDA", "cuda"))
@@ -125,6 +162,9 @@ def cmake_args(self):
             arch_str = ";".join(archs)
             args.append(self.define("CMAKE_CUDA_ARCHITECTURES", arch_str))

+        # HDF5 support
+        args.append(self.define_from_variant("DLAF_WITH_HDF5", "hdf5"))
+
         # DOC
         args.append(self.define_from_variant("DLAF_BUILD_DOC", "doc"))

View File

@@ -756,7 +756,6 @@ def cmake_args(self):
         cmake_args = [
             define("LLVM_REQUIRES_RTTI", True),
             define("LLVM_ENABLE_RTTI", True),
-            define("LLVM_ENABLE_EH", True),
             define("LLVM_ENABLE_LIBXML2", False),
             define("CLANG_DEFAULT_OPENMP_RUNTIME", "libomp"),
             define("PYTHON_EXECUTABLE", python.command.path),
@@ -765,6 +764,16 @@ def cmake_args(self):
             from_variant("LLVM_ENABLE_ZSTD", "zstd"),
         ]

+        # Flang does not support exceptions from core llvm.
+        # LLVM_ENABLE_EH=True when building flang will soon
+        # fail (with changes at the llvm-project level).
+        # Only enable exceptions in LLVM if we are *not*
+        # building flang. FYI: LLVM <= 16.x will build flang
+        # successfully, but the executable will suffer from
+        # link errors looking for C++ EH support.
+        if "+flang" not in spec:
+            cmake_args.append(define("LLVM_ENABLE_EH", True))
+
         version_suffix = spec.variants["version_suffix"].value
         if version_suffix != "none":
             cmake_args.append(define("LLVM_VERSION_SUFFIX", version_suffix))

View File

@@ -355,6 +355,9 @@ class Nvhpc(Package):
     )
     variant("lapack", default=True, description="Enable LAPACK")
     variant("mpi", default=False, description="Enable MPI")
+    variant(
+        "default_cuda", default="default", description="Default CUDA version, for example 11.8"
+    )

     provides("blas", when="+blas")
     provides("lapack", when="+lapack")
@@ -373,6 +376,8 @@ def setup_build_environment(self, env):
         env.set("NVHPC_SILENT", "true")
         env.set("NVHPC_ACCEPT_EULA", "accept")
         env.set("NVHPC_INSTALL_DIR", self.prefix)
+        if self.spec.variants["default_cuda"].value != "default":
+            env.set("NVHPC_DEFAULT_CUDA", self.spec.variants["default_cuda"].value)

         if self.spec.variants["install_type"].value == "network":
             local_dir = join_path(self._version_prefix(), "share_objects")

View File

@@ -26,19 +26,18 @@ class PyLlvmlite(PythonPackage):
     version("0.27.1", sha256="48a1c3ae69fd8920cba153bfed8a46ac46474bc706a2100226df4abffe0000ab")
     version("0.26.0", sha256="13e84fe6ebb0667233074b429fd44955f309dead3161ec89d9169145dbad2ebf")
     version("0.25.0", sha256="fd64def9a51dd7dc61913a7a08eeba5b9785522740bec5a7c5995b2a90525025")
-    version("0.23.0", sha256="bc8b1b46274d05b578fe9e980a6d98fa71c8727f6f9ed31d4d8468dce7aa5762")

     depends_on("py-setuptools", type="build")
-    depends_on("python@3.8:3.11", type=("build", "run"), when="@0.40.0:")
-    depends_on("python@3.7:3.10", type=("build", "run"), when="@0.38.0:0.39")
-    depends_on("python@3.7:3.9", type=("build", "run"), when="@0.37")
-    depends_on("python@3.6:", type=("build", "run"), when="@0.33:")
-    depends_on("python@2.6:2.8,3.4:", type=("build", "run"))
+    depends_on("python@3.8:3.11", when="@0.40:", type=("build", "run"))
+    depends_on("python@:3.10", when="@0.38:0.39", type=("build", "run"))
+    depends_on("python@:3.9", when="@0.36:0.37", type=("build", "run"))
+    depends_on("python@:3.8", when="@0.31:0.35", type=("build", "run"))
+    depends_on("python@:3.7", when="@:0.30", type=("build", "run"))

-    # llvmlite compatibility information taken from https://github.com/numba/llvmlite#compatibility
-    depends_on("llvm@14:~flang", when="@0.41:")
-    depends_on("llvm@11:14~flang", when="@0.40")
-    depends_on("llvm@11~flang", when="@0.37.0:0.39")
+    # https://github.com/numba/llvmlite#compatibility
+    depends_on("llvm@14", when="@0.41:")
+    depends_on("llvm@11:14", when="@0.40")
+    depends_on("llvm@11", when="@0.37:0.39")
     for t in [
         "arm:",
         "ppc:",
@@ -50,12 +49,12 @@ class PyLlvmlite(PythonPackage):
         "x86:",
         "x86_64:",
     ]:
-        depends_on("llvm@10.0.0:10.0~flang", when="@0.34.0:0.36 target={0}".format(t))
-    depends_on("llvm@9.0.0:9.0~flang", when="@0.34.0:0.36 target=aarch64:")
-    depends_on("llvm@9.0.0:9.0~flang", when="@0.33.0:0.33")
-    depends_on("llvm@7.0.0:8.0~flang", when="@0.29.0:0.32")
-    depends_on("llvm@7.0.0:7.0~flang", when="@0.27.0:0.28")
-    depends_on("llvm@6.0.0:6.0~flang", when="@0.23.0:0.26")
+        depends_on("llvm@10.0", when=f"@0.34:0.36 target={t}")
+    depends_on("llvm@9.0", when="@0.34:0.36 target=aarch64:")
+    depends_on("llvm@9.0", when="@0.33")
+    depends_on("llvm@7.0:7.1,8.0", when="@0.29:0.32")
+    depends_on("llvm@7.0", when="@0.27:0.28")
+    depends_on("llvm@6.0", when="@0.23:0.26")
     depends_on("binutils", type="build")

     def setup_build_environment(self, env):

View File

@@ -17,6 +17,7 @@ class PyPandas(PythonPackage):

     maintainers("adamjstewart")

+    version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918")
     version("2.0.3", sha256="c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c")
     version("2.0.2", sha256="dd5476b6c3fe410ee95926873f377b856dbc4e81a9c605a0dc05aaccc6a7c6c6")
     version("2.0.1", sha256="19b8e5270da32b41ebf12f0e7165efa7024492e9513fb46fb631c5022ae5709d")
@@ -61,70 +62,74 @@ class PyPandas(PythonPackage):
     version("0.24.2", sha256="4f919f409c433577a501e023943e582c57355d50a724c589e78bc1d551a535a2")
     version("0.24.1", sha256="435821cb2501eabbcee7e83614bd710940dc0cf28b5afbc4bdb816c31cec71af")
     version("0.23.4", sha256="5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4")
-    version("0.21.1", sha256="c5f5cba88bf0659554c41c909e1f78139f6fce8fa9315a29a23692b38ff9788a")
-    version("0.20.0", sha256="54f7a2bb2a7832c0446ad51d779806f07ec4ea2bb7c9aea4b83669fa97e778c4")
-    version("0.19.2", sha256="6f0f4f598c2b16746803c8bafef7c721c57e4844da752d36240c0acf97658014")
-    version("0.19.0", sha256="4697606cdf023c6b7fcb74e48aaf25cf282a1a00e339d2d274cf1b663748805b")
-    version("0.18.0", sha256="c975710ce8154b50f39a46aa3ea88d95b680191d1d9d4b5dd91eae7215e01814")
-    version("0.16.1", sha256="570d243f8cb068bf780461b9225d2e7bef7c90aa10d43cf908fe541fc92df8b6")
-    version("0.16.0", sha256="4013de6f8796ca9d2871218861823bd9878a8dfacd26e08ccf9afdd01bbad9f1")

     # Required dependencies
     # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#python-version-support
-    depends_on("python@3.8:", type=("build", "run"), when="@1.4:")
-    depends_on("python@3.7.1:", type=("build", "run"), when="@1.2:")
-    depends_on("python@3.6.1:", type=("build", "run"), when="@1:")
-    depends_on("python@3.5.3:", type=("build", "run"), when="@0.25:")
+    depends_on("python@3.9:3.11", when="@2.1:", type=("build", "run"))
+    depends_on("python@3.8:3.11", when="@1.5:2.0", type=("build", "run"))
+    depends_on("python@3.8:3.10", when="@1.4", type=("build", "run"))
+    depends_on("python@:3.10", when="@1.3.3:1.3", type=("build", "run"))
+    depends_on("python@:3.9", when="@1.1.3:1.3.2", type=("build", "run"))
+    depends_on("python@:3.8", when="@0.25.2:1.1.2", type=("build", "run"))
+    depends_on("python@:3.7", when="@:0.25.1", type=("build", "run"))

     # pyproject.toml
-    depends_on("py-setuptools@61:", type="build", when="@2:")
-    depends_on("py-setuptools@51:", type="build", when="@1.3.2:")
-    depends_on("py-setuptools@38.6:", type="build", when="@1.3:")
-    depends_on("py-setuptools@24.2:", type="build")
-    depends_on("py-cython@0.29.33:2", type="build", when="@2:")
-    depends_on("py-cython@0.29.32:2", type="build", when="@1.4.4:")
-    depends_on("py-cython@0.29.30:2", type="build", when="@1.4.3:")
-    depends_on("py-cython@0.29.24:2", type="build", when="@1.3.4:")
-    depends_on("py-cython@0.29.21:2", type="build", when="@1.1.3:")
-    depends_on("py-cython@0.29.16:2", type="build", when="@1.1:")
-    depends_on("py-cython@0.29.13:2", type="build", when="@1:")
-    depends_on("py-versioneer+toml", type="build", when="@2:")
+    depends_on("py-meson-python@0.13.1", when="@2.1:", type="build")
+    depends_on("meson@1.0.1", when="@2.1:", type="build")
+    depends_on("py-cython@0.29.33:2", when="@2:", type="build")
+    depends_on("py-cython@0.29.32:2", when="@1.4.4:", type="build")
+    depends_on("py-cython@0.29.30:2", when="@1.4.3:", type="build")
+    depends_on("py-cython@0.29.24:2", when="@1.3.4:", type="build")
+    depends_on("py-cython@0.29.21:2", when="@1.1.3:", type="build")
+    depends_on("py-cython@0.29.16:2", when="@1.1:", type="build")
+    depends_on("py-cython@0.29.13:2", when="@1:", type="build")
+    depends_on("py-versioneer+toml", when="@2:", type="build")

     # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#dependencies
-    depends_on("py-numpy@1.20.3:", type=("build", "run"), when="@1.5:")
-    depends_on("py-numpy@1.18.5:", type=("build", "run"), when="@1.4:")
-    depends_on("py-numpy@1.17.3:", type=("build", "run"), when="@1.3:")
-    depends_on("py-numpy@1.16.5:", type=("build", "run"), when="@1.2:")
-    depends_on("py-numpy@1.15.4:", type=("build", "run"), when="@1.1:")
-    depends_on("py-numpy@1.13.3:", type=("build", "run"), when="@0.25:")
+    depends_on("py-numpy@1.22.4:", when="@2.1:", type=("build", "run"))
+    depends_on("py-numpy@1.20.3:", when="@1.5:", type=("build", "run"))
+    depends_on("py-numpy@1.18.5:", when="@1.4:", type=("build", "run"))
+    depends_on("py-numpy@1.17.3:", when="@1.3:", type=("build", "run"))
+    depends_on("py-numpy@1.16.5:", when="@1.2:", type=("build", "run"))
+    depends_on("py-numpy@1.15.4:", when="@1.1:", type=("build", "run"))
+    depends_on("py-numpy@1.13.3:", when="@0.25:", type=("build", "run"))
     depends_on("py-numpy", type=("build", "run"))
     # 'NUMPY_IMPORT_ARRAY_RETVAL' was removed in numpy@1.19
-    depends_on("py-numpy@:1.18", type=("build", "run"), when="@:0.25")
-    depends_on("py-python-dateutil@2.8.2:", type=("build", "run"), when="@2:")
-    depends_on("py-python-dateutil@2.8.1:", type=("build", "run"), when="@1.4:")
-    depends_on("py-python-dateutil@2.7.3:", type=("build", "run"), when="@1.1:")
-    depends_on("py-python-dateutil@2.6.1:", type=("build", "run"), when="@0.25:")
+    depends_on("py-numpy@:1.18", when="@:0.25", type=("build", "run"))
+    depends_on("py-python-dateutil@2.8.2:", when="@2:", type=("build", "run"))
+    depends_on("py-python-dateutil@2.8.1:", when="@1.4:", type=("build", "run"))
+    depends_on("py-python-dateutil@2.7.3:", when="@1.1:", type=("build", "run"))
+    depends_on("py-python-dateutil@2.6.1:", when="@0.25:", type=("build", "run"))
     depends_on("py-python-dateutil", type=("build", "run"))
-    depends_on("py-pytz@2020.1:", type=("build", "run"), when="@1.4:")
-    depends_on("py-pytz@2017.3:", type=("build", "run"), when="@1.2:")
+    depends_on("py-pytz@2020.1:", when="@1.4:", type=("build", "run"))
+    depends_on("py-pytz@2017.3:", when="@1.2:", type=("build", "run"))
     depends_on("py-pytz@2017.2:", type=("build", "run"))
-    depends_on("py-tzdata@2022.1:", type=("build", "run"), when="@2:")
+    depends_on("py-tzdata@2022.1:", when="@2:", type=("build", "run"))

     # Recommended dependencies
-    # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#recommended-dependencies
-    depends_on("py-numexpr@2.7.3:", type=("build", "run"), when="@1.5:")
-    depends_on("py-numexpr@2.7.1:", type=("build", "run"), when="@1.4:")
-    depends_on("py-numexpr@2.7.0:", type=("build", "run"), when="@1.3:")
-    depends_on("py-numexpr@2.6.8:", type=("build", "run"), when="@1.2:")
-    depends_on("py-numexpr@2.6.2:", type=("build", "run"), when="@0.25:")
+    # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#performance-dependencies-recommended
+    depends_on("py-numexpr@2.8.0:", when="@2.1:", type=("build", "run"))
+    depends_on("py-numexpr@2.7.3:", when="@1.5:", type=("build", "run"))
+    depends_on("py-numexpr@2.7.1:", when="@1.4:", type=("build", "run"))
+    depends_on("py-numexpr@2.7.0:", when="@1.3:", type=("build", "run"))
+    depends_on("py-numexpr@2.6.8:", when="@1.2:", type=("build", "run"))
+    depends_on("py-numexpr@2.6.2:", when="@0.25:", type=("build", "run"))
     depends_on("py-numexpr", type=("build", "run"))
-    depends_on("py-bottleneck@1.3.2:", type=("build", "run"), when="@1.5:")
-    depends_on("py-bottleneck@1.3.1:", type=("build", "run"), when="@1.4:")
-    depends_on("py-bottleneck@1.2.1:", type=("build", "run"), when="@0.25:")
+    depends_on("py-bottleneck@1.3.4:", when="@2.1:", type=("build", "run"))
+    depends_on("py-bottleneck@1.3.2:", when="@1.5:", type=("build", "run"))
+    depends_on("py-bottleneck@1.3.1:", when="@1.4:", type=("build", "run"))
+    depends_on("py-bottleneck@1.2.1:", when="@0.25:", type=("build", "run"))
     depends_on("py-bottleneck", type=("build", "run"))
-    depends_on("py-numba@0.53.1:", type=("build", "run"), when="@2:")
+    depends_on("py-numba@0.55.2:", when="@2.1:", type=("build", "run"))
+    depends_on("py-numba@0.53.1:", when="@2.0:", type=("build", "run"))

     # Optional dependencies
     # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#optional-dependencies

+    # Historical dependencies
+    depends_on("py-setuptools@61:", when="@2.0", type="build")
+    depends_on("py-setuptools@51:", when="@1.3.2:1", type="build")
+    depends_on("py-setuptools@38.6:", when="@1.3.0:1.3.1", type="build")
+    depends_on("py-setuptools@24.2:", when="@:1.2", type="build")
+
     skip_modules = ["pandas.tests", "pandas.plotting._matplotlib", "pandas.core._numba.kernels"]