Merge branch 'develop' into dev-path-parsing-bug

This commit is contained in:
jnhealy2 2025-01-16 13:22:03 -07:00 committed by GitHub
commit 274ac74122
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 611 additions and 294 deletions

View File

@ -106,8 +106,16 @@
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.version import Version, ver
# These are just here for editor support; they will be replaced when the build env
# is set up.
make = MakeExecutable("make", jobs=1)
ninja = MakeExecutable("ninja", jobs=1)
configure = Executable(join_path(".", "configure"))
# These are just here for editor support; they may be set when the build env is set up.
configure: Executable
make_jobs: int
make: MakeExecutable
ninja: MakeExecutable
python_include: str
python_platlib: str
python_purelib: str
python: Executable
spack_cc: str
spack_cxx: str
spack_f77: str
spack_fc: str

View File

@ -3653,6 +3653,7 @@ def _resolve_automatic_splices(self):
):
continue
new_spec = spec.copy(deps=False)
new_spec.clear_caches(ignore=("package_hash",))
new_spec.build_spec = spec
for edge in spec.edges_to_dependencies():
depflag = edge.depflag & ~dt.BUILD

View File

@ -3591,25 +3591,16 @@ def patches(self):
return self._patches
def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, cleardeps=True):
"""Copy the spec other into self. This is an overwriting
copy. It does not copy any dependents (parents), but by default
copies dependencies.
To duplicate an entire DAG, call _dup() on the root of the DAG.
def _dup(self, other: "Spec", deps: Union[bool, dt.DepTypes, dt.DepFlag] = True) -> bool:
"""Copies "other" into self, by overwriting all attributes.
Args:
other (Spec): spec to be copied onto ``self``
deps: if True copies all the dependencies. If
False copies None. If deptype/depflag, copy matching types.
cleardeps (bool): if True clears the dependencies of ``self``,
before possibly copying the dependencies of ``other`` onto
``self``
other: spec to be copied onto ``self``
deps: if True copies all the dependencies. If False copies None.
If deptype, or depflag, copy matching types.
Returns:
True if ``self`` changed because of the copy operation,
False otherwise.
True if ``self`` changed because of the copy operation, False otherwise.
"""
# We don't count dependencies as changes here
changed = True
@ -3634,14 +3625,15 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
self.versions = other.versions.copy()
self.architecture = other.architecture.copy() if other.architecture else None
self.compiler = other.compiler.copy() if other.compiler else None
if cleardeps:
self._dependents = _EdgeMap(store_by_child=False)
self._dependencies = _EdgeMap(store_by_child=True)
self.compiler_flags = other.compiler_flags.copy()
self.compiler_flags.spec = self
self.variants = other.variants.copy()
self._build_spec = other._build_spec
# Clear dependencies
self._dependents = _EdgeMap(store_by_child=False)
self._dependencies = _EdgeMap(store_by_child=True)
# FIXME: we manage _patches_in_order_of_appearance specially here
# to keep it from leaking out of spec.py, but we should figure
# out how to handle it more elegantly in the Variant classes.
@ -4524,7 +4516,7 @@ def mask_build_deps(in_spec):
return spec
def clear_caches(self, ignore=()):
def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
"""
Clears all cached hashes in a Spec, while preserving other properties.
"""

View File

@ -10,33 +10,11 @@
import spack.concretize
import spack.config
import spack.deptypes as dt
import spack.solver.asp
from spack.installer import PackageInstaller
from spack.solver.asp import SolverError
from spack.spec import Spec
class CacheManager:
    """Context manager that concretizes and fake-installs a list of abstract
    specs on entry, and uninstalls them all on exit.

    Lets a test run against a database containing exactly the installed specs
    it asked for, and leaves the database clean afterwards.
    """

    def __init__(self, specs: List[str]) -> None:
        # Abstract spec strings requested by the caller
        self.req_specs = specs
        self.concr_specs: List[Spec]
        # Populated by __enter__ with the concretized specs
        self.concr_specs = []

    def __enter__(self):
        self.concr_specs = [spack.concretize.concretize_one(s) for s in self.req_specs]
        for s in self.concr_specs:
            # fake=True skips the real build; explicit=True records the spec
            # as explicitly installed in the database
            PackageInstaller([s.package], fake=True, explicit=True).install()

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Uninstall everything we installed; runs even if the body raised
        for s in self.concr_specs:
            s.package.do_uninstall()
# MacOS and Windows only work if you pass this function pointer rather than a
# closure
def _mock_has_runtime_dependencies(_x):
return True
def _make_specs_non_buildable(specs: List[str]):
output_config = {}
for spec in specs:
@ -45,203 +23,263 @@ def _make_specs_non_buildable(specs: List[str]):
@pytest.fixture
def splicing_setup(mutable_database, mock_packages, monkeypatch):
spack.config.set("concretizer:reuse", True)
monkeypatch.setattr(
spack.solver.asp, "_has_runtime_dependencies", _mock_has_runtime_dependencies
)
def install_specs(
    mutable_database,
    mock_packages,
    mutable_config,
    do_not_check_runtimes_on_reuse,
    install_mockery,
):
    """Returns a function that concretizes and installs a list of abstract specs"""
    # Reuse installed specs during concretization, so tests can exercise
    # splicing against what this helper just installed
    mutable_config.set("concretizer:reuse", True)

    def _impl(*specs_str):
        # Concretize each abstract spec string, then fake-install all of them
        # in one PackageInstaller call (fake=True skips real builds)
        concrete_specs = [Spec(s).concretized() for s in specs_str]
        PackageInstaller([s.package for s in concrete_specs], fake=True, explicit=True).install()
        return concrete_specs

    return _impl
def _enable_splicing():
    """Turn on automatic splicing in the current concretizer configuration."""
    splice_settings = {"automatic": True}
    spack.config.set("concretizer:splice", splice_settings)
def _has_build_dependency(spec: Spec, name: str):
    """Return True if ``spec`` has a build dependency named ``name``."""
    build_deps = spec.dependencies(None, dt.BUILD)
    return name in [dep.name for dep in build_deps]
@pytest.mark.parametrize("spec_str", ["splice-z", "splice-h@1"])
def test_spec_reuse(spec_str, install_specs, mutable_config):
    """Tests reuse of splice-z, without splicing, as a root and as a dependency of splice-h"""
    # Install a concrete splice-z, then forbid building it so the solver
    # is forced to reuse the installed one
    splice_z = install_specs("splice-z@1.0.0+compat")[0]
    mutable_config.set("packages", _make_specs_non_buildable(["splice-z"]))
    concrete = spack.concretize.concretize_one(spec_str)
    # The reused installed spec must appear in the solution, either as the
    # root ("splice-z") or as a dependency of the root ("splice-h@1")
    assert concrete["splice-z"].satisfies(splice_z)
def test_simple_reuse(splicing_setup):
with CacheManager(["splice-z@1.0.0+compat"]):
spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
assert spack.concretize.concretize_one("splice-z").satisfies(Spec("splice-z"))
def test_simple_dep_reuse(splicing_setup):
with CacheManager(["splice-z@1.0.0+compat"]):
spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
assert spack.concretize.concretize_one("splice-h@1").satisfies(Spec("splice-h@1"))
def test_splice_installed_hash(splicing_setup):
cache = [
@pytest.mark.regression("48578")
def test_splice_installed_hash(install_specs, mutable_config):
"""Tests splicing the dependency of an installed spec, for another installed spec"""
splice_t, splice_h = install_specs(
"splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0",
"splice-h@1.0.2+compat ^splice-z@1.0.0",
]
with CacheManager(cache):
packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
spack.config.set("packages", packages_config)
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0")
with pytest.raises(Exception):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
assert spack.concretize.concretize_one(goal_spec).satisfies(goal_spec)
)
packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
mutable_config.set("packages", packages_config)
goal_spec = "splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0"
with pytest.raises(SolverError):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
concrete = spack.concretize.concretize_one(goal_spec)
# splice-t has a dependency that is changing, thus its hash should be different
assert concrete.dag_hash() != splice_t.dag_hash()
assert concrete.build_spec.satisfies(splice_t)
assert not concrete.satisfies(splice_t)
# splice-h is reused, so the hash should stay the same
assert concrete["splice-h"].satisfies(splice_h)
assert concrete["splice-h"].build_spec.satisfies(splice_h)
assert concrete["splice-h"].dag_hash() == splice_h.dag_hash()
def test_splice_build_splice_node(splicing_setup):
with CacheManager(["splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat"]):
spack.config.set("packages", _make_specs_non_buildable(["splice-t"]))
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat")
with pytest.raises(Exception):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
assert spack.concretize.concretize_one(goal_spec).satisfies(goal_spec)
def test_splice_build_splice_node(install_specs, mutable_config):
"""Tests splicing the dependency of an installed spec, for a spec that is yet to be built"""
splice_t = install_specs("splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat")[0]
mutable_config.set("packages", _make_specs_non_buildable(["splice-t"]))
goal_spec = "splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat"
with pytest.raises(SolverError):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
concrete = spack.concretize.concretize_one(goal_spec)
# splice-t has a dependency that is changing, thus its hash should be different
assert concrete.dag_hash() != splice_t.dag_hash()
assert concrete.build_spec.satisfies(splice_t)
assert not concrete.satisfies(splice_t)
# splice-h should be different
assert concrete["splice-h"].dag_hash() != splice_t["splice-h"].dag_hash()
assert concrete["splice-h"].build_spec.dag_hash() == concrete["splice-h"].dag_hash()
def test_double_splice(splicing_setup):
cache = [
@pytest.mark.xfail(reason="the spliced splice-h has sometimes the original splice-h hash")
def test_double_splice(install_specs, mutable_config):
"""Tests splicing two dependencies of an installed spec, for other installed specs"""
splice_t, splice_h, splice_z = install_specs(
"splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat",
"splice-h@1.0.2+compat ^splice-z@1.0.1+compat",
"splice-z@1.0.2+compat",
]
with CacheManager(cache):
freeze_builds_config = _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"])
spack.config.set("packages", freeze_builds_config)
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat")
with pytest.raises(Exception):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
assert spack.concretize.concretize_one(goal_spec).satisfies(goal_spec)
)
mutable_config.set("packages", _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"]))
goal_spec = "splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat"
with pytest.raises(SolverError):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
concrete = spack.concretize.concretize_one(goal_spec)
# splice-t and splice-h have a dependency that is changing, thus its hash should be different
assert concrete.dag_hash() != splice_t.dag_hash()
assert concrete.build_spec.satisfies(splice_t)
assert not concrete.satisfies(splice_t)
assert concrete["splice-h"].dag_hash() != splice_h.dag_hash()
assert concrete["splice-h"].build_spec.satisfies(splice_h)
assert not concrete["splice-h"].satisfies(splice_h)
# splice-z is reused, so the hash should stay the same
assert concrete["splice-z"].dag_hash() == splice_z.dag_hash()
# The next two tests are mirrors of one another
def test_virtual_multi_splices_in(splicing_setup):
cache = [
"depends-on-virtual-with-abi ^virtual-abi-1",
"depends-on-virtual-with-abi ^virtual-abi-2",
]
goal_specs = [
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
]
with CacheManager(cache):
spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
for gs in goal_specs:
with pytest.raises(Exception):
spack.concretize.concretize_one(gs)
_enable_splicing()
for gs in goal_specs:
assert spack.concretize.concretize_one(gs).satisfies(gs)
@pytest.mark.parametrize(
"original_spec,goal_spec",
[
# `virtual-abi-1` can be spliced for `virtual-abi-multi abi=one` and vice-versa
(
"depends-on-virtual-with-abi ^virtual-abi-1",
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
),
(
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
"depends-on-virtual-with-abi ^virtual-abi-1",
),
# `virtual-abi-2` can be spliced for `virtual-abi-multi abi=two` and vice-versa
(
"depends-on-virtual-with-abi ^virtual-abi-2",
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
),
(
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
"depends-on-virtual-with-abi ^virtual-abi-2",
),
],
)
def test_virtual_multi_splices_in(original_spec, goal_spec, install_specs, mutable_config):
"""Tests that we can splice a virtual dependency with a different, but compatible, provider."""
original = install_specs(original_spec)[0]
mutable_config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
with pytest.raises(SolverError):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
spliced = spack.concretize.concretize_one(goal_spec)
assert spliced.dag_hash() != original.dag_hash()
assert spliced.build_spec.dag_hash() == original.dag_hash()
assert spliced["virtual-with-abi"].name != spliced.build_spec["virtual-with-abi"].name
def test_virtual_multi_can_be_spliced(splicing_setup):
cache = [
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
]
goal_specs = [
"depends-on-virtual-with-abi ^virtual-abi-1",
"depends-on-virtual-with-abi ^virtual-abi-2",
]
with CacheManager(cache):
spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
for gs in goal_specs:
with pytest.raises(Exception):
spack.concretize.concretize_one(gs)
_enable_splicing()
for gs in goal_specs:
assert spack.concretize.concretize_one(gs).satisfies(gs)
def test_manyvariant_star_matching_variant_splice(splicing_setup):
cache = [
@pytest.mark.parametrize(
"original_spec,goal_spec",
[
# can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*")
"depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
"depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
]
goal_specs = [
Spec("depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2"),
Spec("depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3"),
]
with CacheManager(cache):
freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
spack.config.set("packages", freeze_build_config)
for goal in goal_specs:
with pytest.raises(Exception):
spack.concretize.concretize_one(goal)
_enable_splicing()
for goal in goal_specs:
assert spack.concretize.concretize_one(goal).satisfies(goal)
def test_manyvariant_limited_matching(splicing_setup):
cache = [
(
"depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
"depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2",
),
(
"depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
"depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3",
),
# can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"])
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
(
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
"depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2",
),
# can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b")
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
]
goal_specs = [
Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2"),
Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3"),
]
with CacheManager(cache):
freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
spack.config.set("packages", freeze_build_config)
for s in goal_specs:
with pytest.raises(Exception):
spack.concretize.concretize_one(s)
_enable_splicing()
for s in goal_specs:
assert spack.concretize.concretize_one(s).satisfies(s)
(
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
"depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3",
),
],
)
def test_manyvariant_matching_variant_splice(
original_spec, goal_spec, install_specs, mutable_config
):
"""Tests splicing with different kind of matching on variants"""
original = install_specs(original_spec)[0]
mutable_config.set("packages", {"depends-on-manyvariants": {"buildable": False}})
with pytest.raises(SolverError):
spack.concretize.concretize_one(goal_spec)
_enable_splicing()
spliced = spack.concretize.concretize_one(goal_spec)
assert spliced.dag_hash() != original.dag_hash()
assert spliced.build_spec.dag_hash() == original.dag_hash()
# The spliced 'manyvariants' is yet to be built
assert spliced["manyvariants"].dag_hash() != original["manyvariants"].dag_hash()
assert spliced["manyvariants"].build_spec.dag_hash() == spliced["manyvariants"].dag_hash()
def test_external_splice_same_name(splicing_setup):
cache = [
def test_external_splice_same_name(install_specs, mutable_config):
"""Tests that externals can be spliced for non-external specs"""
original_splice_h, original_splice_t = install_specs(
"splice-h@1.0.0 ^splice-z@1.0.0+compat",
"splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat",
]
packages_yaml = {
"splice-z": {"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}]}
}
goal_specs = [
Spec("splice-h@1.0.0 ^splice-z@1.0.2"),
Spec("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"),
]
with CacheManager(cache):
spack.config.set("packages", packages_yaml)
_enable_splicing()
for s in goal_specs:
assert spack.concretize.concretize_one(s).satisfies(s)
)
mutable_config.set("packages", _make_specs_non_buildable(["splice-t", "splice-h"]))
mutable_config.set(
"packages",
{
"splice-z": {
"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}],
"buildable": False,
}
},
)
_enable_splicing()
concrete_splice_h = spack.concretize.concretize_one("splice-h@1.0.0 ^splice-z@1.0.2")
concrete_splice_t = spack.concretize.concretize_one(
"splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"
)
assert concrete_splice_h.dag_hash() != original_splice_h.dag_hash()
assert concrete_splice_h.build_spec.dag_hash() == original_splice_h.dag_hash()
assert concrete_splice_h["splice-z"].external
assert concrete_splice_t.dag_hash() != original_splice_t.dag_hash()
assert concrete_splice_t.build_spec.dag_hash() == original_splice_t.dag_hash()
assert concrete_splice_t["splice-z"].external
assert concrete_splice_t["splice-z"].dag_hash() == concrete_splice_h["splice-z"].dag_hash()
def test_spliced_build_deps_only_in_build_spec(splicing_setup):
cache = ["splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0"]
goal_spec = Spec("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")
def test_spliced_build_deps_only_in_build_spec(install_specs):
"""Tests that build specs are not reported in the spliced spec"""
install_specs("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0")
with CacheManager(cache):
_enable_splicing()
concr_goal = spack.concretize.concretize_one(goal_spec)
build_spec = concr_goal._build_spec
# Spec has been spliced
assert build_spec is not None
# Build spec has spliced build dependencies
assert _has_build_dependency(build_spec, "splice-h")
assert _has_build_dependency(build_spec, "splice-z")
# Spliced build dependencies are removed
assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
_enable_splicing()
spliced = spack.concretize.concretize_one("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")
build_spec = spliced.build_spec
# Spec has been spliced
assert build_spec.dag_hash() != spliced.dag_hash()
# Build spec has spliced build dependencies
assert build_spec.dependencies("splice-h", dt.BUILD)
assert build_spec.dependencies("splice-z", dt.BUILD)
# Spliced build dependencies are removed
assert len(spliced.dependencies(None, dt.BUILD)) == 0
def test_spliced_transitive_dependency(splicing_setup):
cache = ["splice-depends-on-t@1.0 ^splice-h@1.0.1"]
goal_spec = Spec("splice-depends-on-t^splice-h@1.0.2")
def test_spliced_transitive_dependency(install_specs, mutable_config):
"""Tests that build specs are not reported, even for spliced transitive dependencies"""
install_specs("splice-depends-on-t@1.0 ^splice-h@1.0.1")
mutable_config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
with CacheManager(cache):
spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
_enable_splicing()
concr_goal = spack.concretize.concretize_one(goal_spec)
# Spec has been spliced
assert concr_goal._build_spec is not None
assert concr_goal["splice-t"]._build_spec is not None
assert concr_goal.satisfies(goal_spec)
# Spliced build dependencies are removed
assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
_enable_splicing()
spliced = spack.concretize.concretize_one("splice-depends-on-t^splice-h@1.0.2")
# Spec has been spliced
assert spliced.build_spec.dag_hash() != spliced.dag_hash()
assert spliced["splice-t"].build_spec.dag_hash() != spliced["splice-t"].dag_hash()
# Spliced build dependencies are removed
assert len(spliced.dependencies(None, dt.BUILD)) == 0
assert len(spliced["splice-t"].dependencies(None, dt.BUILD)) == 0

View File

@ -2125,15 +2125,7 @@ def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]:
"from_environment_raise",
],
)
def test_env_include_concrete_reuse(monkeypatch, reuse_mode):
# The mock packages do not use the gcc-runtime
def mock_has_runtime_dependencies(*args, **kwargs):
return True
monkeypatch.setattr(
spack.solver.asp, "_has_runtime_dependencies", mock_has_runtime_dependencies
)
def test_env_include_concrete_reuse(do_not_check_runtimes_on_reuse, reuse_mode):
# The default mpi version is 3.x provided by mpich in the mock repo.
# This test verifies that concretizing with an included concrete
# environment with "concretizer:reuse:true" the included

View File

@ -3124,14 +3124,13 @@ def test_concretization_version_order():
),
],
)
@pytest.mark.usefixtures("mutable_database", "mock_store")
@pytest.mark.usefixtures("mutable_database", "mock_store", "do_not_check_runtimes_on_reuse")
@pytest.mark.not_on_windows("Expected length is different on Windows")
def test_filtering_reused_specs(
roots, reuse_yaml, expected, not_expected, expected_length, mutable_config, monkeypatch
roots, reuse_yaml, expected, not_expected, expected_length, mutable_config
):
"""Tests that we can select which specs are to be reused, using constraints as filters"""
# Assume all specs have a runtime dependency
monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", lambda x: True)
mutable_config.set("concretizer:reuse", reuse_yaml)
selector = spack.solver.asp.ReusableSpecsSelector(mutable_config)
specs = selector.reusable_specs(roots)
@ -3151,10 +3150,11 @@ def test_filtering_reused_specs(
[({"from": [{"type": "local"}]}, 17), ({"from": [{"type": "buildcache"}]}, 0)],
)
@pytest.mark.not_on_windows("Expected length is different on Windows")
def test_selecting_reused_sources(reuse_yaml, expected_length, mutable_config, monkeypatch):
def test_selecting_reused_sources(
reuse_yaml, expected_length, mutable_config, do_not_check_runtimes_on_reuse
):
"""Tests that we can turn on/off sources of reusable specs"""
# Assume all specs have a runtime dependency
monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", lambda x: True)
mutable_config.set("concretizer:reuse", reuse_yaml)
selector = spack.solver.asp.ReusableSpecsSelector(mutable_config)
specs = selector.reusable_specs(["mpileaks"])

View File

@ -92,7 +92,6 @@ src_paths = "lib"
honor_noqa = true
[tool.mypy]
python_version = 3.7
files = ['lib/spack/llnl/**/*.py', 'lib/spack/spack/**/*.py', './var/spack/repos/builtin/packages/*/package.py']
mypy_path = ['bin', 'lib/spack', 'lib/spack/external', 'var/spack/repos/builtin']
allow_redefinition = true

View File

@ -0,0 +1,13 @@
diff --git a/Makefile b/Makefile
index e607bb3cb..a4752dff6 100644
--- a/Makefile
+++ b/Makefile
@@ -203,7 +203,7 @@ endif
.PHONY: compile-arrow-cpp
compile-arrow-cpp:
- $(CHPL_CXX) -O3 -std=c++17 -c $(ARROW_CPP) -o $(ARROW_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
+ $(CHPL_CXX) -fPIC -O3 -std=c++17 -c $(ARROW_CPP) -o $(ARROW_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
$(ARROW_O): $(ARROW_CPP) $(ARROW_H)
make compile-arrow-cpp

View File

@ -0,0 +1,23 @@
diff --git a/Makefile b/Makefile
index 13a9c4be1..099896ec9 100644
--- a/Makefile
+++ b/Makefile
@@ -236,15 +236,15 @@ compile-arrow-cpp:
.PHONY: compile-arrow-write
compile-arrow-write:
- $(CHPL_CXX) -O3 -std=c++17 -c $(ARROW_WRITE_CPP) -o $(ARROW_WRITE_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
+ $(CHPL_CXX) -O3 -std=c++17 -fPIC -c $(ARROW_WRITE_CPP) -o $(ARROW_WRITE_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
.PHONY: compile-arrow-read
compile-arrow-read:
- $(CHPL_CXX) -O3 -std=c++17 -c $(ARROW_READ_CPP) -o $(ARROW_READ_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
+ $(CHPL_CXX) -O3 -std=c++17 -fPIC -c $(ARROW_READ_CPP) -o $(ARROW_READ_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
.PHONY: compile-arrow-util
compile-arrow-util:
- $(CHPL_CXX) -O3 -std=c++17 -c $(ARROW_UTIL_CPP) -o $(ARROW_UTIL_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
+ $(CHPL_CXX) -O3 -std=c++17 -fPIC -c $(ARROW_UTIL_CPP) -o $(ARROW_UTIL_O) $(INCLUDE_FLAGS) $(ARROW_SANITIZE)
$(ARROW_UTIL_O): $(ARROW_UTIL_CPP) $(ARROW_UTIL_H)
make compile-arrow-util

View File

@ -0,0 +1,109 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
from spack.package import *
from spack.util.environment import set_env
class Arkouda(MakefilePackage):
    """Arkouda is a NumPy-like library for distributed data with a focus on
    large-scale data science applications."""

    homepage = "https://github.com/Bears-R-Us/arkouda"

    # Arkouda does not have a current PyPI package, so we use the GitHub tarball
    url = "https://github.com/Bears-R-Us/arkouda/archive/refs/tags/v2024.10.02.tar.gz"
    git = "https://github.com/Bears-R-Us/arkouda.git"

    # See https://spdx.org/licenses/ for a list.
    license("MIT")

    # A list of GitHub accounts to notify when the package is updated.
    # TODO: add arkouda devs github account
    maintainers("arezaii")

    version("master", branch="master")
    version(
        "2024.10.02", sha256="00671a89a08be57ff90a94052f69bfc6fe793f7b50cf9195dd7ee794d6d13f23"
    )
    version(
        "2024.06.21", sha256="ab7f753befb3a0b8e27a3d28f3c83332d2c6ae49678877a7456f0fcfe42df51c"
    )

    variant(
        "distributed",
        default=False,
        description="Build Arkouda for multi-locale execution on a cluster or supercomputer",
    )

    depends_on("chapel@2.1: +hdf5 +zmq", type=("build", "link", "run", "test"))
    depends_on("cmake@3.13.4:", type="build")
    depends_on("python@3.9:", type=("build", "link", "run", "test"))
    depends_on("libzmq@4.2.5:", type=("build", "link", "run", "test"))
    depends_on("hdf5+hl~mpi", type=("build", "link", "run", "test"))
    depends_on("libiconv", type=("build", "link", "run", "test"))
    depends_on("libidn2", type=("build", "link", "run", "test"))
    depends_on(
        "arrow +parquet +snappy +zlib +brotli +bz2 +lz4 +zstd",
        type=("build", "link", "run", "test"),
    )

    # Single-locale builds require a Chapel compiled without communication support
    requires("^chapel comm=none", when="~distributed")
    requires("^chapel +python-bindings", when="@2024.10.02:")
    # Distributed builds need exactly one of Chapel's communication substrates
    requires(
        "^chapel comm=gasnet",
        "^chapel comm=ugni",
        "^chapel comm=ofi",
        policy="one_of",
        when="+distributed",
    )

    # Some systems need explicit -fPIC flag when building the Arrow functions
    patch("makefile-fpic-2024.06.21.patch", when="@2024.06.21")
    patch("makefile-fpic-2024.10.02.patch", when="@2024.10.02:")

    # Verify the install produced the server binary
    sanity_check_is_file = [join_path("bin", "arkouda_server")]

    def check(self):
        # skip b/c we need the python client
        pass

    # Override the default edit phase to point the Makefile at the
    # Spack-installed dependencies before building
    def edit(self, spec, prefix):
        self.update_makefile_paths(spec, prefix)

    def update_makefile_paths(self, spec, prefix):
        # add to the Makefile.paths file for all of the dependencies installed by spack
        # in the form $(eval $(call add-path,<path-to-dep-aka-prefix>))
        with open("Makefile.paths", "w") as f:
            f.write("$(eval $(call add-path,{0}))\n".format(spec["hdf5"].prefix))
            f.write("$(eval $(call add-path,{0}))\n".format(spec["libzmq"].prefix))
            f.write("$(eval $(call add-path,{0}))\n".format(spec["arrow"].prefix))
            f.write("$(eval $(call add-path,{0}))\n".format(spec["libiconv"].prefix))
            f.write("$(eval $(call add-path,{0}))\n".format(spec["libidn2"].prefix))

    def build(self, spec, prefix):
        # Detect distributed builds and skip the dependency checks built into
        # the Arkouda Makefile. These checks will try to spawn multiple jobs which may
        # cause the build to fail in situations where the user is constrained
        # to a limited number of simultaneous jobs.
        if spec.satisfies("+distributed"):
            with set_env(ARKOUDA_SKIP_CHECK_DEPS="1"):
                tty.warn("Distributed build detected. Skipping dependency checks")
                make()
        else:
            make()

    # Arkouda does not have an install target in its Makefile
    def install(self, spec, prefix):
        mkdir(prefix.bin)
        install("arkouda_server", prefix.bin)
        # Arkouda can have two executables depending on if Chapel is compiled in
        # single-locale or multi-locale mode
        if spec.satisfies("+distributed"):
            install("arkouda_server_real", prefix.bin)

View File

@ -14,6 +14,9 @@ class Elk(MakefilePackage):
license("LGPL-3.0-or-later")
version("10.2.4", sha256="015e1d2a04a6c8335af2e5f5adaae143c6c0287f34772e069834a691bb15ac9d")
version("9.6.8", sha256="d5b60406744a13be42a258a6efd9545ce38a7006d8e76e40e3770368e05c1dae")
version("8.8.26", sha256="f0d397a0e2fd8b6f74bc9fccc03fae701bb348e3f08ca143d41757f5f6cf794a")
version("8.3.22", sha256="1c31f09b7c09d6b24e775d4f0d5e1e8871f95a7656ee4ca21ac17dbe7ea16277")
version("7.2.42", sha256="73f03776dbf9b2147bfcc5b7c062af5befa0944608f6fc4b6a1e590615400fc6")
version("7.1.14", sha256="7c2ff30f4b1d72d5dc116de9d70761f2c206700c69d85dd82a17a5a6374453d2")
@ -36,7 +39,7 @@ class Elk(MakefilePackage):
# blis - use internal lapack and blas implementation from blis
variant(
"linalg",
default="internal",
default="generic",
multi=False,
description="Build with custom BLAS library",
values=("internal", "generic", "openblas", "mkl", "blis"),
@ -48,7 +51,7 @@ class Elk(MakefilePackage):
# should be used with linalg=mkls
variant(
"fft",
default="internal",
default="fftw",
multi=False,
description="Build with custom FFT library",
values=("internal", "fftw", "mkl"),
@ -63,6 +66,11 @@ class Elk(MakefilePackage):
conflicts("fft=mkl", when="linalg=openblas")
conflicts("fft=mkl", when="linalg=blis")
conflicts("linalg=internal", when="@8.6:", msg="Internal BLAS is not supported")
conflicts("fft=internal", when="@8.6:", msg="Internal FFTW is not supported")
conflicts("libxc@:6", when="@10:", msg="Versions >= 10 requires libxc >= 7")
conflicts("libxc@7:", when="@:9", msg="Versions <=9 requires libxc =< 6")
variant("mpi", default=True, description="Enable MPI parallelism")
variant("openmp", default=True, description="Enable OpenMP support")
variant("libxc", default=True, description="Link to Libxc functional library")
@ -90,7 +98,9 @@ class Elk(MakefilePackage):
depends_on("mkl", when="fft=mkl")
depends_on("mpi@2:", when="+mpi")
depends_on("libxc@5:", when="@7:+libxc")
depends_on("libxc@7:", when="@10:+libxc")
depends_on("libxc@6:", when="@:9+libxc")
depends_on("libxc@5:", when="@:7+libxc")
depends_on("libxc@:3", when="@:3+libxc")
depends_on("wannier90", when="+w90")
@ -98,6 +108,13 @@ class Elk(MakefilePackage):
parallel = False
def edit(self, spec, prefix):
if spec.satisfies("@8.6:"):
libxc_env_var_src = "SRC_LIBXC"
libxc_env_var_lib = "LIB_LIBXC"
else:
libxc_env_var_src = "SRC_libxc"
libxc_env_var_lib = "LIB_libxc"
# Dictionary of configuration options with default values assigned
config = {
"MAKE": "make",
@ -109,7 +126,7 @@ def edit(self, spec, prefix):
"SRC_OBLAS": "oblas_stub.f90",
"SRC_OMP": "omp_stub.f90",
"SRC_BLIS": "blis_stub.f90",
"SRC_libxc": "libxcifc_stub.f90",
libxc_env_var_src: "libxcifc_stub.f90",
"SRC_FFT": "zfftifc.f90",
"SRC_W90S": "w90_stub.f90",
"F90": spack_fc,
@ -173,9 +190,14 @@ def edit(self, spec, prefix):
elif spec.satisfies("fft=fftw"):
config["LIB_FFT"] = spec["fftw"].libs.ld_flags
config["SRC_FFT"] = "zfftifc_fftw.f90"
if spec.satisfies("@8.6:"):
config["LIB_FFT"] += " -lfftw3f"
config["SRC_FFT"] += " cfftifc_fftw.f90"
elif spec.satisfies("fft=mkl"):
config["LIB_FFT"] = spec["mkl"].libs.ld_flags
config["SRC_FFT"] = "mkl_dfti.f90 zfftifc_mkl.f90"
if spec.satisfies("@8.6:"):
config["SRC_FFT"] += " cfftifc_mkl.f90"
cp = which("cp")
mkl_prefix = spec["mkl"].prefix
if spec.satisfies("^intel-mkl"):
@ -185,21 +207,29 @@ def edit(self, spec, prefix):
join_path(self.build_directory, "src"),
)
if spec.satisfies("@8.6:"):
config["F90_LIB"] = " ".join([config["LIB_LPK"], config["LIB_FFT"]])
del config["LIB_LPK"]
del config["LIB_FFT"]
# Define targets
self.build_targets.append("elk")
print(self.build_targets)
# Libxc support
if spec.satisfies("+libxc"):
config["LIB_libxc"] = " ".join(
[
join_path(spec["libxc"].prefix.lib, "libxcf90.so"),
join_path(spec["libxc"].prefix.lib, "libxc.so"),
]
)
if self.spec.satisfies("@7:"):
config["SRC_libxc"] = "libxcf90.f90 libxcifc.f90"
if self.spec.satisfies("@10:"):
config[libxc_env_var_lib] = join_path(spec["libxc"].prefix.lib, "libxcf03.so")
else:
config["SRC_libxc"] = "libxc_funcs.f90 libxc.f90 libxcifc.f90"
config[libxc_env_var_lib] = join_path(spec["libxc"].prefix.lib, "libxcf90.so")
_libxc_lib = join_path(spec["libxc"].prefix.lib, "libxc.so")
config[libxc_env_var_lib] += f" {_libxc_lib}"
if self.spec.satisfies("@10:"):
config[libxc_env_var_src] = "libxcf03.f90 libxcifc.f90"
elif self.spec.satisfies("@7:9"):
config[libxc_env_var_src] = "libxcf90.f90 libxcifc.f90"
else:
config[libxc_env_var_src] = "libxc_funcs.f90 libxc.f90 libxcifc.f90"
# Write configuration options to include file
with open("make.inc", "w") as inc:
@ -222,3 +252,9 @@ def install(self, spec, prefix):
install_tree("examples", join_path(prefix, "examples"))
install_tree("species", join_path(prefix, "species"))
@on_package_attributes(run_tests=True)
def check(self):
    """Run the bundled Elk test suite (only when Spack run-tests are enabled)."""
    tests_dir = "{0}/tests".format(self.build_directory)
    with working_dir(tests_dir):
        # The upstream test harness is a shell script; invoke it through bash.
        which("bash")("./test.sh")

View File

@ -22,6 +22,7 @@ class Helics(CMakePackage):
version("develop", branch="develop", submodules=True)
version("main", branch="main", submodules=True)
version("master", branch="main", submodules=True)
version("3.6.0", sha256="e111ac5d92e808f27e330afd1f8b8ca4d86adf6ccd74e3280f2d40fb3e0e2ce9")
version("3.5.3", sha256="f9ace240510b18caf642f55d08f9009a9babb203fbc032ec7d7d8aa6fd5e1553")
version("3.5.2", sha256="c2604694698a1e33c4a68f3d1c5ab0a228ef2bfca1b0d3bae94801dbd3b11048")
version("3.5.1", sha256="546fc6e6a85de6ba841e4bd547b811cc81a67a22be5e212ccb54be139d740555")
@ -88,8 +89,10 @@ class Helics(CMakePackage):
depends_on("git", type="build", when="@master:")
depends_on("cmake@3.4:", type="build", when="@:2")
depends_on("cmake@3.10:", type="build", when="@3.0.0:3.2.1")
depends_on("cmake@3.11:", type="build", when="@3.3.0:")
depends_on("boost@1.70:", type="build", when="+boost")
depends_on("cmake@3.11:", type="build", when="@3.3.0:3.5.3")
depends_on("cmake@3.22:", type="build", when="@3.6.0:")
depends_on("boost@1.70:", type="build", when="@:3.5.3 +boost")
depends_on("boost@1.75:", type="build", when="@3.6.0: +boost")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
@ -105,11 +108,24 @@ class Helics(CMakePackage):
depends_on("python@3:", when="@:2 +python")
# Compiler restrictions based on C++ standard supported
conflicts("%gcc@:6", when="@3.0.0:", msg="HELICS 3+ cannot be built with GCC older than 7.0")
conflicts(
"%clang@:4", when="@3.0.0:", msg="HELICS 3+ cannot be built with Clang older than 5.0"
"%gcc@:6", when="@3.0.0:3.5.3", msg="HELICS 3+ cannot be built with GCC older than 7.0"
)
conflicts(
"%gcc@:11", when="@3.6.0:", msg="HELICS 3.6+ cannot be built with GCC older than 11.0"
)
conflicts(
"%clang@:4", when="@3.0.0:3.5.3", msg="HELICS 3+ cannot be built with Clang older than 5.0"
)
conflicts(
"%clang@:15", when="@3.6.0:", msg="HELICS 3.6+ cannot be built with Clang older than 15.0"
)
conflicts(
"%intel@:18", when="@3.0.0:3.5.3", msg="HELICS 3+ cannot be built with ICC older than 19"
)
conflicts(
"%intel@:21", when="@3.6.0:", msg="HELICS 3.6+ cannot be built with ICC older than 21"
)
conflicts("%intel@:18", when="@3.0.0:", msg="HELICS 3+ cannot be built with ICC older than 19")
# OpenMPI doesn't work with HELICS <=2.4.1
conflicts("^openmpi", when="@:2.4.1 +mpi")
@ -174,9 +190,9 @@ def cmake_args(self):
# Python interface was removed from the main HELICS build in v3
args.append(from_variant("BUILD_PYTHON_INTERFACE", "python"))
# GCC >=13
if spec.satisfies("%gcc@13:"):
# C++20 required when building with GCC>=13
# GCC >=13 or HELICS 3.6+
if spec.satisfies("%gcc@13:") or spec.satisfies("@3.6.0:"):
# C++20 required when building with GCC>=13 or HELICS 3.6+
args.append("-DCMAKE_CXX_STANDARD=20")
return args

View File

@ -26,8 +26,13 @@ class Julia(MakefilePackage):
maintainers("vchuravy", "haampie", "giordano")
version("master", branch="master")
version("1.11.2", sha256="5d56c7163aefbf4dfb97d97388f93175826bcc3f4b0e885fa351694f84dc70c4")
version("1.11.1", sha256="895549f40b21dee66b6380e30811f40d2d938c2baba0750de69c9a183cccd756")
version("1.11.0", sha256="a938c6b7758a83e817b56db3e542bd85e6d74db75e1381b1ba24cd6e3dc8c566")
version("1.10.7", sha256="9ff0fec7ff92e27c5909982047d1bd2dc80a32173e21a2e2e029eca2ccc1c0e1")
version("1.10.6", sha256="16a2227840a2acda80f375fc21fbd42a3da3be24bd375bc9a40ca8321e3172fe")
version("1.10.5", sha256="12b1bf720b76e51a116127b30f7a824d601347bc0999cf36a0c90f1f53d00833")
version("1.10.4", sha256="c46ed8166fe860a7258d088a0add68dfdf11ad64cc4c0b1f113570862d3ef777")
version("1.10.3", sha256="b3cd34c839d25b98a162070b4e3abd5f34564ffdad13e07073be7885e5678a18")
version("1.10.2", sha256="e3d20c02975da054aeb18d32ed84c5d760d54d2563e45e25017684a5a105d185")

View File

@ -0,0 +1,55 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyArkouda(PythonPackage):
    """This is the python client for Arkouda."""

    homepage = "https://github.com/Bears-R-Us/arkouda"
    # Updating the arkouda PyPI package is future work
    url = "https://github.com/Bears-R-Us/arkouda/archive/refs/tags/v2024.10.02.tar.gz"
    git = "https://github.com/Bears-R-Us/arkouda.git"

    # See https://spdx.org/licenses/ for a list.
    license("MIT")

    # Spack attribute: running this package's tests requires a compiler.
    test_requires_compiler = True

    # A list of GitHub accounts to notify when the package is updated.
    # TODO: add arkouda devs github account
    maintainers("arezaii")

    version("master", branch="master")
    version(
        "2024.10.02", sha256="00671a89a08be57ff90a94052f69bfc6fe793f7b50cf9195dd7ee794d6d13f23"
    )
    version(
        "2024.06.21", sha256="ab7f753befb3a0b8e27a3d28f3c83332d2c6ae49678877a7456f0fcfe42df51c"
    )

    variant("dev", default=False, description="Include arkouda developer extras")

    # The supported Python window narrowed with the 2024.10.02 release.
    depends_on("python@3.8:", type=("build", "run"), when="@:2024.06.21")
    depends_on("python@3.9:3.12.3", type=("build", "run"), when="@2024.10.02:")
    depends_on("py-setuptools", type="build")
    depends_on("py-numpy@1.24.1:1.99", type=("build", "run"))
    depends_on("py-pandas@1.4.0:", type=("build", "run"))
    conflicts("^py-pandas@2.2.0", msg="arkouda client not compatible with pandas 2.2.0")
    depends_on("py-pyarrow", type=("build", "run"))
    depends_on("py-pyzmq@20:", type=("build", "run"))
    depends_on("py-scipy@:1.13.1", type=("build", "run"), when="@2024.06.21:")
    # py-tables floor was raised for 2024.10.02+.
    depends_on("py-tables@3.7.0: +lzo +bzip2", type=("build", "run"), when="@:2024.06.21")
    depends_on("py-tables@3.8.0: +lzo +bzip2", type=("build", "run"), when="@2024.10.02:")
    depends_on("py-h5py@3.7.0:", type=("build", "run"))
    depends_on("py-matplotlib@3.3.2:", type=("build", "run"))
    depends_on("py-versioneer", type=("build"))
    depends_on("py-pyfiglet", type=("build", "run"))
    depends_on("py-typeguard@2.10:2.12", type=("build", "run"))
    depends_on("py-tabulate", type=("build", "run"))
    # NOTE(review): when="@2024.10.02" pins pytest to exactly that release;
    # presumably the open-ended "@2024.10.02:" was intended — confirm.
    depends_on("py-pytest@6.0:", type=("build", "run"), when="@2024.10.02")

View File

@ -9,38 +9,42 @@ class PyBasemap(PythonPackage):
"""The matplotlib basemap toolkit is a library for plotting
2D data on maps in Python."""
url = "https://github.com/matplotlib/basemap/archive/v1.2.0rel.tar.gz"
homepage = "https://matplotlib.org/basemap/"
url = "https://github.com/matplotlib/basemap/archive/refs/tags/v1.4.1.tar.gz"
license("MIT")
version("1.2.1", sha256="3fb30424f18cd4ffd505e30fd9c810ae81b999bb92f950c76553e1abc081faa7")
version("1.4.1", sha256="730b1e2ff5eb31c73680bd8ebabc6b11adfc587cfa6832c528a8a82822e5a490")
variant("hires", default=False, description="Install hi-res data.")
depends_on("c", type="build") # generated
depends_on("cxx", type="build") # generated
depends_on("py-cython@0.29:3.0", type="build")
depends_on("python@3.10:3", type=("build", "run"))
depends_on("py-numpy@1.21:1.26", type=("build", "run"))
depends_on("py-matplotlib@1.5:3.8", type=("build", "run"))
depends_on("py-pyproj@1.9.3:3.6", type=("build", "run"))
depends_on("py-pyshp@1.2:2.3", type=("build", "run"))
depends_on("py-packaging@16.0:23", type=("build", "run"))
depends_on("geos", type=("build", "run"))
# Per Github issue #3813, setuptools is required at runtime in order
# to make mpl_toolkits a namespace package that can span multiple
# directories (i.e., matplotlib and basemap)
depends_on("py-setuptools", type=("build", "run"))
depends_on("py-numpy@1.2.1:", type=("build", "run"))
depends_on("py-matplotlib@1.0.0:3.0.0,3.0.2:", type=("build", "run"))
# 1.2.1 is PROJ6 compatible
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=939022
depends_on("py-pyproj@1.9.3:", type=("build", "run"), when="@1.2.1:")
depends_on("py-pyshp@1.2.0:", type=("build", "run"))
depends_on("py-six", type=("build", "run"))
depends_on("pil", type=("build", "run"))
depends_on("geos")
def url_for_version(self, version):
    """Return the download URL for a given basemap *version*.

    Releases from 1.2.0 onward are fetched from GitHub; older releases
    are only available from SourceForge.
    """
    github = "https://github.com/matplotlib/basemap/archive/v{0}rel.tar.gz"
    sourceforge = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-{0}/basemap-{0}.tar.gz"
    template = github if version >= Version("1.2.0") else sourceforge
    return template.format(version)
def setup_build_environment(self, env):
    """Point basemap's build at Spack's geos installation via GEOS_DIR."""
    geos_prefix = self.spec["geos"].prefix
    env.set("GEOS_DIR", geos_prefix)
def install(self, spec, prefix):
    """Install the basemap sub-packages (core, data, and optional hi-res data).

    Each sub-package ships its own setup.py under packages/, so they are
    installed one at a time into the same prefix, in the original order.
    """
    subpackages = ["packages/basemap", "packages/basemap_data"]
    if "+hires" in spec:
        subpackages.append("packages/basemap_data_hires")
    for subdir in subpackages:
        with working_dir(subdir):
            python("setup.py", "install", "--prefix={0}".format(prefix))

View File

@ -13,10 +13,12 @@ class PyExecuting(PythonPackage):
license("MIT")
version("2.1.0", sha256="8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab")
version("1.2.0", sha256="19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107")
version("1.1.0", sha256="2c2c07d1ec4b2d8f9676b25170f1d8445c0ee2eb78901afb075a4b8d83608c6a")
version("1.0.0", sha256="98daefa9d1916a4f0d944880d5aeaf079e05585689bebd9ff9b32e31dd5e1017")
version("0.8.2", sha256="c23bf42e9a7b9b212f185b1b2c3c91feb895963378887bb10e64a2e612ec0023")
depends_on("python@:3.12", type=("build", "run"), when="@:2.0")
depends_on("py-setuptools", type="build")
depends_on("py-setuptools-scm+toml", type="build")

View File

@ -9,18 +9,35 @@ class PyPintXarray(PythonPackage):
"""A convenience wrapper for using pint with xarray"""
homepage = "https://github.com/xarray-contrib/pint-xarray"
pypi = "pint-xarray/pint-xarray-0.2.1.tar.gz"
pypi = "pint-xarray/pint_xarray-0.4.tar.gz"
license("Apache-2.0")
version("0.4", sha256="b6b737a9c46dfb14a8598c27a71100496994c9d79dab61fd77f0d2685ae7065e")
version("0.3", sha256="3545dfa78bee3f98eba29b8bd17500e3b5cb7c7b03a2c2781c4d4d59b6a82841")
version("0.2.1", sha256="1ee6bf74ee7b52b946f226a96469276fa4f5c68f7381c1b2aae66852562cb275")
depends_on("py-setuptools@42:", type="build")
depends_on("py-setuptools-scm@3.4:+toml", type="build")
with when("@0.4:"):
depends_on("py-setuptools@64:", type="build")
depends_on("py-setuptools-scm@7.0:+toml", type="build")
depends_on("python@3.9:", type=("build", "run"))
depends_on("py-numpy@1.23:", type=("build", "run"))
depends_on("py-xarray@2022.06.0:", type=("build", "run"))
depends_on("py-pint@0.21:", type=("build", "run"))
depends_on("python@3.8:", when="@0.3:", type=("build", "run"))
depends_on("py-numpy@1.17:", type=("build", "run"))
depends_on("py-xarray@0.16.1:", type=("build", "run"))
depends_on("py-pint@0.16:", type=("build", "run"))
depends_on("py-importlib-metadata", when="@0.2.1 ^python@:3.7", type=("build", "run"))
with when("@:0.3"):
depends_on("py-setuptools@42:", type="build")
depends_on("py-setuptools-scm@3.4:+toml", type="build")
depends_on("python@3.8:", type=("build", "run"))
depends_on("py-numpy@1.17:", type=("build", "run"))
depends_on("py-xarray@0.16.1:", type=("build", "run"))
depends_on("py-pint@0.16:", type=("build", "run"))
depends_on("py-importlib-metadata", when="@0.2.1 ^python@:3.7", type=("build", "run"))
def url_for_version(self, version):
if version >= Version("0.4"):
return super().url_for_version(version)
url = "https://files.pythonhosted.org/packages/source/p/pint-xarray/pint-xarray-{0}.tar.gz"
return url.format(version)

View File

@ -16,6 +16,7 @@ class PyPycuda(PythonPackage):
license("MIT")
version("2024.1.2", sha256="d110b727cbea859da4b63e91b6fa1e9fc32c5bade02d89ff449975996e9ccfab")
version("2021.1", sha256="ab87312d0fc349d9c17294a087bb9615cffcf966ad7b115f5b051008a48dd6ed")
version("2020.1", sha256="effa3b99b55af67f3afba9b0d1b64b4a0add4dd6a33bdd6786df1aa4cc8761a5")
version("2019.1.2", sha256="ada56ce98a41f9f95fe18809f38afbae473a5c62d346cfa126a2d5477f24cc8a")
@ -23,6 +24,28 @@ class PyPycuda(PythonPackage):
depends_on("cxx", type="build") # generated
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
# TODO: for versions before 2024.1.2, find out the exact requirements with version constraints
# and enter them below. See https://github.com/spack/spack/pull/48547
depends_on("python@3.6:3", type=("build", "run"), when="@2020.1:")
depends_on("python@3.8:3", type=("build", "run"), when="@2024.1.2:")
depends_on("boost+python")
depends_on("cuda", type=("build", "run"))
depends_on("cuda@:8.0.61", when="@2016.1.2")
depends_on("py-appdirs@1.4.0:", type=("build", "run"), when="@:2021.1")
depends_on("py-decorator@3.2.0:", type=("build", "run"), when="@:2020.1")
depends_on("py-mako", type=("build", "run"))
depends_on("py-numpy@1.6:", type=("build", "run"), when="@:2021.1")
depends_on("py-numpy@1.24:", type=("build", "run"), when="@2024.1.2:")
depends_on("py-platformdirs@2.2:", type=("build", "run"), when="@2024.1.2:")
depends_on("py-pytools@2011.2:", type=("build", "run"))
depends_on("py-setuptools", type="build")
depends_on("py-six", type="run", when="@:2020.1")
depends_on("py-pytools@2011.2:", type=("build", "run"), when="@2024.1.2:")
@run_before("install")
def configure(self):
pyver = self.spec["python"].version.up_to(2).joined
@ -34,20 +57,3 @@ def configure(self):
"--boost-python-libname={0}".format(boostlib),
]
python("configure.py", *configure_args)
depends_on("py-setuptools", type="build")
depends_on("cuda")
depends_on("boost+python")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on("python@3.6:3", type=("build", "run"), when="@2020.1:")
depends_on("py-numpy@1.6:", type=("build", "run"))
depends_on("py-pytools@2011.2:", type=("build", "run"))
depends_on("py-six", type="run", when="@:2020.1")
depends_on("py-decorator@3.2.0:", type=("build", "run"), when="@:2020.1")
depends_on("py-appdirs@1.4.0:", type=("build", "run"))
depends_on("py-mako", type=("build", "run"))
depends_on("cuda@:8.0.61", when="@2016.1.2")

View File

@ -843,6 +843,7 @@ def setup_run_environment(self, env):
# the following vars are copied from thisroot.sh; silence a cppyy warning
env.set("CLING_STANDARD_PCH", "none")
env.set("CPPYY_API_PATH", "none")
env.set("CPPYY_BACKEND_LIBRARY", self.prefix.lib.root.libcppyy_backend)
if "+rpath" not in self.spec:
env.prepend_path(self.root_library_path, self.prefix.lib.root)