Compare commits


1 Commit

Author: Todd Gamblin
SHA1: 6cbe4e1311
Message: spec: add {install_status} format attribute
`{install_status}` is handled in a funny way in `Spec.tree()`, and it can't be used in
other useful places like `Spec.format()`.

- [x] Make `{install_status}` a format attribute like most other things we want to print
      about specs.

- [x] Refactor whitespace handling in `Spec.format()` to only strip whitespace that wasn't
      in the original format string (i.e. that was added by our own attributes)
Date: 2024-02-16 22:46:58 -08:00
402 changed files with 8769 additions and 6034 deletions
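
For orientation, here is a minimal usage sketch of the `{install_status}` attribute described in the commit message, based on the `Spec.format()`/`Spec.tree()` hunks and the test further down in this comparison. The spec name and the `concretized()` call are illustrative, and the calls only work on the side of the comparison that contains this commit:

```python
# Hedged sketch of the new {install_status} format attribute (see the spec hunks below).
import spack.spec

s = spack.spec.Spec("zlib").concretized()  # illustrative spec; assumes a working Spack setup

# install status is now an ordinary format attribute, usable anywhere format() is
print(s.format("{install_status} {name}"))       # e.g. "[+] zlib" when installed

# tree() takes a boolean flag instead of the old status_fn callable
print(s.tree(install_status=True, hashes=True, hashlen=7))
```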

View File

@@ -43,7 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1 # @v2.1.0
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits

View File

@@ -96,7 +96,7 @@ jobs:
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
- name: Log in to GitHub Container Registry
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d

View File

@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
with:
flags: unittests,linux,${{ matrix.concretizer }}
# Test shell integration
@@ -122,7 +122,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
with:
flags: shelltests,linux
@@ -181,7 +181,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1 # @v2.1.0
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
with:
flags: unittests,linux,clingo
# Run unit tests on MacOS
@@ -216,6 +216,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
with:
flags: unittests,macos

View File

@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
with:
flags: unittests,windows
unit-tests-cmd:
@@ -57,7 +57,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
with:
flags: unittests,windows
build-abseil:

View File

@@ -59,7 +59,6 @@ upload:
apidoc:
sphinx-apidoc -f -T -o . ../spack
sphinx-apidoc -f -T -o . ../llnl
./nosearch-api-docs # set :nosearch: at top of each file
help:
@echo "Please use \`make <target>' where <target> is one of"

View File

@@ -1,6 +0,0 @@
#!/bin/sh
# Set :nosearch: at top of each api doc file
for filename in {spack,llnl}.*.rst; do
$(echo ":nosearch:"; cat $filename) > $filename
done

View File

@@ -5,8 +5,8 @@ sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.0.2
urllib3==2.2.0
pytest==8.0.1
isort==5.13.2
black==24.2.0
flake8==7.0.0

View File

@@ -1240,47 +1240,6 @@ def get_single_file(directory):
return fnames[0]
@system_path_filter
def windows_sfn(path: os.PathLike):
"""Returns 8.3 Filename (SFN) representation of
path
8.3 Filenames (SFN or short filename) is a file
naming convention used prior to Win95 that Windows
still (and will continue to) support. This convention
caps filenames at 8 characters and, among other restrictions,
does not allow spaces.
The scheme is generally the same as a normal Windows
file scheme, but all spaces are removed and the filename
is capped at 6 characters. The remaining characters are
replaced with ~N, where N is the ordinal of the file among
entries sharing the same truncated name in its directory, e.g. Program Files and Program Files (x86)
would be PROGRA~1 and PROGRA~2 respectively.
Further, all file/directory names are all caps (although modern Windows
is case insensitive in practice).
Conversion is accomplished by fileapi.h GetShortPathNameW
Returns paths in 8.3 Filename form
Note: this method is a no-op on Linux
Args:
path: Path to be transformed into SFN (8.3 filename) format
"""
# This should not be run-able on linux/macos
if sys.platform != "win32":
return path
path = str(path)
import ctypes
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
# stub Windows types TCHAR[LENGTH]
TCHAR_arr = ctypes.c_wchar * len(path)
ret_str = TCHAR_arr()
k32.GetShortPathNameW(path, ret_str, len(path))
return ret_str.value
@contextmanager
def temp_cwd():
tmp_dir = tempfile.mkdtemp()
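
For reference, a hedged sketch of how the `windows_sfn` helper shown in the hunk above is used, per its docstring; the import path is inferred from the hunk context and the printed values are illustrative:

```python
# Hedged usage sketch of windows_sfn (present on the base side of this comparison).
from llnl.util.filesystem import windows_sfn  # import path assumed from the hunk context

print(windows_sfn(r"C:\Program Files"))      # e.g. C:\PROGRA~1 on Windows
print(windows_sfn("/usr/local/some path"))   # no-op on Linux/macOS: path returned unchanged
```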

View File

@@ -189,7 +189,6 @@ def _windows_can_symlink() -> bool:
import llnl.util.filesystem as fs
fs.touchp(fpath)
fs.mkdirp(dpath)
try:
os.symlink(dpath, dlink)

View File

@@ -1541,7 +1541,7 @@ def fetch_url_to_mirror(url):
response = spack.oci.opener.urlopen(
urllib.request.Request(
url=ref.manifest_url(),
headers={"Accept": ", ".join(spack.oci.oci.manifest_content_type)},
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except Exception:

View File

@@ -69,7 +69,7 @@ class MSBuildBuilder(BaseBuilder):
@property
def build_directory(self):
"""Return the directory containing the MSBuild solution or vcxproj."""
return fs.windows_sfn(self.pkg.stage.source_path)
return self.pkg.stage.source_path
@property
def toolchain_version(self):

View File

@@ -77,11 +77,7 @@ def ignore_quotes(self):
@property
def build_directory(self):
"""Return the directory containing the makefile."""
return (
fs.windows_sfn(self.pkg.stage.source_path)
if not self.makefile_root
else fs.windows_sfn(self.makefile_root)
)
return self.pkg.stage.source_path if not self.makefile_root else self.makefile_root
@property
def std_nmake_args(self):

View File

@@ -2,10 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
import inspect
import operator
import os
import re
import shutil
@@ -183,7 +180,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
except (OSError, KeyError):
target = None
if target:
os.symlink(os.path.relpath(target, os.path.dirname(dst)), dst)
os.symlink(target, dst)
else:
view.link(src, dst, spec=self.spec)
@@ -371,19 +368,16 @@ def headers(self) -> HeaderList:
# Remove py- prefix in package name
name = self.spec.name[3:]
# Headers should only be in include or platlib, but no harm in checking purelib too
# Headers may be in either location
include = self.prefix.join(self.spec["python"].package.include).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
headers_list = map(fs.find_all_headers, [include, platlib, purelib])
headers = functools.reduce(operator.add, headers_list)
headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
if headers:
return headers
msg = "Unable to locate {} headers in {}, {}, or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib, purelib))
msg = "Unable to locate {} headers in {} or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
@property
def libs(self) -> LibraryList:
@@ -392,19 +386,15 @@ def libs(self) -> LibraryList:
# Remove py- prefix in package name
name = self.spec.name[3:]
# Libraries should only be in platlib, but no harm in checking purelib too
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
root = self.prefix.join(self.spec["python"].package.platlib).join(name)
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
libs_list = map(find_all_libraries, [platlib, purelib])
libs = functools.reduce(operator.add, libs_list)
libs = fs.find_all_libraries(root, recursive=True)
if libs:
return libs
msg = "Unable to recursively locate {} libraries in {} or {}"
raise NoLibrariesError(msg.format(self.spec.name, platlib, purelib))
msg = "Unable to recursively locate {} libraries in {}"
raise NoLibrariesError(msg.format(self.spec.name, root))
@spack.builder.builder("python_pip")

View File

@@ -162,9 +162,23 @@ def hip_flags(amdgpu_target):
# Add compiler minimum versions based on the first release where the
# processor is included in llvm/lib/Support/TargetParser.cpp
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx900:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx906:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx908:xnack-")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx90c")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1032")
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1033")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx1034")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1035")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")

View File

@@ -594,15 +594,6 @@ def _put_manifest(
base_manifest, base_config = base_images[architecture]
env = _retrieve_env_dict_from_config(base_config)
# If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
# This is because Singularity / Apptainer is very strict about not mixing them.
base_manifest_mediaType = base_manifest.get(
"mediaType", "application/vnd.oci.image.manifest.v1+json"
)
use_docker_format = (
base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
)
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
# Create an oci.image.config file
@@ -634,8 +625,8 @@ def _put_manifest(
# Upload the config file
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
manifest = {
"mediaType": base_manifest_mediaType,
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
"config": {
"mediaType": base_manifest["config"]["mediaType"],
@@ -646,11 +637,7 @@ def _put_manifest(
*(layer for layer in base_manifest["layers"]),
*(
{
"mediaType": (
"application/vnd.docker.image.rootfs.diff.tar.gzip"
if use_docker_format
else "application/vnd.oci.image.layer.v1.tar+gzip"
),
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(checksums[s.dag_hash()].compressed_digest),
"size": checksums[s.dag_hash()].size,
}
@@ -659,11 +646,11 @@ def _put_manifest(
],
}
if not use_docker_format and annotations:
manifest["annotations"] = annotations
if annotations:
oci_manifest["annotations"] = annotations
# Finally upload the manifest
upload_manifest_with_retry(image_ref, manifest=manifest)
upload_manifest_with_retry(image_ref, oci_manifest=oci_manifest)
# delete the config file
os.unlink(config_file)

View File

@@ -115,7 +115,7 @@ def emulate_env_utility(cmd_name, context: Context, args):
f"Not all dependencies of {spec.name} are installed. "
f"Cannot setup {context} environment:",
spec.tree(
status_fn=spack.spec.Spec.install_status,
install_status=True,
hashlen=7,
hashes=True,
# This shows more than necessary, but we cannot dynamically change deptypes

View File

@@ -270,8 +270,7 @@ def create_temp_env_directory():
def _tty_info(msg):
"""tty.info like function that prints the equivalent printf statement for eval."""
decorated = f'{colorize("@*b{==>}")} {msg}\n'
executor = "echo" if sys.platform == "win32" else "printf"
print(f"{executor} {shlex.quote(decorated)};")
print(f"printf {shlex.quote(decorated)};")
def env_activate(args):

View File

@@ -127,13 +127,14 @@ def _process_result(result, show, required_format, kwargs):
print()
if result.unsolved_specs and "solutions" in show:
tty.msg(asp.Result.format_unsolved(result.unsolved_specs))
tty.msg("Unsolved specs")
for spec in result.unsolved_specs:
print(spec)
print()
def solve(parser, args):
# these are the same options as `spack spec`
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
if args.namespaces:
fmt = "{namespace}." + fmt
@@ -143,7 +144,7 @@ def solve(parser, args):
"format": fmt,
"hashlen": None if args.very_long else 7,
"show_types": args.types,
"status_fn": install_status_fn if args.install_status else None,
"install_status": args.install_status,
"hashes": args.long or args.very_long,
}

View File

@@ -75,8 +75,6 @@ def setup_parser(subparser):
def spec(parser, args):
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
if args.namespaces:
fmt = "{namespace}." + fmt
@@ -86,7 +84,7 @@ def spec(parser, args):
"format": fmt,
"hashlen": None if args.very_long else 7,
"show_types": args.types,
"status_fn": install_status_fn if args.install_status else None,
"install_status": args.install_status,
}
# use a read transaction if we are getting install status for every

View File

@@ -1687,11 +1687,7 @@ def root(key, record):
with self.read_transaction():
roots = [rec.spec for key, rec in self._data.items() if root(key, rec)]
needed = set(id(spec) for spec in tr.traverse_nodes(roots, deptype=deptype))
return [
rec.spec
for rec in self._data.values()
if id(rec.spec) not in needed and rec.installed
]
return [rec.spec for rec in self._data.values() if id(rec.spec) not in needed]
def update_explicit(self, spec, explicit):
"""

View File

@@ -1485,6 +1485,44 @@ def _concretize_separately(self, tests=False):
]
return results
def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
"""Concretize and add a single spec to the environment.
Concretize the provided ``user_spec`` and add it along with the
concretized result to the environment. If the given ``user_spec`` was
already present in the environment, this does not add a duplicate.
The concretized spec will be added unless the ``user_spec`` was
already present and an associated concrete spec was already present.
Args:
concrete_spec: if provided, then it is assumed that it is the
result of concretizing the provided ``user_spec``
"""
if self.unify is True:
msg = (
"cannot install a single spec in an environment that is "
"configured to be concretized together. Run instead:\n\n"
" $ spack add <spec>\n"
" $ spack install\n"
)
raise SpackEnvironmentError(msg)
spec = Spec(user_spec)
if self.add(spec):
concrete = concrete_spec or spec.concretized(tests=tests)
self._add_concrete_spec(spec, concrete)
else:
# spec might be in the user_specs, but not installed.
# TODO: Redo name-based comparison for old style envs
spec = next(s for s in self.user_specs if s.satisfies(user_spec))
concrete = self.specs_by_hash.get(spec.dag_hash())
if not concrete:
concrete = spec.concretized(tests=tests)
self._add_concrete_spec(spec, concrete)
return concrete
@property
def default_view(self):
if not self.has_view(default_view_name):
@@ -2174,7 +2212,7 @@ def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
format=spack.spec.DISPLAY_FORMAT,
status_fn=spack.spec.Spec.install_status,
install_status=True,
hashlen=7,
hashes=True,
)
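
A hedged sketch of how the `concretize_and_add` method documented in the first hunk above is meant to be called (present on the side of the comparison that contains the commit at the top); the environment name is hypothetical and unified concretization must be disabled:

```python
# Hedged sketch of Environment.concretize_and_add(), per its docstring above.
import spack.environment as ev

env = ev.read("myenv")  # hypothetical, pre-existing environment with unify not set to True
with env.write_transaction():
    concrete = env.concretize_and_add("zlib")  # adds the abstract spec and concretizes it
    env.write()
print(concrete.dag_hash())
```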

View File

@@ -950,10 +950,14 @@ def _main(argv=None):
parser.print_help()
return 1
# version is special as it does not require a command or loading additional infrastructure
# -h, -H, and -V are special as they do not require a command, but
# all the other options do nothing without a command.
if args.version:
print(get_version())
return 0
elif args.help:
sys.stdout.write(parser.format_help(level=args.help))
return 0
# ------------------------------------------------------------------------
# This part of the `main()` sets up Spack's configuration.
@@ -992,12 +996,6 @@ def _main(argv=None):
print_setup_info(*args.print_shell_vars.split(","))
return 0
# -h and -H are special as they do not require a command, but
# all the other options do nothing without a command.
if args.help:
sys.stdout.write(parser.format_help(level=args.help))
return 0
# At this point we've considered all the options to spack itself, so we
# need a command or we're done.
if not args.command:

View File

@@ -161,7 +161,7 @@ def upload_blob(
def upload_manifest(
ref: ImageReference,
manifest: dict,
oci_manifest: dict,
tag: bool = True,
_urlopen: spack.oci.opener.MaybeOpen = None,
):
@@ -169,7 +169,7 @@ def upload_manifest(
Args:
ref: The image reference.
manifest: The manifest or index.
oci_manifest: The OCI manifest or index.
tag: When true, use the tag, otherwise use the digest,
this is relevant for multi-arch images, where the
tag is an index, referencing the manifests by digest.
@@ -179,7 +179,7 @@ def upload_manifest(
"""
_urlopen = _urlopen or spack.oci.opener.urlopen
data = json.dumps(manifest, separators=(",", ":")).encode()
data = json.dumps(oci_manifest, separators=(",", ":")).encode()
digest = Digest.from_sha256(hashlib.sha256(data).hexdigest())
size = len(data)
@@ -190,7 +190,7 @@ def upload_manifest(
url=ref.manifest_url(),
method="PUT",
data=data,
headers={"Content-Type": manifest["mediaType"]},
headers={"Content-Type": oci_manifest["mediaType"]},
)
response = _urlopen(request)

View File

@@ -566,7 +566,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
provided: Dict["spack.spec.Spec", Set["spack.spec.Spec"]]
provided_together: Dict["spack.spec.Spec", List[Set[str]]]
patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
variants: Dict[str, Tuple["spack.variant.Variant", "spack.spec.Spec"]]
#: By default, packages are not virtual
#: Virtual packages override this attribute

View File

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.util.path
def get_projection(projections, spec):
"""
@@ -13,7 +11,7 @@ def get_projection(projections, spec):
all_projection = None
for spec_like, projection in projections.items():
if spec.satisfies(spec_like):
return spack.util.path.substitute_path_variables(projection)
return projection
elif spec_like == "all":
all_projection = spack.util.path.substitute_path_variables(projection)
all_projection = projection
return all_projection

View File

@@ -6,6 +6,7 @@
import warnings
import llnl.util.lang
import llnl.util.tty
# jsonschema is imported lazily as it is heavy to import
@@ -61,3 +62,25 @@ def _deprecated_properties(validator, deprecated, instance, schema):
Validator = llnl.util.lang.Singleton(_make_validator)
spec_list_schema = {
"type": "array",
"default": [],
"items": {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
"properties": {
"matrix": {
"type": "array",
"items": {"type": "array", "items": {"type": "string"}},
},
"exclude": {"type": "array", "items": {"type": "string"}},
},
},
{"type": "string"},
{"type": "null"},
]
},
}

View File

@@ -10,7 +10,7 @@
"""
from typing import Any, Dict
from .spec_list import spec_list_schema
import spack.schema
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
@@ -20,7 +20,7 @@
"items": {
"type": "object",
"properties": {"when": {"type": "string"}},
"patternProperties": {r"^(?!when$)\w*": spec_list_schema},
"patternProperties": {r"^(?!when$)\w*": spack.schema.spec_list_schema},
},
}
}

View File

@@ -16,11 +16,11 @@
import spack.schema.merged
import spack.schema.projections
from .spec_list import spec_list_schema
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"
projections_scheme = spack.schema.projections.properties["projections"]
properties: Dict[str, Any] = {
"spack": {
"type": "object",
@@ -34,7 +34,7 @@
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spec_list_schema,
"specs": spack.schema.spec_list_schema,
},
),
}

View File

@@ -0,0 +1,46 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for spack environment
.. literalinclude:: _spack_root/lib/spack/spack/schema/spack.py
:lines: 20-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema
import spack.schema.gitlab_ci as ci_schema # DEPRECATED
import spack.schema.merged as merged_schema
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
ci_schema.properties,
# merged configuration scope schemas
merged_schema.properties,
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spack.schema.spec_list_schema,
},
),
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
}

View File

@@ -1,24 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
matrix_schema = {"type": "array", "items": {"type": "array", "items": {"type": "string"}}}
spec_list_schema = {
"type": "array",
"default": [],
"items": {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
"properties": {
"matrix": matrix_schema,
"exclude": {"type": "array", "items": {"type": "string"}},
},
},
{"type": "string"},
{"type": "null"},
]
},
}

View File

@@ -15,7 +15,7 @@
import types
import typing
import warnings
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
from typing import Callable, Dict, List, NamedTuple, Optional, Sequence, Set, Tuple, Union
import archspec.cpu
@@ -258,7 +258,7 @@ def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunc
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
def _create_counter(specs: List[spack.spec.Spec], tests: bool):
def _create_counter(specs, tests):
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests)
@@ -411,7 +411,7 @@ def raise_if_unsat(self):
"""
Raise an appropriate error if the result is unsatisfiable.
The error is a SolverError, and includes the minimized cores
The error is an InternalConcretizerError, and includes the minimized cores
resulting from the solve, formatted to be human readable.
"""
if self.satisfiable:
@@ -422,7 +422,7 @@ def raise_if_unsat(self):
constraints = constraints[0]
conflicts = self.format_minimal_cores()
raise SolverError(constraints, conflicts=conflicts)
raise InternalConcretizerError(constraints, conflicts=conflicts)
@property
def specs(self):
@@ -435,10 +435,7 @@ def specs(self):
@property
def unsolved_specs(self):
"""List of tuples pairing abstract input specs that were not
solved with their associated candidate spec from the solver
(if the solve completed).
"""
"""List of abstract input specs that were not solved."""
if self._unsolved_specs is None:
self._compute_specs_from_answer_set()
return self._unsolved_specs
@@ -452,7 +449,7 @@ def specs_by_input(self):
def _compute_specs_from_answer_set(self):
if not self.satisfiable:
self._concrete_specs = []
self._unsolved_specs = list((x, None) for x in self.abstract_specs)
self._unsolved_specs = self.abstract_specs
self._concrete_specs_by_input = {}
return
@@ -473,22 +470,7 @@ def _compute_specs_from_answer_set(self):
self._concrete_specs.append(answer[node])
self._concrete_specs_by_input[input_spec] = answer[node]
else:
self._unsolved_specs.append((input_spec, candidate))
@staticmethod
def format_unsolved(unsolved_specs):
"""Create a message providing info on unsolved user specs and for
each one show the associated candidate spec from the solver (if
there is one).
"""
msg = "Unsatisfied input specs:"
for input_spec, candidate in unsolved_specs:
msg += f"\n\tInput spec: {str(input_spec)}"
if candidate:
msg += f"\n\tCandidate spec: {str(candidate)}"
else:
msg += "\n\t(No candidate specs from solver)"
return msg
self._unsolved_specs.append(input_spec)
def _normalize_packages_yaml(packages_yaml):
@@ -823,13 +805,6 @@ def on_model(model):
print("Statistics:")
pprint.pprint(self.control.statistics)
if result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: the solver completed but produced specs"
f" that do not satisfy the request.\n\t{unsolved_str}"
)
return result, timer, self.control.statistics
@@ -897,41 +872,35 @@ def __iter__(self):
return iter(self.data)
# types for condition caching in solver setup
ConditionSpecKey = Tuple[str, Optional[TransformFunction]]
ConditionIdFunctionPair = Tuple[int, List[AspFunction]]
ConditionSpecCache = Dict[str, Dict[ConditionSpecKey, ConditionIdFunctionPair]]
class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""
def __init__(self, tests: bool = False):
# these are all initialized in setup()
self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
self.possible_virtuals: Set[str] = set()
def __init__(self, tests=False):
self.gen = None # set by setup()
self.assumptions: List[Tuple["clingo.Symbol", bool]] = [] # type: ignore[name-defined]
self.declared_versions: Dict[str, List[DeclaredVersion]] = collections.defaultdict(list)
self.possible_versions: Dict[str, Set[GitOrStandardVersion]] = collections.defaultdict(set)
self.deprecated_versions: Dict[str, Set[GitOrStandardVersion]] = collections.defaultdict(
set
)
self.assumptions = []
self.declared_versions = collections.defaultdict(list)
self.possible_versions = collections.defaultdict(set)
self.deprecated_versions = collections.defaultdict(set)
self.possible_compilers: List = []
self.possible_oses: Set = set()
self.variant_values_from_specs: Set = set()
self.version_constraints: Set = set()
self.target_constraints: Set = set()
self.default_targets: List = []
self.compiler_version_constraints: Set = set()
self.post_facts: List = []
self.possible_virtuals = None
self.possible_compilers = []
self.possible_oses = set()
self.variant_values_from_specs = set()
self.version_constraints = set()
self.target_constraints = set()
self.default_targets = []
self.compiler_version_constraints = set()
self.post_facts = []
self.reusable_and_possible: ConcreteSpecsByHash = ConcreteSpecsByHash()
# (ID, CompilerSpec) -> dictionary of attributes
self.compiler_info = collections.defaultdict(dict)
self._id_counter: Iterator[int] = itertools.count()
self._trigger_cache: ConditionSpecCache = collections.defaultdict(dict)
self._effect_cache: ConditionSpecCache = collections.defaultdict(dict)
self.reusable_and_possible = ConcreteSpecsByHash()
self._id_counter = itertools.count()
self._trigger_cache = collections.defaultdict(dict)
self._effect_cache = collections.defaultdict(dict)
# Caches to optimize the setup phase of the solver
self.target_specs_cache = None
@@ -943,8 +912,8 @@ def __init__(self, tests: bool = False):
self.concretize_everything = True
# Set during the call to setup
self.pkgs: Set[str] = set()
self.explicitly_required_namespaces: Dict[str, str] = {}
self.pkgs = None
self.explicitly_required_namespaces = {}
def pkg_version_rules(self, pkg):
"""Output declared versions of a package.
@@ -1228,38 +1197,6 @@ def variant_rules(self, pkg):
self.gen.newline()
def _get_condition_id(
self,
named_cond: spack.spec.Spec,
cache: ConditionSpecCache,
body: bool,
transform: Optional[TransformFunction] = None,
) -> int:
"""Get the id for one half of a condition (either a trigger or an imposed constraint).
Construct a key from the condition spec and any associated transformation, and
cache the ASP functions that they imply. The saved functions will be output
later in ``trigger_rules()`` and ``effect_rules()``.
Returns:
The id of the cached trigger or effect.
"""
pkg_cache = cache[named_cond.name]
named_cond_key = (str(named_cond), transform)
result = pkg_cache.get(named_cond_key)
if result:
return result[0]
cond_id = next(self._id_counter)
requirements = self.spec_clauses(named_cond, body=body)
if transform:
requirements = transform(named_cond, requirements)
pkg_cache[named_cond_key] = (cond_id, requirements)
return cond_id
def condition(
self,
required_spec: spack.spec.Spec,
@@ -1285,8 +1222,7 @@ def condition(
"""
named_cond = required_spec.copy()
named_cond.name = named_cond.name or name
if not named_cond.name:
raise ValueError(f"Must provide a name for anonymous condition: '{named_cond}'")
assert named_cond.name, "must provide name for anonymous conditions!"
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the caller,
@@ -1296,19 +1232,35 @@ def condition(
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
trigger_id = self._get_condition_id(
named_cond, cache=self._trigger_cache, body=True, transform=transform_required
)
cache = self._trigger_cache[named_cond.name]
named_cond_key = (str(named_cond), transform_required)
if named_cond_key not in cache:
trigger_id = next(self._id_counter)
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
if transform_required:
requirements = transform_required(named_cond, requirements)
cache[named_cond_key] = (trigger_id, requirements)
trigger_id, requirements = cache[named_cond_key]
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
if not imposed_spec:
return condition_id
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
cache = self._effect_cache[named_cond.name]
imposed_spec_key = (str(imposed_spec), transform_imposed)
if imposed_spec_key not in cache:
effect_id = next(self._id_counter)
requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)
if transform_imposed:
requirements = transform_imposed(imposed_spec, requirements)
cache[imposed_spec_key] = (effect_id, requirements)
effect_id, requirements = cache[imposed_spec_key]
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
return condition_id
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
@@ -1410,13 +1362,23 @@ def virtual_preferences(self, pkg_name, func):
def provider_defaults(self):
self.gen.h2("Default virtual providers")
msg = (
"Internal Error: possible_virtuals is not populated. Please report to the spack"
" maintainers"
)
assert self.possible_virtuals is not None, msg
self.virtual_preferences(
"all", lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i))
)
def provider_requirements(self):
self.gen.h2("Requirements on virtual providers")
msg = (
"Internal Error: possible_virtuals is not populated. Please report to the spack"
" maintainers"
)
parser = RequirementParser(spack.config.CONFIG)
assert self.possible_virtuals is not None, msg
for virtual_str in sorted(self.possible_virtuals):
rules = parser.rules_from_virtual(virtual_str)
if rules:
@@ -1615,57 +1577,35 @@ def flag_defaults(self):
fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
)
def spec_clauses(
self,
spec: spack.spec.Spec,
*,
body: bool = False,
transitive: bool = True,
expand_hashes: bool = False,
concrete_build_deps=False,
required_from: Optional[str] = None,
) -> List[AspFunction]:
"""Wrap a call to `_spec_clauses()` into a try/except block with better error handling.
Arguments are as for ``_spec_clauses()`` except ``required_from``.
Arguments:
required_from: name of package that caused this call.
def spec_clauses(self, *args, **kwargs):
"""Wrap a call to `_spec_clauses()` into a try/except block that
raises a comprehensible error message in case of failure.
"""
requestor = kwargs.pop("required_from", None)
try:
clauses = self._spec_clauses(
spec,
body=body,
transitive=transitive,
expand_hashes=expand_hashes,
concrete_build_deps=concrete_build_deps,
)
clauses = self._spec_clauses(*args, **kwargs)
except RuntimeError as exc:
msg = str(exc)
if required_from:
msg += f" [required from package '{required_from}']"
if requestor:
msg += ' [required from package "{0}"]'.format(requestor)
raise RuntimeError(msg)
return clauses
def _spec_clauses(
self,
spec: spack.spec.Spec,
*,
body: bool = False,
transitive: bool = True,
expand_hashes: bool = False,
concrete_build_deps: bool = False,
) -> List[AspFunction]:
self, spec, body=False, transitive=True, expand_hashes=False, concrete_build_deps=False
):
"""Return a list of clauses for a spec mandates are true.
Arguments:
spec: the spec to analyze
body: if True, generate clauses to be used in rule bodies (final values) instead
of rule heads (setters).
transitive: if False, don't generate clauses from dependencies (default True)
expand_hashes: if True, descend into hashes of concrete specs (default False)
concrete_build_deps: if False, do not include pure build deps of concrete specs
(as they have no effect on runtime constraints)
spec (spack.spec.Spec): the spec to analyze
body (bool): if True, generate clauses to be used in rule bodies
(final values) instead of rule heads (setters).
transitive (bool): if False, don't generate clauses from
dependencies (default True)
expand_hashes (bool): if True, descend into hashes of concrete specs
(default False)
concrete_build_deps (bool): if False, do not include pure build deps
of concrete specs (as they have no effect on runtime constraints)
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
@@ -1675,7 +1615,7 @@ def _spec_clauses(
"""
clauses = []
f: Union[Type[_Head], Type[_Body]] = _Body if body else _Head
f = _Body if body else _Head
if spec.name:
clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
@@ -1764,9 +1704,8 @@ def _spec_clauses(
# dependencies
if spec.concrete:
# older specs do not have package hashes, so we have to do this carefully
package_hash = getattr(spec, "_package_hash", None)
if package_hash:
clauses.append(fn.attr("package_hash", spec.name, package_hash))
if getattr(spec, "_package_hash", None):
clauses.append(fn.attr("package_hash", spec.name, spec._package_hash))
clauses.append(fn.attr("hash", spec.name, spec.dag_hash()))
edges = spec.edges_from_dependents()
@@ -1825,7 +1764,7 @@ def _spec_clauses(
return clauses
def define_package_versions_and_validate_preferences(
self, possible_pkgs: Set[str], *, require_checksum: bool, allow_deprecated: bool
self, possible_pkgs, *, require_checksum: bool, allow_deprecated: bool
):
"""Declare any versions in specs not declared in packages."""
packages_yaml = spack.config.get("packages")
@@ -1858,7 +1797,7 @@ def define_package_versions_and_validate_preferences(
if pkg_name not in packages_yaml or "version" not in packages_yaml[pkg_name]:
continue
version_defs: List[GitOrStandardVersion] = []
version_defs = []
for vstr in packages_yaml[pkg_name]["version"]:
v = vn.ver(vstr)
@@ -2069,6 +2008,13 @@ def target_defaults(self, specs):
def virtual_providers(self):
self.gen.h2("Virtual providers")
msg = (
"Internal Error: possible_virtuals is not populated. Please report to the spack"
" maintainers"
)
assert self.possible_virtuals is not None, msg
# what provides what
for vspec in sorted(self.possible_virtuals):
self.gen.fact(fn.virtual(vspec))
self.gen.newline()
@@ -2265,7 +2211,7 @@ def define_concrete_input_specs(self, specs, possible):
def setup(
self,
specs: List[spack.spec.Spec],
specs: Sequence[spack.spec.Spec],
*,
reuse: Optional[List[spack.spec.Spec]] = None,
allow_deprecated: bool = False,
@@ -3483,13 +3429,15 @@ def solve_in_rounds(
if not result.satisfiable or not result.specs:
break
input_specs = list(x for (x, y) in result.unsolved_specs)
input_specs = result.unsolved_specs
for spec in result.specs:
reusable_specs.extend(spec.traverse())
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
"""There was an issue with the spec that was requested (i.e. a user error)."""
"""
Subclass for new constructor signature for new concretizer
"""
def __init__(self, msg):
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
@@ -3499,21 +3447,8 @@ def __init__(self, msg):
class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
"""Errors that indicate a bug in Spack."""
def __init__(self, msg):
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
self.provided = None
self.required = None
self.constraint_type = None
class SolverError(InternalConcretizerError):
"""For cases where the solver is unable to produce a solution.
Such cases are unexpected because we allow for solutions with errors,
so for example user specs that are over-constrained should still
get a solution.
"""
Subclass for new constructor signature for new concretizer
"""
def __init__(self, provided, conflicts):
@@ -3526,7 +3461,7 @@ def __init__(self, provided, conflicts):
if conflicts:
msg += ", errors are:" + "".join([f"\n {conflict}" for conflict in conflicts])
super().__init__(msg)
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
self.provided = provided

View File

@@ -186,11 +186,11 @@ class InstallStatus(enum.Enum):
Options are artificially disjoint for display purposes
"""
installed = "@g{[+]} "
upstream = "@g{[^]} "
external = "@g{[e]} "
absent = "@K{ - } "
missing = "@r{[-]} "
INSTALLED = "@g{[+]}"
UPSTREAM = "@g{[^]}"
EXTERNAL = "@g{[e]}"
ABSENT = "@K{ - }"
MISSING = "@r{[-]}"
def colorize_spec(spec):
@@ -1499,7 +1499,7 @@ def edge_attributes(self) -> str:
if not deptypes_str and not virtuals_str:
return ""
result = f"{deptypes_str} {virtuals_str}".strip()
return f"[{result}]"
return f"[{result}] "
def dependencies(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
@@ -2091,12 +2091,7 @@ def to_node_dict(self, hash=ht.dag_hash):
if hasattr(variant, "_patches_in_order_of_appearance"):
d["patches"] = variant._patches_in_order_of_appearance
if (
self._concrete
and hash.package_hash
and hasattr(self, "_package_hash")
and self._package_hash
):
if self._concrete and hash.package_hash and self._package_hash:
# We use the attribute here instead of `self.package_hash()` because this
# should *always* be assigned at concretization time. We don't want to try
# to compute a package hash for a concrete spec where a) the package might not
@@ -4324,8 +4319,7 @@ def colorized(self):
return colorize_spec(self)
def format(self, format_string=DEFAULT_FORMAT, **kwargs):
r"""Prints out particular pieces of a spec, depending on what is
in the format string.
r"""Prints out particular pieces of a spec, depending on what is in the format string.
Using the ``{attribute}`` syntax, any field of the spec can be
selected. Those attributes can be recursive. For example,
@@ -4451,6 +4445,9 @@ def write_attribute(spec, attribute, color):
elif attribute == "spack_install":
write(morph(spec, spack.store.STORE.layout.root))
return
elif re.match(r"install_status", attribute):
write(self.install_status_symbol())
return
elif re.match(r"hash(:\d)?", attribute):
col = "#"
if ":" in attribute:
@@ -4545,8 +4542,18 @@ def write_attribute(spec, attribute, color):
"Format string terminated while reading attribute." "Missing terminating }."
)
# remove leading whitespace from directives that add it for internal formatting.
# Arch, compiler flags, and variants add spaces for spec format correctness, but
# we don't really want them in formatted string output. We do want to preserve
# whitespace from the format string.
formatted_spec = out.getvalue()
return formatted_spec.strip()
whitespace_attrs = [r"{arch=[^}]*}", r"{architecture}", r"{compiler_flags}", r"{variants}"]
if any(re.match(rx, format_string) for rx in whitespace_attrs):
formatted_spec = formatted_spec.lstrip()
if any(re.search(f"{rx}$", format_string) for rx in whitespace_attrs):
formatted_spec = formatted_spec.rstrip()
return formatted_spec
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
@@ -4596,7 +4603,7 @@ def __str__(self):
self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
)
sorted_dependencies = [
d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies
d.format("{edge_attributes}" + DEFAULT_FORMAT) for d in sorted_dependencies
]
spec_str = " ^".join(root_str + sorted_dependencies)
return spec_str.strip()
@@ -4616,20 +4623,25 @@ def colored_str(self):
def install_status(self):
"""Helper for tree to print DB install status."""
if not self.concrete:
return InstallStatus.absent
return InstallStatus.ABSENT
if self.external:
return InstallStatus.external
return InstallStatus.EXTERNAL
upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
if not record:
return InstallStatus.absent
return InstallStatus.ABSENT
elif upstream and record.installed:
return InstallStatus.upstream
return InstallStatus.UPSTREAM
elif record.installed:
return InstallStatus.installed
return InstallStatus.INSTALLED
else:
return InstallStatus.missing
return InstallStatus.MISSING
def install_status_symbol(self):
"""Get an install status symbol."""
status = self.install_status()
return clr.colorize(status.value)
def _installed_explicitly(self):
"""Helper for tree to print DB install status."""
@@ -4655,7 +4667,7 @@ def tree(
show_types: bool = False,
depth_first: bool = False,
recurse_dependencies: bool = True,
status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
install_status: bool = False,
prefix: Optional[Callable[["Spec"], str]] = None,
) -> str:
"""Prints out this spec and its dependencies, tree-formatted
@@ -4676,8 +4688,7 @@ def tree(
show_types: if True, show the (merged) dependency type of a node
depth_first: if True, traverse the DAG depth first when representing it as a tree
recurse_dependencies: if True, recurse on dependencies
status_fn: optional callable that takes a node as an argument and return its
installation status
install_status: if True, show installation status next to each spec
prefix: optional callable that takes a node as an argument and return its
installation prefix
"""
@@ -4691,6 +4702,9 @@ def tree(
):
node = dep_spec.spec
if install_status:
out += node.format("{install_status} ")
if prefix is not None:
out += prefix(node)
out += " " * indent
@@ -4698,15 +4712,6 @@ def tree(
if depth:
out += "%-4d" % d
if status_fn:
status = status_fn(node)
if status in list(InstallStatus):
out += clr.colorize(status.value, color=color)
elif status:
out += clr.colorize("@g{[+]} ", color=color)
else:
out += clr.colorize("@r{[-]} ", color=color)
if hashes:
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
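
To make the whitespace refactor from the commit message concrete, a hedged sketch of the intended `Spec.format()` behaviour on the side of the comparison that has the `whitespace_attrs` logic above; the outputs are expectations, not verified results:

```python
# Hedged sketch of the whitespace handling described in the hunk above.
import spack.spec

s = spack.spec.Spec("zlib").concretized()  # illustrative; assumes a working Spack setup

# {architecture} expands with whitespace added for spec-string correctness;
# format() strips that attribute-added whitespace at the edges of the output...
print(repr(s.format("{architecture}")))

# ...but whitespace the caller wrote into the format string is preserved.
print(repr(s.format("  {name}  ")))
```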

View File

@@ -2731,6 +2731,15 @@ def test_concretize_user_specs_together():
assert all("mpich" not in spec for _, spec in e.concretized_specs())
def test_cant_install_single_spec_when_concretizing_together():
e = ev.create("coconcretization")
e.unify = True
with pytest.raises(ev.SpackEnvironmentError, match=r"cannot install"):
e.concretize_and_add("zlib")
e.install_all()
def test_duplicate_packages_raise_when_concretizing_together():
e = ev.create("coconcretization")
e.unify = True

View File

@@ -94,9 +94,6 @@ def test_get_executables(working_env, mock_executable):
external = SpackCommand("external")
# TODO: this test should be made to work, but in the meantime it is
# causing intermittent (spurious) CI failures on all PRs
@pytest.mark.skipif(sys.platform == "win32", reason="Test fails intermittently on Windows")
def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
@@ -251,7 +248,6 @@ def _determine_variants(cls, exes, version_str):
assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")
@pytest.mark.not_on_windows("Fails spuriously on Windows")
def test_new_entries_are_reported_correctly(mock_executable, mutable_config, monkeypatch):
# Prepare an environment to detect a fake gcc
gcc_exe = mock_executable("gcc", output="echo 4.2.1")

View File

@@ -6,11 +6,9 @@
import pytest
import spack.deptypes as dt
import spack.environment as ev
import spack.main
import spack.spec
import spack.traverse
gc = spack.main.SpackCommand("gc")
add = spack.main.SpackCommand("add")
@@ -21,8 +19,11 @@
@pytest.mark.db
def test_gc_without_build_dependency(config, mutable_database):
assert "There are no unused specs." in gc("-yb")
assert "There are no unused specs." in gc("-y")
output = gc("-yb")
assert "There are no unused specs." in output
output = gc("-y")
assert "There are no unused specs." in output
@pytest.mark.db
@@ -31,9 +32,11 @@ def test_gc_with_build_dependency(config, mutable_database):
s.concretize()
s.package.do_install(fake=True, explicit=True)
assert "There are no unused specs." in gc("-yb")
assert "Successfully uninstalled cmake" in gc("-y")
assert "There are no unused specs." in gc("-y")
output = gc("-yb")
assert "There are no unused specs." in output
output = gc("-y")
assert "Successfully uninstalled cmake" in output
@pytest.mark.db
@@ -69,39 +72,34 @@ def test_gc_with_build_dependency_in_environment(config, mutable_database, mutab
with e:
assert mutable_database.query_local("simple-inheritance")
fst = gc("-y")
assert "Restricting garbage collection" in fst
assert "Successfully uninstalled cmake" in fst
snd = gc("-y")
assert "Restricting garbage collection" in snd
assert "There are no unused specs" in snd
output = gc("-y")
assert "Restricting garbage collection" in output
assert "Successfully uninstalled cmake" in output
@pytest.mark.db
def test_gc_except_any_environments(config, mutable_database, mutable_mock_env_path):
"""Tests whether the garbage collector can remove all specs except those still needed in some
environment (needed in the sense of roots + link/run deps)."""
s = spack.spec.Spec("simple-inheritance")
s.concretize()
s.package.do_install(fake=True, explicit=True)
assert mutable_database.query_local("zmpi")
e = ev.create("test_gc")
e.add("simple-inheritance")
e.concretize()
e.install_all(fake=True)
e.write()
assert mutable_database.query_local("simple-inheritance")
assert not e.all_matching_specs(spack.spec.Spec("zmpi"))
with e:
add("simple-inheritance")
install()
assert mutable_database.query_local("simple-inheritance")
output = gc("-yE")
assert "Restricting garbage collection" not in output
assert "Successfully uninstalled zmpi" in output
assert not mutable_database.query_local("zmpi")
# All runtime specs in this env should still be installed.
assert all(
s.installed
for s in spack.traverse.traverse_nodes(e.concrete_roots(), deptype=dt.LINK | dt.RUN)
)
with e:
output = gc("-yE")
assert "Restricting garbage collection" not in output
assert "There are no unused specs" not in output
@pytest.mark.db

View File

@@ -341,7 +341,6 @@ def test_different_compilers_get_different_flags(self):
assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"])
assert not set(cmake.compiler_flags["fflags"])
@pytest.mark.xfail(reason="Broken, needs to be fixed")
def test_compiler_flags_from_compiler_and_dependent(self):
client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
client.concretize()
@@ -2094,25 +2093,7 @@ def test_result_specs_is_not_empty(self, specs):
result, _, _ = solver.driver.solve(setup, specs, reuse=[])
assert result.specs
@pytest.mark.regression("38664")
def test_unsolved_specs_raises_error(self, monkeypatch, mock_packages, config):
"""Check that the solver raises an exception when input specs are not
satisfied.
"""
specs = [Spec("zlib")]
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
simulate_unsolved_property = list((x, None) for x in specs)
monkeypatch.setattr(spack.solver.asp.Result, "unsolved_specs", simulate_unsolved_property)
with pytest.raises(
spack.solver.asp.InternalConcretizerError,
match="the solver completed but produced specs",
):
solver.driver.solve(setup, specs, reuse=[])
assert not result.unsolved_specs
@pytest.mark.regression("36339")
def test_compiler_match_constraints_when_selected(self):

View File

@@ -141,7 +141,6 @@ def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch, wo
assert s.package.spec.installed
@pytest.mark.not_on_windows("Fails spuriously on Windows")
@pytest.mark.disable_clean_stage_check
def test_failing_overwrite_install_should_keep_previous_installation(
mock_fetch, install_mockery, working_env

View File

@@ -9,7 +9,6 @@
import hashlib
import json
import os
import pathlib
from contextlib import contextmanager
import spack.environment as ev
@@ -173,12 +172,6 @@ def test_buildcache_push_with_base_image_command(
dst_image = ImageReference.from_string(f"dst.example.com/image:{tag}")
retrieved_manifest, retrieved_config = get_manifest_and_config(dst_image)
# Check that the media type is OCI
assert retrieved_manifest["mediaType"] == "application/vnd.oci.image.manifest.v1+json"
assert (
retrieved_manifest["config"]["mediaType"] == "application/vnd.oci.image.config.v1+json"
)
# Check that the base image layer is first.
assert retrieved_manifest["layers"][0]["digest"] == str(tar_gz_digest)
assert retrieved_config["rootfs"]["diff_ids"][0] == str(tar_digest)
@@ -196,93 +189,3 @@ def test_buildcache_push_with_base_image_command(
# And verify that all layers including the base layer are present
for layer in retrieved_manifest["layers"]:
assert blob_exists(dst_image, digest=Digest.from_string(layer["digest"]))
assert layer["mediaType"] == "application/vnd.oci.image.layer.v1.tar+gzip"
def test_uploading_with_base_image_in_docker_image_manifest_v2_format(
tmp_path: pathlib.Path, mutable_database, disable_parallel_buildcache_push
):
"""If the base image uses an old manifest schema, Spack should also use that.
That is necessary for container images to work with Apptainer, which is rather strict about
mismatching manifest/layer types."""
registry_src = InMemoryOCIRegistry("src.example.com")
registry_dst = InMemoryOCIRegistry("dst.example.com")
base_image = ImageReference.from_string("src.example.com/my-base-image:latest")
with oci_servers(registry_src, registry_dst):
mirror("add", "oci-test", "oci://dst.example.com/image")
# Create a dummy base image (blob, config, manifest) in registry A in the Docker Image
# Manifest V2 format.
rootfs = tmp_path / "rootfs"
(rootfs / "bin").mkdir(parents=True)
(rootfs / "bin" / "sh").write_text("hello world")
tarball = tmp_path / "base.tar.gz"
with gzip_compressed_tarfile(tarball) as (tar, tar_gz_checksum, tar_checksum):
tar.add(rootfs, arcname=".")
tar_gz_digest = Digest.from_sha256(tar_gz_checksum.hexdigest())
tar_digest = Digest.from_sha256(tar_checksum.hexdigest())
upload_blob(base_image, str(tarball), tar_gz_digest)
config = {
"created": "2015-10-31T22:22:56.015925234Z",
"author": "Foo <example@example.com>",
"architecture": "amd64",
"os": "linux",
"config": {
"User": "foo",
"Memory": 2048,
"MemorySwap": 4096,
"CpuShares": 8,
"ExposedPorts": {"8080/tcp": {}},
"Env": ["PATH=/usr/bin:/bin"],
"Entrypoint": ["/bin/sh"],
"Cmd": ["-c", "'echo hello world'"],
"Volumes": {"/x": {}},
"WorkingDir": "/",
},
"rootfs": {"diff_ids": [str(tar_digest)], "type": "layers"},
"history": [
{
"created": "2015-10-31T22:22:54.690851953Z",
"created_by": "/bin/sh -c #(nop) ADD file:a3bc1e842b69636f9df5256c49c5374fb4eef1e281fe3f282c65fb853ee171c5 in /",
}
],
}
config_file = tmp_path / "config.json"
config_file.write_text(json.dumps(config))
config_digest = Digest.from_sha256(hashlib.sha256(config_file.read_bytes()).hexdigest())
upload_blob(base_image, str(config_file), config_digest)
manifest = {
"schemaVersion": 2,
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
"config": {
"mediaType": "application/vnd.docker.container.image.v1+json",
"size": config_file.stat().st_size,
"digest": str(config_digest),
},
"layers": [
{
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
"size": tarball.stat().st_size,
"digest": str(tar_gz_digest),
}
],
}
upload_manifest(base_image, manifest)
# Finally upload some package to registry B with registry A's image as base
buildcache("push", "--base-image", str(base_image), "oci-test", "mpileaks^mpich")
# Should have some manifests uploaded to registry B now.
assert registry_dst.manifests
# Verify that all manifest are in the Docker Image Manifest V2 format, not OCI.
# And also check that we're not using annotations, which is an OCI-only "feature".
for m in registry_dst.manifests.values():
assert m["mediaType"] == "application/vnd.docker.distribution.manifest.v2+json"
assert m["config"]["mediaType"] == "application/vnd.docker.container.image.v1+json"
for layer in m["layers"]:
assert layer["mediaType"] == "application/vnd.docker.image.rootfs.diff.tar.gzip"
assert "annotations" not in m

View File

@@ -17,7 +17,6 @@
from typing import Callable, Dict, List, Optional, Pattern, Tuple
from urllib.request import Request
import spack.oci.oci
from spack.oci.image import Digest
from spack.oci.opener import OCIAuthHandler
@@ -172,7 +171,7 @@ def __init__(self, domain: str, allow_single_post: bool = True) -> None:
self.blobs: Dict[str, bytes] = {}
# Map from (name, tag) to manifest
self.manifests: Dict[Tuple[str, str], dict] = {}
self.manifests: Dict[Tuple[str, str], Dict] = {}
def index(self, req: Request):
return MockHTTPResponse.with_json(200, "OK", body={})
@@ -226,12 +225,15 @@ def put_session(self, req: Request):
def put_manifest(self, req: Request, name: str, ref: str):
# In requests, Python runs header.capitalize().
content_type = req.get_header("Content-type")
assert content_type in spack.oci.oci.all_content_type
assert content_type in (
"application/vnd.oci.image.manifest.v1+json",
"application/vnd.oci.image.index.v1+json",
)
index_or_manifest = json.loads(self._require_data(req))
# Verify that we have all blobs (layers for manifest, manifests for index)
if content_type in spack.oci.oci.manifest_content_type:
if content_type == "application/vnd.oci.image.manifest.v1+json":
for layer in index_or_manifest["layers"]:
assert layer["digest"] in self.blobs, "Missing blob while uploading manifest"

View File

@@ -703,6 +703,13 @@ def check_prop(check_spec, fmt_str, prop, getter):
actual = spec.format(named_str)
assert expected == actual
def test_spec_format_install_status(self, database):
installed = database.query_one("mpileaks^zmpi")
assert installed.format("{install_status}") == "[+]"
not_installed = Spec("foo")
assert not_installed.format("{install_status}") == " - "
def test_spec_formatting_escapes(self, default_mock_concretization):
spec = default_mock_concretization("multivalue-variant cflags=-O2")

View File

@@ -906,13 +906,6 @@ def test_version_list_normalization():
assert ver("1.0:2.0,=1.0,ref=1.0") == ver(["1.0:2.0"])
def test_version_list_connected_union_of_disjoint_ranges():
# Make sure that we also simplify lists of ranges if their intersection is empty, but their
# union is connected.
assert ver("1.0:2.0,2.1,2.2:3,4:6") == ver(["1.0:6"])
assert ver("1.0:1.2,1.3:2") == ver("1.0:1.5,1.6:2")
@pytest.mark.parametrize("version", ["=1.2", "git.ref=1.2", "1.2"])
def test_version_comparison_with_list_fails(version):
vlist = VersionList(["=1.3"])

View File

@@ -695,35 +695,26 @@ def satisfies(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionLis
def overlaps(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"]) -> bool:
return self.intersects(other)
def _union_if_not_disjoint(
self, other: Union["ClosedOpenRange", ConcreteVersion]
) -> Optional["ClosedOpenRange"]:
"""Same as union, but returns None when the union is not connected. This function is not
implemented for version lists as right-hand side, as that makes little sense."""
def union(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"]):
if isinstance(other, StandardVersion):
return self if self.lo <= other < self.hi else None
return self if self.lo <= other < self.hi else VersionList([self, other])
if isinstance(other, GitVersion):
return self if self.lo <= other.ref_version < self.hi else None
return self if self.lo <= other.ref_version < self.hi else VersionList([self, other])
if isinstance(other, ClosedOpenRange):
# Notice <= because we want union(1:2, 3:4) = 1:4.
return (
ClosedOpenRange(min(self.lo, other.lo), max(self.hi, other.hi))
if self.lo <= other.hi and other.lo <= self.hi
else None
)
if self.lo <= other.hi and other.lo <= self.hi:
return ClosedOpenRange(min(self.lo, other.lo), max(self.hi, other.hi))
raise TypeError(f"Unexpected type {type(other)}")
return VersionList([self, other])
def union(self, other: Union["ClosedOpenRange", ConcreteVersion, "VersionList"]):
if isinstance(other, VersionList):
v = other.copy()
v.add(self)
return v
result = self._union_if_not_disjoint(other)
return result if result is not None else VersionList([self, other])
raise ValueError(f"Unexpected type {type(other)}")
def intersection(self, other: Union["ClosedOpenRange", ConcreteVersion]):
# range - version -> singleton or nothing.
@@ -741,20 +732,19 @@ class VersionList:
def __init__(self, vlist=None):
self.versions: List[StandardVersion, GitVersion, ClosedOpenRange] = []
if vlist is None:
pass
elif isinstance(vlist, str):
vlist = from_string(vlist)
if isinstance(vlist, VersionList):
self.versions = vlist.versions
if vlist is not None:
if isinstance(vlist, str):
vlist = from_string(vlist)
if isinstance(vlist, VersionList):
self.versions = vlist.versions
else:
self.versions = [vlist]
else:
self.versions = [vlist]
else:
for v in vlist:
self.add(ver(v))
for v in vlist:
self.add(ver(v))
def add(self, item: Union[StandardVersion, GitVersion, ClosedOpenRange, "VersionList"]):
if isinstance(item, (StandardVersion, GitVersion)):
def add(self, item):
if isinstance(item, ConcreteVersion):
i = bisect_left(self, item)
# Only insert when prev and next are not intersected.
if (i == 0 or not item.intersects(self[i - 1])) and (
@@ -765,22 +755,16 @@ def add(self, item: Union[StandardVersion, GitVersion, ClosedOpenRange, "Version
elif isinstance(item, ClosedOpenRange):
i = bisect_left(self, item)
# Note: can span multiple concrete versions to the left (as well as to the right).
# For instance insert 1.2: into [1.2, hash=1.2, 1.3, 1.4:1.5]
# would bisect at i = 1 and merge i = 0 too.
while i > 0:
union = item._union_if_not_disjoint(self[i - 1])
if union is None: # disjoint
break
item = union
# Note: can span multiple concrete versions to the left,
# For instance insert 1.2: into [1.2, hash=1.2, 1.3]
# would bisect to i = 1.
while i > 0 and item.intersects(self[i - 1]):
item = item.union(self[i - 1])
del self.versions[i - 1]
i -= 1
while i < len(self):
union = item._union_if_not_disjoint(self[i])
if union is None:
break
item = union
while i < len(self) and item.intersects(self[i]):
item = item.union(self[i])
del self.versions[i]
self.versions.insert(i, item)
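Illustrative aside, not part of the diff: the merge rule behind _union_if_not_disjoint and the rewritten add() can be sketched with plain half-open integer ranges, independent of Spack's version classes; the names below are ours.
from typing import Optional, Tuple

Range = Tuple[int, int]  # (lo, hi), meaning the half-open interval [lo, hi)

def union_if_not_disjoint(a: Range, b: Range) -> Optional[Range]:
    # <= rather than <, so ranges that merely touch at a boundary still merge,
    # mirroring "union(1:2, 3:4) = 1:4" in the comment above.
    if a[0] <= b[1] and b[0] <= a[1]:
        return (min(a[0], b[0]), max(a[1], b[1]))
    return None  # disjoint: the caller keeps both ranges in the list

assert union_if_not_disjoint((1, 3), (3, 6)) == (1, 6)  # touching: merged
assert union_if_not_disjoint((1, 2), (4, 6)) is None    # gap: kept separate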

View File

@@ -330,7 +330,7 @@ protected-publish:
e4s-generate:
extends: [ ".e4s", ".generate-x86_64"]
image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4:2024.03.01
image: ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01
e4s-build:
extends: [ ".e4s", ".build" ]
@@ -353,7 +353,7 @@ e4s-build:
e4s-neoverse-v2-generate:
extends: [ ".e4s-neoverse-v2", ".generate-neoverse-v2" ]
image: ecpe4s/ubuntu22.04-runner-arm64-gcc-11.4:2024.03.01
image: ghcr.io/spack/ubuntu22.04-runner-arm64-gcc-11.4:2024.01.01
e4s-neoverse-v2-build:
extends: [ ".e4s-neoverse-v2", ".build" ]
@@ -376,7 +376,7 @@ e4s-neoverse-v2-build:
e4s-neoverse_v1-generate:
extends: [ ".e4s-neoverse_v1", ".generate-neoverse_v1" ]
image: ecpe4s/ubuntu22.04-runner-arm64-gcc-11.4:2024.03.01
image: ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01
e4s-neoverse_v1-build:
extends: [ ".e4s-neoverse_v1", ".build" ]
@@ -399,7 +399,7 @@ e4s-neoverse_v1-build:
e4s-rocm-external-generate:
extends: [ ".e4s-rocm-external", ".generate-x86_64"]
image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm5.7.1:2024.03.01
image: ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01
e4s-rocm-external-build:
extends: [ ".e4s-rocm-external", ".build" ]

View File

@@ -151,6 +151,7 @@ spack:
# - alquimia # pflotran: petsc-3.19.4-c6pmpdtpzarytxo434zf76jqdkhdyn37/lib/petsc/conf/rules:169: material_aux.o] Error 1: fortran errors
# - amrex # disabled temporarily pending resolution of unreproducible CI failure
# - archer # subsumed by llvm +omp_tsan
# - axom # axom: CMake Error at axom/sidre/cmake_install.cmake:154 (file): file INSTALL cannot find "/tmp/gitlab-runner-2/spack-stage/spack-stage-axom-0.8.1-jvol6riu34vuyqvrd5ft2gyhrxdqvf63/spack-build-jvol6ri/lib/fortran/axom_spio.mod": No such file or directory.
# - bricks # bricks: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
# - dealii # llvm@14.0.6: ?; intel-tbb@2020.3: clang-15: error: unknown argument: '-flifetime-dse=1'; assimp@5.2.5: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)

View File

@@ -147,6 +147,7 @@ spack:
# HOLDING THESE BACK UNTIL CRAY SLES CAPACITY IS EXPANDED AT UO
# - alquimia
# - amrex
# - archer
# - axom
# - bricks
# - dealii

View File

@@ -7,7 +7,7 @@ spack:
packages:
all:
require: '%gcc target=neoverse_v2'
require: '%gcc@11.4.0 target=neoverse_v2'
providers:
blas: [openblas]
mpi: [mpich]
@@ -31,13 +31,18 @@ spack:
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
mpi:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc'
ncurses:
require: '@6.3 +termlib'
tbb:
require: intel-tbb
boost:
version: [1.79.0]
variants: +atomic +chrono +container +date_time +exception +filesystem +graph
+iostreams +locale +log +math +mpi +multithreaded +program_options +random
+regex +serialization +shared +signals +stacktrace +system +test +thread +timer
@@ -151,7 +156,6 @@ spack:
- umap
- umpire
- upcxx
- veloc
- wannier90
- xyce +mpi +shared +pymi +pymi_static_tpls
# INCLUDED IN ECP DAV CPU
@@ -166,39 +170,18 @@ spack:
- py-cinemasci
- sz
- unifyfs
- laghos
- veloc
# - visit # silo: https://github.com/spack/spack/issues/39538
- vtk-m
- zfp
# --
# - archer # part of llvm +omp_tsan
# - bricks ~cuda # not respecting target=aarch64?
# - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
# - geopm # geopm: https://github.com/spack/spack/issues/38795
# - glvis # glvis: https://github.com/spack/spack/issues/42839
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2'
# - variorum # variorum: https://github.com/spack/spack/issues/38786
# PYTHON PACKAGES
- opencv +python3
- py-horovod
- py-jax
- py-jupyterlab
- py-matplotlib
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
- py-tensorflow
- py-torch
# CUDA NOARCH
- flux-core +cuda
- hpctoolkit +cuda
@@ -349,7 +332,7 @@ spack:
ci:
pipeline-gen:
- build-job:
image: ecpe4s/ubuntu22.04-runner-arm64-gcc-11.4:2024.03.01
image: "ghcr.io/spack/ubuntu22.04-runner-arm64-gcc-11.4:2024.01.01"
cdash:
build-group: E4S ARM Neoverse V2

View File

@@ -7,7 +7,7 @@ spack:
packages:
all:
require: '%gcc target=neoverse_v1'
require: '%gcc@11.4.0 target=neoverse_v1'
providers:
blas: [openblas]
mpi: [mpich]
@@ -35,9 +35,12 @@ spack:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc'
ncurses:
require: '@6.3 +termlib'
tbb:
require: intel-tbb
boost:
version: [1.79.0]
variants: +atomic +chrono +container +date_time +exception +filesystem +graph
+iostreams +locale +log +math +mpi +multithreaded +program_options +random
+regex +serialization +shared +signals +stacktrace +system +test +thread +timer
@@ -92,7 +95,6 @@ spack:
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
- legion
@@ -171,34 +173,13 @@ spack:
- vtk-m
- zfp
# --
# - archer # part of llvm +omp_tsan
# - bricks ~cuda # not respecting target=aarch64?
# - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'.
# - geopm # geopm: https://github.com/spack/spack/issues/38795
# - glvis # glvis: https://github.com/spack/spack/issues/42839
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2'
# - variorum # variorum: https://github.com/spack/spack/issues/38786
# PYTHON PACKAGES
- opencv +python3
- py-horovod
- py-jax
- py-jupyterlab
- py-matplotlib
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
- py-tensorflow
- py-torch
# CUDA NOARCH
- flux-core +cuda
- hpctoolkit +cuda
@@ -349,7 +330,7 @@ spack:
ci:
pipeline-gen:
- build-job:
image: ecpe4s/ubuntu22.04-runner-arm64-gcc-11.4:2024.03.01
image: "ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01"
cdash:
build-group: E4S ARM Neoverse V1

View File

@@ -13,10 +13,9 @@ spack:
- "%oneapi"
providers:
blas: [openblas]
mpi: [mpich]
tbb: [intel-tbb]
variants: +mpi
gl:
require: osmesa
elfutils:
variants: ~nls
gcc-runtime:
@@ -39,10 +38,14 @@ spack:
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
mpi:
require: 'mpich@4:'
mpich:
require: '~wrapperrpath ~hwloc'
py-cryptography:
require: '@38.0.1'
unzip:
require: '%gcc'
binutils:
@@ -60,6 +63,8 @@ spack:
require: '%gcc'
openssh:
require: '%gcc'
libffi:
require: "@3.4.4"
dyninst:
require: "%gcc"
bison:
@@ -103,7 +108,6 @@ spack:
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
- legion
@@ -189,32 +193,10 @@ spack:
# - upcxx # upcxx: /opt/intel/oneapi/mpi/2021.10.0//libfabric/bin/fi_info: error while loading shared libraries: libfabric.so.1: cannot open shared object file: No such file or directory
# --
# - bricks ~cuda # bricks: /opt/intel/oneapi/compiler/2024.0/bin/sycl-post-link: error while loading shared libraries: libonnxruntime.1.12.22.721.so: cannot open shared object file: No such file or directory
# - glvis ^llvm # glvis: https://github.com/spack/spack/issues/42839
# - pdt # pdt: pdbType.cc:193:21: warning: ISO C++11 does not allow conversion from string literal to 'char *' [-Wwritable-strings]
# - quantum-espresso # quantum-espresso@7.2 /i3fqdx5: warning: <unknown>:0:0: loop not unroll-and-jammed: the optimizer was unable to perform the requested transformation; the transformation might be disabled or specified as part of an unsupported transformation ordering
# - tau +mpi +python +syscall # pdt: pdbType.cc:193:21: warning: ISO C++11 does not allow conversion from string literal to 'char *' [-Wwritable-strings]
# PYTHON PACKAGES
- opencv +python3
- py-jupyterlab
- py-notebook
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
- py-mpi4py
- py-numba
# - py-horovod # error
# - py-jax # error
# - py-matplotlib # error
# - py-tensorflow # error
# - py-torch # error
# GPU
- amrex +sycl
- tau +mpi +opencl +level_zero ~pdt +syscall # requires libdrm.so to be installed

View File

@@ -32,6 +32,8 @@ spack:
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
mpi:
require: mpich
mpich:
@@ -72,6 +74,7 @@ spack:
- drishti
- dxt-explorer
- dyninst
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
- exaworks
- flecsi
- flit
@@ -94,7 +97,6 @@ spack:
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
- legion
@@ -156,7 +158,6 @@ spack:
- upcxx
- wannier90
- xyce +mpi +shared +pymi +pymi_static_tpls
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
# INCLUDED IN ECP DAV CPU
- adios2
- ascent
@@ -175,34 +176,13 @@ spack:
- vtk-m
- zfp
# --
# - archer # part of llvm +omp_tsan
# - dealii # fltk: https://github.com/spack/spack/issues/38791
# - geopm # geopm: https://github.com/spack/spack/issues/38798
# - glvis # glvis: https://github.com/spack/spack/issues/42839
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs: gcc: error: unrecognized command line option '-mno-sse2'; did you mean '-mno-isel'? gcc: error: unrecognized command line option '-mno-avx2'
# - phist +mpi # ghost@develop: gcc-9: error: unrecognized command line option '-march=native'; did you mean '-mcpu=native'?
# - variorum # variorum: https://github.com/spack/spack/issues/38786
# PYTHON PACKAGES
- opencv +python3
- py-jax
- py-jupyterlab
- py-matplotlib
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
# - py-horovod # py-torch, py-tensorflow
# - py-tensorflow # error
# - py-torch # error
# CUDA NOARCH
- bricks +cuda
- cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=70

View File

@@ -10,15 +10,49 @@ spack:
require: '%gcc target=x86_64_v3'
providers:
blas: [openblas]
mpi: [mpich]
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
openblas:
variants: threads=openmp
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
mpi:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc'
openblas:
variants: threads=openmp
ncurses:
require: '@6.3 +termlib'
tbb:
require: intel-tbb
boost:
version: [1.79.0]
variants: +atomic +chrono +container +date_time +exception +filesystem +graph
+iostreams +locale +log +math +mpi +multithreaded +program_options +random
+regex +serialization +shared +signals +stacktrace +system +test +thread +timer
cxxstd=17 visibility=global
libffi:
require: "@3.4.4"
vtk-m:
require: "+examples"
cuda:
version: [11.8.0]
paraview:
# Don't build GUI support or GLX rendering for HPC/container deployments
require: "@5.11 ~qt+osmesa"
@@ -27,181 +61,181 @@ spack:
comgr:
buildable: false
externals:
- spec: comgr@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: comgr@5.4.3
prefix: /opt/rocm-5.4.3/
hip-rocclr:
buildable: false
externals:
- spec: hip-rocclr@5.7.1
prefix: /opt/rocm-5.7.1/hip
- spec: hip-rocclr@5.4.3
prefix: /opt/rocm-5.4.3/hip
hipblas:
buildable: false
externals:
- spec: hipblas@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: hipblas@5.4.3
prefix: /opt/rocm-5.4.3/
hipcub:
buildable: false
externals:
- spec: hipcub@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: hipcub@5.4.3
prefix: /opt/rocm-5.4.3/
hipfft:
buildable: false
externals:
- spec: hipfft@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: hipfft@5.4.3
prefix: /opt/rocm-5.4.3/
hipsparse:
buildable: false
externals:
- spec: hipsparse@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: hipsparse@5.4.3
prefix: /opt/rocm-5.4.3/
miopen-hip:
buildable: false
externals:
- spec: miopen-hip@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: miopen-hip@5.4.3
prefix: /opt/rocm-5.4.3/
miopengemm:
buildable: false
externals:
- spec: miopengemm@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: miopengemm@5.4.3
prefix: /opt/rocm-5.4.3/
rccl:
buildable: false
externals:
- spec: rccl@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rccl@5.4.3
prefix: /opt/rocm-5.4.3/
rocblas:
buildable: false
externals:
- spec: rocblas@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocblas@5.4.3
prefix: /opt/rocm-5.4.3/
rocfft:
buildable: false
externals:
- spec: rocfft@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocfft@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-clang-ocl:
buildable: false
externals:
- spec: rocm-clang-ocl@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-clang-ocl@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-cmake:
buildable: false
externals:
- spec: rocm-cmake@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-cmake@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-dbgapi:
buildable: false
externals:
- spec: rocm-dbgapi@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-dbgapi@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-debug-agent:
buildable: false
externals:
- spec: rocm-debug-agent@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-debug-agent@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-device-libs:
buildable: false
externals:
- spec: rocm-device-libs@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-device-libs@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-gdb:
buildable: false
externals:
- spec: rocm-gdb@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-gdb@5.4.3
prefix: /opt/rocm-5.4.3/
rocm-opencl:
buildable: false
externals:
- spec: rocm-opencl@5.7.1
prefix: /opt/rocm-5.7.1/opencl
- spec: rocm-opencl@5.4.3
prefix: /opt/rocm-5.4.3/opencl
rocm-smi-lib:
buildable: false
externals:
- spec: rocm-smi-lib@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: rocm-smi-lib@5.4.3
prefix: /opt/rocm-5.4.3/
hip:
buildable: false
externals:
- spec: hip@5.7.1
prefix: /opt/rocm-5.7.1
- spec: hip@5.4.3
prefix: /opt/rocm-5.4.3
extra_attributes:
compilers:
c: /opt/rocm-5.7.1/llvm/bin/clang++
c++: /opt/rocm-5.7.1/llvm/bin/clang++
hip: /opt/rocm-5.7.1/hip/bin/hipcc
c: /opt/rocm-5.4.3/llvm/bin/clang++
c++: /opt/rocm-5.4.3/llvm/bin/clang++
hip: /opt/rocm-5.4.3/hip/bin/hipcc
hipify-clang:
buildable: false
externals:
- spec: hipify-clang@5.7.1
prefix: /opt/rocm-5.7.1
- spec: hipify-clang@5.4.3
prefix: /opt/rocm-5.4.3
llvm-amdgpu:
buildable: false
externals:
- spec: llvm-amdgpu@5.7.1
prefix: /opt/rocm-5.7.1/llvm
- spec: llvm-amdgpu@5.4.3
prefix: /opt/rocm-5.4.3/llvm
extra_attributes:
compilers:
c: /opt/rocm-5.7.1/llvm/bin/clang++
cxx: /opt/rocm-5.7.1/llvm/bin/clang++
c: /opt/rocm-5.4.3/llvm/bin/clang++
cxx: /opt/rocm-5.4.3/llvm/bin/clang++
hsakmt-roct:
buildable: false
externals:
- spec: hsakmt-roct@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: hsakmt-roct@5.4.3
prefix: /opt/rocm-5.4.3/
hsa-rocr-dev:
buildable: false
externals:
- spec: hsa-rocr-dev@5.7.1
prefix: /opt/rocm-5.7.1/
- spec: hsa-rocr-dev@5.4.3
prefix: /opt/rocm-5.4.3/
extra_attributes:
compilers:
c: /opt/rocm-5.7.1/llvm/bin/clang++
cxx: /opt/rocm-5.7.1/llvm/bin/clang++
c: /opt/rocm-5.4.3/llvm/bin/clang++
cxx: /opt/rocm-5.4.3/llvm/bin/clang++
roctracer-dev-api:
buildable: false
externals:
- spec: roctracer-dev-api@5.7.1
prefix: /opt/rocm-5.7.1
- spec: roctracer-dev-api@5.4.3
prefix: /opt/rocm-5.4.3
roctracer-dev:
buildable: false
externals:
- spec: roctracer-dev@4.5.3
prefix: /opt/rocm-5.7.1
prefix: /opt/rocm-5.4.3
rocprim:
buildable: false
externals:
- spec: rocprim@5.7.1
prefix: /opt/rocm-5.7.1
- spec: rocprim@5.4.3
prefix: /opt/rocm-5.4.3
rocrand:
buildable: false
externals:
- spec: rocrand@5.7.1
prefix: /opt/rocm-5.7.1
- spec: rocrand@5.4.3
prefix: /opt/rocm-5.4.3
hipsolver:
buildable: false
externals:
- spec: hipsolver@5.7.1
prefix: /opt/rocm-5.7.1
- spec: hipsolver@5.4.3
prefix: /opt/rocm-5.4.3
rocsolver:
buildable: false
externals:
- spec: rocsolver@5.7.1
prefix: /opt/rocm-5.7.1
- spec: rocsolver@5.4.3
prefix: /opt/rocm-5.4.3
rocsparse:
buildable: false
externals:
- spec: rocsparse@5.7.1
prefix: /opt/rocm-5.7.1
- spec: rocsparse@5.4.3
prefix: /opt/rocm-5.4.3
rocthrust:
buildable: false
externals:
- spec: rocthrust@5.7.1
prefix: /opt/rocm-5.7.1
- spec: rocthrust@5.4.3
prefix: /opt/rocm-5.4.3
rocprofiler-dev:
buildable: false
externals:
- spec: rocprofiler-dev@5.7.1
prefix: /opt/rocm-5.7.1
- spec: rocprofiler-dev@5.4.3
prefix: /opt/rocm-5.4.3
specs:
# ROCM NOARCH
@@ -228,7 +262,7 @@ spack:
- mfem +rocm amdgpu_target=gfx908
- petsc +rocm amdgpu_target=gfx908
- raja ~openmp +rocm amdgpu_target=gfx908
# - slate +rocm amdgpu_target=gfx908 # slate: hip/device_gescale_row_col.hip.cc:58:49: error: use of overloaded operator '*' is ambiguous (with operand types 'HIP_vector_type<double, 2>' and 'const HIP_vector_type<double, 2>')
- slate +rocm amdgpu_target=gfx908
- slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908
- strumpack ~slate +rocm amdgpu_target=gfx908
- sundials +rocm amdgpu_target=gfx908
@@ -269,7 +303,7 @@ spack:
- mfem +rocm amdgpu_target=gfx90a
- petsc +rocm amdgpu_target=gfx90a
- raja ~openmp +rocm amdgpu_target=gfx90a
# - slate +rocm amdgpu_target=gfx90a # slate: hip/device_gescale_row_col.hip.cc:58:49: error: use of overloaded operator '*' is ambiguous (with operand types 'HIP_vector_type<double, 2>' and 'const HIP_vector_type<double, 2>')
- slate +rocm amdgpu_target=gfx90a
- slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a
- strumpack ~slate +rocm amdgpu_target=gfx90a
- sundials +rocm amdgpu_target=gfx90a
@@ -293,7 +327,7 @@ spack:
ci:
pipeline-gen:
- build-job:
image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4-rocm5.7.1:2024.03.01
image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01"
cdash:
build-group: E4S ROCm External

View File

@@ -31,13 +31,18 @@ spack:
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
mpi:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc'
ncurses:
require: '@6.3 +termlib'
tbb:
require: intel-tbb
boost:
version: [1.79.0]
variants: +atomic +chrono +container +date_time +exception +filesystem +graph
+iostreams +locale +log +math +mpi +multithreaded +program_options +random
+regex +serialization +shared +signals +stacktrace +system +test +thread +timer
@@ -101,7 +106,6 @@ spack:
- julia ^llvm ~clang ~gold ~polly targets=amdgpu,bpf,nvptx,webassembly
- kokkos +openmp
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
- legion
@@ -183,222 +187,201 @@ spack:
- vtk-m
- zfp
# --
# - archer # submerged into llvm +libomp_tsan
# - geopm # geopm: https://github.com/spack/spack/issues/38795
# - glvis # glvis: https://github.com/spack/spack/issues/42839
# PYTHON PACKAGES
- opencv +python3
- py-horovod
- py-jax
- py-jupyterlab
- py-matplotlib
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
- py-tensorflow
- py-torch
# CUDA NOARCH
- bricks +cuda
- flux-core +cuda
- hpctoolkit +cuda
- papi +cuda
- tau +mpi +cuda +syscall
# --
# - legion +cuda # legion: needs NVIDIA driver
# # CUDA NOARCH
# - bricks +cuda
# - flux-core +cuda
# - hpctoolkit +cuda
# - papi +cuda
# - tau +mpi +cuda +syscall
# # --
# # - legion +cuda # legion: needs NVIDIA driver
# CUDA 80
- amrex +cuda cuda_arch=80
- arborx +cuda cuda_arch=80 ^kokkos +wrapper
- cabana +cuda cuda_arch=80 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=80
- caliper +cuda cuda_arch=80
- chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire ~shared
- cusz +cuda cuda_arch=80
- dealii +cuda cuda_arch=80
- ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # +ascent fails because fides fetch error
- exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=80 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=80 #^raja@0.14.0
- flecsi +cuda cuda_arch=80
- ginkgo +cuda cuda_arch=80
- gromacs +cuda cuda_arch=80
- heffte +cuda cuda_arch=80
- hpx +cuda cuda_arch=80
- hypre +cuda cuda_arch=80
- kokkos +wrapper +cuda cuda_arch=80
- kokkos-kernels +cuda cuda_arch=80 ^kokkos +wrapper +cuda cuda_arch=80
- libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=80 ^cusz +cuda cuda_arch=80
- magma +cuda cuda_arch=80
- mfem +cuda cuda_arch=80
- mgard +serial +openmp +timing +unstructured +cuda cuda_arch=80
- omega-h +cuda cuda_arch=80
- parsec +cuda cuda_arch=80
- petsc +cuda cuda_arch=80
- py-torch +cuda cuda_arch=80
- raja +cuda cuda_arch=80
- slate +cuda cuda_arch=80
- slepc +cuda cuda_arch=80
- strumpack ~slate +cuda cuda_arch=80
- sundials +cuda cuda_arch=80
- superlu-dist +cuda cuda_arch=80
- tasmanian +cuda cuda_arch=80
- trilinos +cuda cuda_arch=80
- umpire ~shared +cuda cuda_arch=80
# INCLUDED IN ECP DAV CUDA
# - adios2 +cuda cuda_arch=80
# - ascent +cuda cuda_arch=80 # ascent: https://github.com/spack/spack/issues/38045
# - paraview +cuda cuda_arch=80
# - vtk-m +cuda cuda_arch=80
# - zfp +cuda cuda_arch=80
# --
# - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver
# - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver
# - axom +cuda cuda_arch=80 # axom: https://github.com/spack/spack/issues/29520
# - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788
# # CUDA 80
# - amrex +cuda cuda_arch=80
# - arborx +cuda cuda_arch=80 ^kokkos +wrapper
# - cabana +cuda cuda_arch=80 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=80
# - caliper +cuda cuda_arch=80
# - chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire ~shared
# - cusz +cuda cuda_arch=80
# - dealii +cuda cuda_arch=80
# - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # +ascent fails because fides fetch error
# - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=80 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=80 #^raja@0.14.0
# - flecsi +cuda cuda_arch=80
# - ginkgo +cuda cuda_arch=80
# - gromacs +cuda cuda_arch=80
# - heffte +cuda cuda_arch=80
# - hpx +cuda cuda_arch=80
# - hypre +cuda cuda_arch=80
# - kokkos +wrapper +cuda cuda_arch=80
# - kokkos-kernels +cuda cuda_arch=80 ^kokkos +wrapper +cuda cuda_arch=80
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=80 ^cusz +cuda cuda_arch=80
# - magma +cuda cuda_arch=80
# - mfem +cuda cuda_arch=80
# - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=80
# - omega-h +cuda cuda_arch=80
# - parsec +cuda cuda_arch=80
# - petsc +cuda cuda_arch=80
# - py-torch +cuda cuda_arch=80
# - raja +cuda cuda_arch=80
# - slate +cuda cuda_arch=80
# - slepc +cuda cuda_arch=80
# - strumpack ~slate +cuda cuda_arch=80
# - sundials +cuda cuda_arch=80
# - superlu-dist +cuda cuda_arch=80
# - tasmanian +cuda cuda_arch=80
# - trilinos +cuda cuda_arch=80
# - umpire ~shared +cuda cuda_arch=80
# # INCLUDED IN ECP DAV CUDA
# # - adios2 +cuda cuda_arch=80
# # - ascent +cuda cuda_arch=80 # ascent: https://github.com/spack/spack/issues/38045
# # - paraview +cuda cuda_arch=80
# # - vtk-m +cuda cuda_arch=80
# # - zfp +cuda cuda_arch=80
# # --
# # - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver
# # - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver
# # - axom +cuda cuda_arch=80 # axom: https://github.com/spack/spack/issues/29520
# # - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788
# CUDA 90
- amrex +cuda cuda_arch=90
- arborx +cuda cuda_arch=90 ^kokkos +wrapper
- cabana +cuda cuda_arch=90 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=90
- caliper +cuda cuda_arch=90
- chai ~benchmarks ~tests +cuda cuda_arch=90 ^umpire ~shared
- cusz +cuda cuda_arch=90
- flecsi +cuda cuda_arch=90
- ginkgo +cuda cuda_arch=90
- gromacs +cuda cuda_arch=90
- heffte +cuda cuda_arch=90
- hpx +cuda cuda_arch=90
- kokkos +wrapper +cuda cuda_arch=90
- kokkos-kernels +cuda cuda_arch=90 ^kokkos +wrapper +cuda cuda_arch=90
- libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=90 ^cusz +cuda cuda_arch=90
- magma +cuda cuda_arch=90
- mfem +cuda cuda_arch=90
- mgard +serial +openmp +timing +unstructured +cuda cuda_arch=90
- parsec +cuda cuda_arch=90
- petsc +cuda cuda_arch=90
- py-torch +cuda cuda_arch=90
- raja +cuda cuda_arch=90
- slate +cuda cuda_arch=90
- slepc +cuda cuda_arch=90
- strumpack ~slate +cuda cuda_arch=90
- sundials +cuda cuda_arch=90
- superlu-dist +cuda cuda_arch=90
- trilinos +cuda cuda_arch=90
- umpire ~shared +cuda cuda_arch=90
# INCLUDED IN ECP DAV CUDA
- adios2 +cuda cuda_arch=90
# - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045
# - paraview +cuda cuda_arch=90 # paraview: InstallError: Incompatible cuda_arch=90
- vtk-m +cuda cuda_arch=90
- zfp +cuda cuda_arch=90
# --
# - axom +cuda cuda_arch=90 # axom: https://github.com/spack/spack/issues/29520
# - dealii +cuda cuda_arch=90 # dealii: https://github.com/spack/spack/issues/39532
# - ecp-data-vis-sdk ~rocm +adios2 +ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=90 # paraview: incompatible cuda_arch; vtk-m: CMake Error at CMake/VTKmWrappers.cmake:413 (message): vtkm_cont needs to be built STATIC as CUDA doesn't support virtual methods across dynamic library boundaries. You need to set the CMake option BUILD_SHARED_LIBS to `OFF` or (better) turn VTKm_NO_DEPRECATED_VIRTUAL to `ON`.
# - hypre +cuda cuda_arch=90 # concretizer: hypre +cuda requires cuda@:11, but cuda_arch=90 requires cuda@12:
# - lammps +cuda cuda_arch=90 # lammps: needs NVIDIA driver
# - lbann +cuda cuda_arch=90 # concretizer: Cannot select a single "version" for package "lbann"
# - omega-h +cuda cuda_arch=90 # omega-h: https://github.com/spack/spack/issues/39535
# - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12
# - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver
# # CUDA 90
# - amrex +cuda cuda_arch=90
# - arborx +cuda cuda_arch=90 ^kokkos +wrapper
# - cabana +cuda cuda_arch=90 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=90
# - caliper +cuda cuda_arch=90
# - chai ~benchmarks ~tests +cuda cuda_arch=90 ^umpire ~shared
# - cusz +cuda cuda_arch=90
# - flecsi +cuda cuda_arch=90
# - ginkgo +cuda cuda_arch=90
# - gromacs +cuda cuda_arch=90
# - heffte +cuda cuda_arch=90
# - hpx +cuda cuda_arch=90
# - kokkos +wrapper +cuda cuda_arch=90
# - kokkos-kernels +cuda cuda_arch=90 ^kokkos +wrapper +cuda cuda_arch=90
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=90 ^cusz +cuda cuda_arch=90
# - magma +cuda cuda_arch=90
# - mfem +cuda cuda_arch=90
# - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=90
# - parsec +cuda cuda_arch=90
# - petsc +cuda cuda_arch=90
# - py-torch +cuda cuda_arch=90
# - raja +cuda cuda_arch=90
# - slate +cuda cuda_arch=90
# - slepc +cuda cuda_arch=90
# - strumpack ~slate +cuda cuda_arch=90
# - sundials +cuda cuda_arch=90
# - superlu-dist +cuda cuda_arch=90
# - trilinos +cuda cuda_arch=90
# - umpire ~shared +cuda cuda_arch=90
# # INCLUDED IN ECP DAV CUDA
# - adios2 +cuda cuda_arch=90
# # - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045
# # - paraview +cuda cuda_arch=90 # paraview: InstallError: Incompatible cuda_arch=90
# - vtk-m +cuda cuda_arch=90
# - zfp +cuda cuda_arch=90
# # --
# # - axom +cuda cuda_arch=90 # axom: https://github.com/spack/spack/issues/29520
# # - dealii +cuda cuda_arch=90 # dealii: https://github.com/spack/spack/issues/39532
# # - ecp-data-vis-sdk ~rocm +adios2 +ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=90 # paraview: incompatible cuda_arch; vtk-m: CMake Error at CMake/VTKmWrappers.cmake:413 (message): vtkm_cont needs to be built STATIC as CUDA doesn't support virtual methods across dynamic library boundaries. You need to set the CMake option BUILD_SHARED_LIBS to `OFF` or (better) turn VTKm_NO_DEPRECATED_VIRTUAL to `ON`.
# # - hypre +cuda cuda_arch=90 # concretizer: hypre +cuda requires cuda@:11, but cuda_arch=90 requires cuda@12:
# # - lammps +cuda cuda_arch=90 # lammps: needs NVIDIA driver
# # - lbann +cuda cuda_arch=90 # concretizer: Cannot select a single "version" for package "lbann"
# # - omega-h +cuda cuda_arch=90 # omega-h: https://github.com/spack/spack/issues/39535
# # - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12
# # - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver
# ROCM NOARCH
- hpctoolkit +rocm
- tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build
# # ROCM NOARCH
# - hpctoolkit +rocm
# - tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build
# ROCM 908
- adios2 +kokkos +rocm amdgpu_target=gfx908
- amrex +rocm amdgpu_target=gfx908
- arborx +rocm amdgpu_target=gfx908
- cabana +rocm amdgpu_target=gfx908
- caliper +rocm amdgpu_target=gfx908
- chai ~benchmarks +rocm amdgpu_target=gfx908
- ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908
- gasnet +rocm amdgpu_target=gfx908
- ginkgo +rocm amdgpu_target=gfx908
- heffte +rocm amdgpu_target=gfx908
- hpx +rocm amdgpu_target=gfx908
- hypre +rocm amdgpu_target=gfx908
- kokkos +rocm amdgpu_target=gfx908
- legion +rocm amdgpu_target=gfx908
- magma ~cuda +rocm amdgpu_target=gfx908
- mfem +rocm amdgpu_target=gfx908
- petsc +rocm amdgpu_target=gfx908
- raja ~openmp +rocm amdgpu_target=gfx908
- slate +rocm amdgpu_target=gfx908
- slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908
- strumpack ~slate +rocm amdgpu_target=gfx908
- sundials +rocm amdgpu_target=gfx908
- superlu-dist +rocm amdgpu_target=gfx908
- tasmanian ~openmp +rocm amdgpu_target=gfx908
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx908
- umpire +rocm amdgpu_target=gfx908
- upcxx +rocm amdgpu_target=gfx908
# INCLUDED IN ECP DAV ROCM
# - hdf5
# - hdf5-vol-async
# - hdf5-vol-cache
# - hdf5-vol-log
# - libcatalyst
- paraview +rocm amdgpu_target=gfx908
# - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268
# --
# - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package)
# - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807
# - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898
# # ROCM 908
# - adios2 +kokkos +rocm amdgpu_target=gfx908
# - amrex +rocm amdgpu_target=gfx908
# - arborx +rocm amdgpu_target=gfx908
# - cabana +rocm amdgpu_target=gfx908
# - caliper +rocm amdgpu_target=gfx908
# - chai ~benchmarks +rocm amdgpu_target=gfx908
# - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908
# - gasnet +rocm amdgpu_target=gfx908
# - ginkgo +rocm amdgpu_target=gfx908
# - heffte +rocm amdgpu_target=gfx908
# - hpx +rocm amdgpu_target=gfx908
# - hypre +rocm amdgpu_target=gfx908
# - kokkos +rocm amdgpu_target=gfx908
# - legion +rocm amdgpu_target=gfx908
# - magma ~cuda +rocm amdgpu_target=gfx908
# - mfem +rocm amdgpu_target=gfx908
# - petsc +rocm amdgpu_target=gfx908
# - raja ~openmp +rocm amdgpu_target=gfx908
# - slate +rocm amdgpu_target=gfx908
# - slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908
# - strumpack ~slate +rocm amdgpu_target=gfx908
# - sundials +rocm amdgpu_target=gfx908
# - superlu-dist +rocm amdgpu_target=gfx908
# - tasmanian ~openmp +rocm amdgpu_target=gfx908
# - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx908
# - umpire +rocm amdgpu_target=gfx908
# - upcxx +rocm amdgpu_target=gfx908
# # INCLUDED IN ECP DAV ROCM
# # - hdf5
# # - hdf5-vol-async
# # - hdf5-vol-cache
# # - hdf5-vol-log
# # - libcatalyst
# - paraview +rocm amdgpu_target=gfx908
# # - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268
# # --
# # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package)
# # - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807
# # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898
# # ROCM 90a
# - adios2 +kokkos +rocm amdgpu_target=gfx90a
# - amrex +rocm amdgpu_target=gfx90a
# - arborx +rocm amdgpu_target=gfx90a
# - cabana +rocm amdgpu_target=gfx90a
# - caliper +rocm amdgpu_target=gfx90a
# - chai ~benchmarks +rocm amdgpu_target=gfx90a
# - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a
# - gasnet +rocm amdgpu_target=gfx90a
# - ginkgo +rocm amdgpu_target=gfx90a
# - heffte +rocm amdgpu_target=gfx90a
# - hpx +rocm amdgpu_target=gfx90a
# - hypre +rocm amdgpu_target=gfx90a
# - kokkos +rocm amdgpu_target=gfx90a
# - legion +rocm amdgpu_target=gfx90a
# - magma ~cuda +rocm amdgpu_target=gfx90a
# - mfem +rocm amdgpu_target=gfx90a
# - petsc +rocm amdgpu_target=gfx90a
# - raja ~openmp +rocm amdgpu_target=gfx90a
# - slate +rocm amdgpu_target=gfx90a
# - slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a
# - strumpack ~slate +rocm amdgpu_target=gfx90a
# - sundials +rocm amdgpu_target=gfx90a
# - superlu-dist +rocm amdgpu_target=gfx90a
# - tasmanian ~openmp +rocm amdgpu_target=gfx90a
# - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx90a
# - umpire +rocm amdgpu_target=gfx90a
# - upcxx +rocm amdgpu_target=gfx90a
# # INCLUDED IN ECP DAV ROCM
# # - hdf5
# # - hdf5-vol-async
# # - hdf5-vol-cache
# # - hdf5-vol-log
# # - libcatalyst
# - paraview +rocm amdgpu_target=gfx90a
# # - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268
# # --
# # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package)
# # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807
# # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898
# ROCM 90a
- adios2 +kokkos +rocm amdgpu_target=gfx90a
- amrex +rocm amdgpu_target=gfx90a
- arborx +rocm amdgpu_target=gfx90a
- cabana +rocm amdgpu_target=gfx90a
- caliper +rocm amdgpu_target=gfx90a
- chai ~benchmarks +rocm amdgpu_target=gfx90a
- ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a
- gasnet +rocm amdgpu_target=gfx90a
- ginkgo +rocm amdgpu_target=gfx90a
- heffte +rocm amdgpu_target=gfx90a
- hpx +rocm amdgpu_target=gfx90a
- hypre +rocm amdgpu_target=gfx90a
- kokkos +rocm amdgpu_target=gfx90a
- legion +rocm amdgpu_target=gfx90a
- magma ~cuda +rocm amdgpu_target=gfx90a
- mfem +rocm amdgpu_target=gfx90a
- petsc +rocm amdgpu_target=gfx90a
- raja ~openmp +rocm amdgpu_target=gfx90a
- slate +rocm amdgpu_target=gfx90a
- slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a
- strumpack ~slate +rocm amdgpu_target=gfx90a
- sundials +rocm amdgpu_target=gfx90a
- superlu-dist +rocm amdgpu_target=gfx90a
- tasmanian ~openmp +rocm amdgpu_target=gfx90a
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx90a
- umpire +rocm amdgpu_target=gfx90a
- upcxx +rocm amdgpu_target=gfx90a
# INCLUDED IN ECP DAV ROCM
# - hdf5
# - hdf5-vol-async
# - hdf5-vol-cache
# - hdf5-vol-log
# - libcatalyst
- paraview +rocm amdgpu_target=gfx90a
# - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268
# --
# - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package)
# - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807
# - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898
ci:
pipeline-gen:
- build-job:
image: ecpe4s/ubuntu22.04-runner-amd64-gcc-11.4:2024.03.01
image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01"
cdash:
build-group: E4S

View File

@@ -120,11 +120,9 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage):
depends_on("cuda", when="+cuda ~kokkos")
# Kokkos support
with when("+kokkos"):
depends_on("kokkos@3.7:")
depends_on("kokkos +cuda +wrapper", when="+cuda")
depends_on("kokkos +rocm", when="+rocm")
depends_on("kokkos +sycl", when="+sycl")
depends_on("kokkos@3.7: +cuda +wrapper", when="+kokkos +cuda")
depends_on("kokkos@3.7: +rocm", when="+kokkos +rocm")
depends_on("kokkos@3.7: +sycl", when="+kokkos +sycl")
# Propagate CUDA target to kokkos for +cuda
for cuda_arch in CudaPackage.cuda_arch_values:

View File

@@ -33,11 +33,6 @@ class Aocc(Package):
maintainers("amd-toolchain-support")
version(
ver="4.2.0",
sha256="ed5a560ec745b24dc0685ccdcbde914843fb2f2dfbfce1ba592de4ffbce1ccab",
url="https://download.amd.com/developer/eula/aocc/aocc-4-2/aocc-compiler-4.2.0.tar",
)
version(
ver="4.1.0",
sha256="5b04bfdb751c68dfb9470b34235d76efa80a6b662a123c3375b255982cb52acd",
@@ -61,6 +56,7 @@ class Aocc(Package):
depends_on("zlib-api")
depends_on("ncurses")
depends_on("libtool")
depends_on("texinfo")
variant(
"license-agreed",

View File

@@ -0,0 +1,41 @@
From eb1e1351da41a0da25aa056636932acd8a4f955f Mon Sep 17 00:00:00 2001
From: Ethan Stewart <ethan.stewart@amd.com>
Date: Fri, 25 Sep 2020 09:53:42 -0500
Subject: [PATCH] Add amdgcn to devicelibs bitcode names 3.8
---
clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp b/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp
index 25d3db59d44..1bb9d993bf7 100644
--- a/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp
+++ b/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp
@@ -148,21 +148,21 @@ const char *AMDGCN::OpenMPLinker::constructOmpExtraCmds(
llvm::StringRef WaveFrontSizeBC;
std::string GFXVersion = SubArchName.drop_front(3).str();
if (stoi(GFXVersion) < 1000)
- WaveFrontSizeBC = "oclc_wavefrontsize64_on.bc";
+ WaveFrontSizeBC = "oclc_wavefrontsize64_on.amdgcn.bc";
else
- WaveFrontSizeBC = "oclc_wavefrontsize64_off.bc";
+ WaveFrontSizeBC = "oclc_wavefrontsize64_off.amdgcn.bc";
// FIXME: remove double link of hip aompextras, ockl, and WaveFrontSizeBC
if (Args.hasArg(options::OPT_cuda_device_only))
BCLibs.append(
{Args.MakeArgString("libomptarget-amdgcn-" + SubArchName + ".bc"),
- "hip.bc", "ockl.bc",
+ "hip.amdgcn.bc", "ockl.amdgcn.bc",
std::string(WaveFrontSizeBC)});
else {
BCLibs.append(
{Args.MakeArgString("libomptarget-amdgcn-" + SubArchName + ".bc"),
Args.MakeArgString("libaompextras-amdgcn-" + SubArchName + ".bc"),
- "hip.bc", "ockl.bc",
+ "hip.amdgcn.bc", "ockl.amdgcn.bc",
Args.MakeArgString("libbc-hostrpc-amdgcn.a"),
std::string(WaveFrontSizeBC)});
--
2.17.1

View File

@@ -0,0 +1,41 @@
From 2414b9faee9c264ce4b92b4d709375313df03344 Mon Sep 17 00:00:00 2001
From: Ethan Stewart <ethan.stewart@amd.com>
Date: Tue, 22 Sep 2020 13:39:22 -0500
Subject: [PATCH] Add amdgcn to devicelibs bitcode names
---
clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp b/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp
index cc9b4f1caba..d22609fbe62 100644
--- a/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp
+++ b/clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp
@@ -148,21 +148,21 @@ const char *AMDGCN::OpenMPLinker::constructOmpExtraCmds(
llvm::StringRef WaveFrontSizeBC;
std::string GFXVersion = SubArchName.drop_front(3).str();
if (stoi(GFXVersion) < 1000)
- WaveFrontSizeBC = "oclc_wavefrontsize64_on.bc";
+ WaveFrontSizeBC = "oclc_wavefrontsize64_on.amdgcn.bc";
else
- WaveFrontSizeBC = "oclc_wavefrontsize64_off.bc";
+ WaveFrontSizeBC = "oclc_wavefrontsize64_off.amdgcn.bc";
// FIXME: remove double link of hip aompextras, ockl, and WaveFrontSizeBC
if (Args.hasArg(options::OPT_cuda_device_only))
BCLibs.append(
{Args.MakeArgString("libomptarget-amdgcn-" + SubArchName + ".bc"),
- "hip.bc", "ockl.bc",
+ "hip.amdgcn.bc", "ockl.amdgcn.bc",
std::string(WaveFrontSizeBC)});
else {
BCLibs.append(
{Args.MakeArgString("libomptarget-amdgcn-" + SubArchName + ".bc"),
Args.MakeArgString("libaompextras-amdgcn-" + SubArchName + ".bc"),
- "hip.bc", "ockl.bc",
+ "hip.amdgcn.bc", "ockl.amdgcn.bc",
Args.MakeArgString("libbc-hostrpc-amdgcn.a"),
std::string(WaveFrontSizeBC)});
--
2.17.1

View File

@@ -0,0 +1,28 @@
From 526efe86427a4d49da38773534d84025dd4246c3 Mon Sep 17 00:00:00 2001
From: Ethan Stewart <ethan.stewart@amd.com>
Date: Tue, 10 Nov 2020 15:32:59 -0600
Subject: [PATCH] Add cmake option for copying source for debugging.
---
openmp/CMakeLists.txt | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/openmp/CMakeLists.txt b/openmp/CMakeLists.txt
index a86e83c50212..51962b561a3b 100644
--- a/openmp/CMakeLists.txt
+++ b/openmp/CMakeLists.txt
@@ -103,3 +103,11 @@ endif()
# Now that we have seen all testsuites, create the check-openmp target.
construct_check_openmp_target()
+
+option(DEBUG_COPY_SOURCE "Enable source code copy for openmp debug build."
+ ${ENABLE_SOURCE_COPY})
+if (${ENABLE_SOURCE_COPY})
+ install(DIRECTORY runtime/src DESTINATION ${OPENMP_INSTALL_LIBDIR}/src/openmp/runtime)
+ install(DIRECTORY libomptarget/src libomptarget/plugins DESTINATION ${OPENMP_INSTALL_LIBDIR}/src/openmp/libomptarget)
+ install(DIRECTORY libompd/src DESTINATION ${OPENMP_INSTALL_LIBDIR}/src/openmp/libompd)
+endif()
--
2.17.1

View File

@@ -0,0 +1,528 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
from spack.package import *
tools_url = "https://github.com/ROCm"
compute_url = "https://github.com/ROCm"
aomp = [
"e4526489833896bbc47ba865e0d115fab278ce269789a8c99a97f444595f5f6a",
"970374c3acb9dda8b9a17d7a579dbaab48fac731db8fdce566a65abee37e5ed3",
"86f90d6505eccdb2840069cadf57f7111d4685653c4974cf65fb22b172e55478",
"14fc6867af0b17e3bff8cb42cb36f509c95a29b7a933a106bf6778de21f6c123",
"ce29cead5391a4a13f2c567e2e059de9291888d24985460725e43a91b740be7a",
]
devlib = [
"dce3a4ba672c4a2da4c2260ee4dc96ff6dd51877f5e7e1993cb107372a35a378",
"b3a114180bf184b3b829c356067bc6a98021d52c1c6f9db6bc57272ebafc5f1d",
"e82cc9a8eb7d92de02cabb856583e28f17a05c8cf9c97aec5275608ef1a38574",
"c99f45dacf5967aef9a31e3731011b9c142446d4a12bac69774998976f2576d7",
"bca9291385d6bdc91a8b39a46f0fd816157d38abb1725ff5222e6a0daa0834cc",
]
llvm = [
"b4fd7305dc57887eec17cce77bbf42215db46a4a3d14d8e517ab92f4e200b29d",
"89b967de5e79f6df7c62fdc12529671fa30989ae7b634d5a7c7996629ec1140e",
"98deabedb6cb3067ee960a643099631902507f236e4d9dc65b3e0f8d659eb55c",
"f0a0b9fec0626878340a15742e73a56f155090011716461edcb069dcf05e6b30",
"3ff18a8bd31d5b55232327e574dfa3556cf26787e105d0ba99411c5687325a8d",
]
flang = [
"cc27f8bfb49257b7a4f0b03f4ba5e06a28dcb6c337065c4201b6075dd2d5bc48",
"1fe07a0da20eb66a2a2aa8d354bf95c6f216ec38cc4a051e98041e0d13c34b36",
"54cc6a9706dba6d7808258632ed40fa6493838edb309709d3b25e0f9b02507f8",
"43d57bcc87fab092ac242e36da62588a87b6fa91f9e81fdb330159497afdecb3",
"81674bf3c9d8fd9b16fb3e5c66a870537c25ff8302fc1b162ab9e95944167163",
]
extras = [
"5dbf27f58b8114318208b97ba99a90483b78eebbcad4117cac6881441977e855",
"adaf7670b2497ff3ac09636e0dd30f666a5a5b742ecdcb8551d722102dcfbd85",
"4460a4f4b03022947f536221483e85dcd9b07064a54516ec103a1939c3f587b5",
"014fca1fba54997c6db0e84822df274fb6807698b6856da4f737f38f10ab0e5d",
"ee146cff4b9ee7aae90d7bb1d6b4957839232be0e7dab1865e0ae39832f8f795",
]
# Used only for 3.5.0
hip = ["86eb7749ff6f6c5f6851cd6c528504d42f9286967324a50dd0dd54a6a74cacc7"]
vdi = ["b21866c7c23dc536356db139b88b6beb3c97f58658836974a7fc167feb31ad7f"]
opencl = ["8963fcd5a167583b3db8b94363778d4df4593bfce8141e1d3c32a59fb64a0cf6"]
versions = ["3.5.0", "3.7.0", "3.8.0", "3.9.0", "3.10.0"]
versions_dict = dict() # type: Dict[str, Dict[str, str]]
hashes = [aomp, devlib, llvm, flang, extras]
hashes_35 = [aomp, devlib, llvm, flang, extras, hip, vdi, opencl]
components = ["aomp", "devlib", "llvm", "flang", "extras"]
components_35 = ["aomp", "devlib", "llvm", "flang", "extras", "hip", "vdi", "opencl"]
for outer_index, item in enumerate(versions):
if item == "3.5.0":
use_components = components_35
use_hashes = hashes_35
else:
use_components = components
use_hashes = hashes
for inner_index, component in enumerate(use_hashes):
versions_dict.setdefault(item, {})[use_components[inner_index]] = use_hashes[inner_index][
outer_index
]
class Aomp(Package):
"""llvm openmp compiler from AMD."""
homepage = tools_url + "/aomp"
url = tools_url + "/aomp/archive/rocm-3.10.0.tar.gz"
maintainers("srekolam", "arjun-raj-kuppala", "estewart08")
tags = ["e4s"]
version("3.10.0", sha256=versions_dict["3.10.0"]["aomp"])
version("3.9.0", sha256=versions_dict["3.9.0"]["aomp"])
# Cmake above 3.18 would fail the build on 3.5.0
depends_on("cmake@3:", type="build")
depends_on("cmake@3:3.17", when="@3.5.0", type="build")
depends_on("python@3:", type="build", when="@3.9.0:")
depends_on("py-setuptools", when="@3.9.0:", type="build")
depends_on("gl@4.5:", type=("build", "link"))
depends_on("py-pip", when="@3.8.0:", type="build")
depends_on("py-wheel", when="@3.8.0:", type=("build", "run"))
depends_on("perl-data-dumper", type="build")
depends_on("awk", type="build")
depends_on("elfutils", type=("build", "link"))
depends_on("libffi", type=("build", "link"))
for ver in ["3.5.0", "3.7.0", "3.8.0", "3.9.0", "3.10.0"]:
depends_on("hsakmt-roct@" + ver, when="@" + ver)
depends_on("comgr@" + ver, type="build", when="@" + ver)
depends_on("hsa-rocr-dev@" + ver, when="@" + ver)
depends_on("rocm-device-libs@" + ver, when="@" + ver)
if ver != "3.5.0":
depends_on("hip@" + ver, when="@" + ver)
depends_on("hip-rocclr@" + ver, when="@" + ver)
if ver == "3.9.0" or ver == "3.10.0":
depends_on("rocm-gdb@" + ver, when="@" + ver)
resource(
name="rocm-device-libs",
url=compute_url + "/ROCm-Device-Libs/archive/rocm-" + ver + ".tar.gz",
sha256=versions_dict[ver]["devlib"],
expand=True,
destination="aomp-dir",
placement="rocm-device-libs",
when="@" + ver,
)
resource(
name="amd-llvm-project",
url=tools_url + "/amd-llvm-project/archive/rocm-" + ver + ".tar.gz",
sha256=versions_dict[ver]["llvm"],
expand=True,
destination="aomp-dir",
placement="amd-llvm-project",
when="@" + ver,
)
resource(
name="flang",
url=tools_url + "/flang/archive/rocm-" + ver + ".tar.gz",
sha256=versions_dict[ver]["flang"],
expand=True,
destination="aomp-dir",
placement="flang",
when="@" + ver,
)
resource(
name="aomp-extras",
url=tools_url + "/aomp-extras/archive/rocm-" + ver + ".tar.gz",
sha256=versions_dict[ver]["extras"],
expand=True,
destination="aomp-dir",
placement="aomp-extras",
when="@" + ver,
)
if ver == "3.5.0":
resource(
name="hip-on-vdi",
url=tools_url + "/hip/archive/aomp-3.5.0.tar.gz",
sha256=versions_dict["3.5.0"]["hip"],
expand=True,
destination="aomp-dir",
placement="hip-on-vdi",
when="@3.5.0",
)
resource(
name="vdi",
url=tools_url + "/rocclr/archive/aomp-3.5.0.tar.gz",
sha256=versions_dict["3.5.0"]["vdi"],
expand=True,
destination="aomp-dir",
placement="vdi",
when="@3.5.0",
)
resource(
name="opencl-on-vdi",
sha256=versions_dict["3.5.0"]["opencl"],
url=compute_url + "/ROCm-OpenCL-Runtime/archive/aomp-3.5.0.tar.gz",
expand=True,
destination="aomp-dir",
placement="opencl-on-vdi",
when="@3.5.0",
)
# Copy source files over for debug build in 3.9.0
patch(
"0001-Add-cmake-option-for-copying-source-for-debugging.patch",
working_dir="aomp-dir/amd-llvm-project",
when="@3.9.0:",
)
# Revert back to .amdgcn.bc naming scheme for 3.8.0
patch(
"0001-Add-amdgcn-to-devicelibs-bitcode-names-3.8.patch",
working_dir="aomp-dir/amd-llvm-project",
when="@3.8.0",
)
# Revert back to .amdgcn.bc naming scheme for 3.7.0
patch(
"0001-Add-amdgcn-to-devicelibs-bitcode-names.patch",
working_dir="aomp-dir/amd-llvm-project",
when="@3.7.0",
)
def patch(self):
# Make sure python2.7 is used for the generation of hip header
if self.spec.version == Version("3.5.0"):
kwargs = {"ignore_absent": False, "backup": False, "string": False}
with working_dir("aomp-dir/hip-on-vdi"):
match = "^#!/usr/bin/python"
python = self.spec["python"].command.path
substitute = "#!{python}".format(python=python)
files = ["hip_prof_gen.py", "vdi/hip_prof_gen.py"]
filter_file(match, substitute, *files, **kwargs)
src = self.stage.source_path
libomptarget = "{0}/aomp-dir/amd-llvm-project/openmp/libomptarget"
aomp_extras = "{0}/aomp-dir/aomp-extras/aomp-device-libs"
flang = "{0}/aomp-dir/flang/"
if self.spec.version >= Version("3.9.0"):
filter_file(
"ADDITIONAL_VERSIONS 2.7",
"ADDITIONAL_VERSIONS 3",
flang.format(src) + "CMakeLists.txt",
)
if self.spec.version >= Version("3.8.0"):
filter_file(
"{CMAKE_INSTALL_PREFIX}",
"{HSA_INCLUDE}",
libomptarget.format(src) + "/hostrpc/services/CMakeLists.txt",
)
filter_file(
"CONFIG",
"CONFIG PATHS ${CMAKE_INSTALL_PREFIX} NO_DEFAULT_PATH",
libomptarget.format(src) + "/../libompd/test/CMakeLists.txt",
)
if self.spec.version != Version("3.5.0"):
filter_file(
"{ROCM_DIR}/aomp/amdgcn/bitcode",
"{DEVICE_LIBS_DIR}",
libomptarget.format(src) + "/hostrpc/CMakeLists.txt",
libomptarget.format(src) + "/deviceRTLs/amdgcn/CMakeLists.txt",
)
if self.spec.version == Version("3.5.0"):
filter_file(
"{ROCM_DIR}/lib/bitcode",
"{DEVICE_LIBS_DIR}",
libomptarget.format(src) + "/deviceRTLs/hostcall/CMakeLists.txt",
)
filter_file(
"{ROCM_DIR}/lib/bitcode",
"{DEVICE_LIBS_DIR}",
aomp_extras.format(src) + "/aompextras/CMakeLists.txt",
aomp_extras.format(src) + "/libm/CMakeLists.txt",
libomptarget.format(src) + "/deviceRTLs/amdgcn/CMakeLists.txt",
string=True,
)
filter_file(
r"${ROCM_DIR}/hsa/include ${ROCM_DIR}/hsa/include/hsa",
"${HSA_INCLUDE}/hsa/include ${HSA_INCLUDE}/hsa/include/hsa",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
string=True,
)
filter_file(
"{ROCM_DIR}/hsa/lib",
"{HSA_LIB}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
)
filter_file(
r"{ROCM_DIR}/lib\)",
"{HSAKMT_LIB})\nset(HSAKMT_LIB64 ${HSAKMT_LIB64})",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
)
filter_file(
r"-L${LIBOMPTARGET_DEP_LIBHSAKMT_LIBRARIES_DIRS}",
"-L${LIBOMPTARGET_DEP_LIBHSAKMT_LIBRARIES_DIRS} -L${HSAKMT_LIB64}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
string=True,
)
filter_file(
r"-rpath,${LIBOMPTARGET_DEP_LIBHSAKMT_LIBRARIES_DIRS}",
"-rpath,${LIBOMPTARGET_DEP_LIBHSAKMT_LIBRARIES_DIRS}" + ",-rpath,${HSAKMT_LIB64}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
string=True,
)
filter_file(
"{ROCM_DIR}/include",
"{COMGR_INCLUDE}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
)
filter_file(
"{ROCM_DIR}/include",
"{COMGR_INCLUDE}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
)
filter_file(
r"-L${LLVM_LIBDIR}${OPENMP_LIBDIR_SUFFIX}",
"-L${LLVM_LIBDIR}${OPENMP_LIBDIR_SUFFIX} -L${COMGR_LIB}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
string=True,
)
filter_file(
r"rpath,${LLVM_LIBDIR}${OPENMP_LIBDIR_SUFFIX}",
"rpath,${LLVM_LIBDIR}${OPENMP_LIBDIR_SUFFIX}" + "-Wl,-rpath,${COMGR_LIB}",
libomptarget.format(src) + "/plugins/hsa/CMakeLists.txt",
string=True,
)
def setup_run_environment(self, env):
devlibs_prefix = self.spec["rocm-device-libs"].prefix
aomp_prefix = self.spec["aomp"].prefix
env.set("HIP_DEVICE_LIB_PATH", "{0}/amdgcn/bitcode".format(format(devlibs_prefix)))
env.set("AOMP", "{0}".format(format(aomp_prefix)))
def setup_build_environment(self, env):
aomp_prefix = self.spec["aomp"].prefix
env.set("AOMP", "{0}".format(format(aomp_prefix)))
env.set("FC", "{0}/bin/flang".format(format(aomp_prefix)))
env.set("GFXLIST", "gfx700 gfx701 gfx801 gfx803 gfx900 gfx902 gfx906 gfx908")
def install(self, spec, prefix):
src = self.stage.source_path
gfx_list = "gfx700;gfx701;gfx801;gfx803;gfx900;gfx902;gfx906;gfx908"
aomp_prefix = self.spec["aomp"].prefix
devlibs_prefix = self.spec["rocm-device-libs"].prefix
hsa_prefix = self.spec["hsa-rocr-dev"].prefix
hsakmt_prefix = self.spec["hsakmt-roct"].prefix
comgr_prefix = self.spec["comgr"].prefix
opencl_src = "/aomp-dir/opencl-on-vdi/api/opencl"
omp_src = "/aomp-dir/amd-llvm-project/openmp"
debug_map_format = "-fdebug-prefix-map={0}{1}={2}".format(src, omp_src, aomp_prefix)
if self.spec.version >= Version("3.9.0"):
bitcode_dir = "/amdgcn/bitcode"
else:
bitcode_dir = "/lib"
components = dict()
components["amd-llvm-project"] = [
"../aomp-dir/amd-llvm-project/llvm",
"-DLLVM_ENABLE_PROJECTS=clang;lld;compiler-rt",
"-DCMAKE_BUILD_TYPE=release",
"-DLLVM_ENABLE_ASSERTIONS=ON",
"-DLLVM_TARGETS_TO_BUILD=AMDGPU;X86",
"-DCMAKE_C_COMPILER={0}".format(self.compiler.cc),
"-DCMAKE_CXX_COMPILER={0}".format(self.compiler.cxx),
"-DCMAKE_ASM_COMPILER={0}".format(self.compiler.cc),
"-DBUG_REPORT_URL=https://github.com/ROCm/aomp",
"-DLLVM_ENABLE_BINDINGS=OFF",
"-DLLVM_INCLUDE_BENCHMARKS=OFF",
"-DLLVM_BUILD_TESTS=OFF",
"-DLLVM_INCLUDE_TESTS=OFF",
"-DCLANG_INCLUDE_TESTS=OFF",
"-DCMAKE_VERBOSE_MAKEFILE=1",
"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE",
]
if self.spec.version == Version("3.5.0"):
components["vdi"] = [
"../aomp-dir/vdi",
"-DUSE_COMGR_LIBRARY=yes",
"-DOPENCL_DIR={0}{1}".format(src, opencl_src),
]
components["hip-on-vdi"] = [
"../aomp-dir/hip-on-vdi",
"-DVDI_ROOT={0}/aomp-dir/vdi".format(src),
"-DHIP_COMPILER=clang",
"-DHIP_PLATFORM=vdi",
"-DVDI_DIR={0}/aomp-dir/vdi".format(src),
"-DHSA_PATH={0}".format(hsa_prefix),
"-DLIBVDI_STATIC_DIR={0}/spack-build-vdi".format(src),
"-DCMAKE_CXX_FLAGS=-Wno-ignored-attributes",
]
components["aomp-extras"] = [
"../aomp-dir/aomp-extras",
"-DROCM_PATH=$ROCM_DIR ",
"-DDEVICE_LIBS_DIR={0}{1}".format(devlibs_prefix, bitcode_dir),
"-DAOMP_STANDALONE_BUILD=0",
"-DDEVICELIBS_ROOT={0}/aomp-dir/rocm-device-libs".format(src),
"-DCMAKE_VERBOSE_MAKEFILE=1",
]
openmp_common_args = [
"-DROCM_DIR={0}".format(hsa_prefix),
"-DDEVICE_LIBS_DIR={0}{1}".format(devlibs_prefix, bitcode_dir),
"-DAOMP_STANDALONE_BUILD=0",
"-DDEVICELIBS_ROOT={0}/aomp-dir/rocm-device-libs".format(src),
"-DOPENMP_TEST_C_COMPILER={0}/bin/clang".format(aomp_prefix),
"-DOPENMP_TEST_CXX_COMPILER={0}/bin/clang++".format(aomp_prefix),
"-DLIBOMPTARGET_AMDGCN_GFXLIST={0}".format(gfx_list),
"-DLIBOMP_COPY_EXPORTS=OFF",
"-DHSA_INCLUDE={0}".format(hsa_prefix),
"-DHSA_LIB={0}/lib".format(hsa_prefix),
"-DHSAKMT_LIB={0}/lib".format(hsakmt_prefix),
"-DHSAKMT_LIB64={0}/lib64".format(hsakmt_prefix),
"-DCOMGR_INCLUDE={0}/include".format(comgr_prefix),
"-DCOMGR_LIB={0}/lib".format(comgr_prefix),
"-DOPENMP_ENABLE_LIBOMPTARGET=1",
"-DOPENMP_ENABLE_LIBOMPTARGET_HSA=1",
]
components["openmp"] = ["../aomp-dir/amd-llvm-project/openmp"]
components["openmp"] += openmp_common_args
components["openmp-debug"] = [
"../aomp-dir/amd-llvm-project/openmp",
"-DLIBOMPTARGET_NVPTX_DEBUG=ON",
"-DOPENMP_ENABLE_LIBOMPTARGET=1",
"-DOPENMP_ENABLE_LIBOMPTARGET_HSA=1" "-DCMAKE_CXX_FLAGS=-g",
"-DCMAKE_C_FLAGS=-g",
]
if self.spec.version >= Version("3.9.0"):
components["openmp-debug"] += [
"-DENABLE_SOURCE_COPY=ON",
"-DOPENMP_SOURCE_DEBUG_MAP={0}".format(debug_map_format),
]
if self.spec.version >= Version("3.8.0"):
components["openmp-debug"] += [
"-DLIBOMP_ARCH=x86_64",
"-DLIBOMP_OMP_VERSION=50",
"-DLIBOMP_OMPT_SUPPORT=ON",
"-DLIBOMP_USE_DEBUGGER=ON",
"-DLIBOMP_CFLAGS=-O0",
"-DLIBOMP_CPPFLAGS=-O0",
"-DLIBOMP_OMPD_ENABLED=ON",
"-DLIBOMP_OMPD_SUPPORT=ON",
"-DLIBOMP_OMPT_DEBUG=ON",
]
components["openmp-debug"] += openmp_common_args
flang_common_args = [
"-DLLVM_ENABLE_ASSERTIONS=ON",
"-DLLVM_CONFIG={0}/bin/llvm-config".format(aomp_prefix),
"-DCMAKE_CXX_COMPILER={0}/bin/clang++".format(aomp_prefix),
"-DCMAKE_C_COMPILER={0}/bin/clang".format(aomp_prefix),
"-DCMAKE_Fortran_COMPILER={0}/bin/flang".format(aomp_prefix),
"-DLLVM_TARGETS_TO_BUILD=AMDGPU;x86",
]
components["pgmath"] = ["../aomp-dir/flang/runtime/libpgmath"]
components["pgmath"] += flang_common_args
components["flang"] = [
"../aomp-dir/flang",
"-DFLANG_OPENMP_GPU_AMD=ON",
"-DFLANG_OPENMP_GPU_NVIDIA=ON",
]
components["flang"] += flang_common_args
components["flang-runtime"] = [
"../aomp-dir/flang",
"-DLLVM_INSTALL_RUNTIME=ON",
"-DFLANG_BUILD_RUNTIME=ON",
"-DOPENMP_BUILD_DIR={0}/spack-build-openmp/runtime/src".format(src),
]
components["flang-runtime"] += flang_common_args
if self.spec.version != Version("3.5.0"):
build_order = [
"amd-llvm-project",
"aomp-extras",
"openmp",
"openmp-debug",
"pgmath",
"flang",
"flang-runtime",
]
elif self.spec.version == Version("3.5.0"):
build_order = [
"amd-llvm-project",
"vdi",
"hip-on-vdi",
"aomp-extras",
"openmp",
"openmp-debug",
"pgmath",
"flang",
"flang-runtime",
]
# Override standard CMAKE_BUILD_TYPE
for arg in std_cmake_args:
found = re.search("CMAKE_BUILD_TYPE", arg)
if found:
std_cmake_args.remove(arg)
for component in build_order:
with working_dir("spack-build-{0}".format(component), create=True):
cmake_args = components[component]
cmake_args.extend(std_cmake_args)
# OpenMP build needs to be run twice (Release, Debug)
if component == "openmp-debug":
cmake_args.append("-DCMAKE_BUILD_TYPE=Debug")
else:
cmake_args.append("-DCMAKE_BUILD_TYPE=Release")
cmake(*cmake_args)
make()
make("install")

View File

@@ -19,8 +19,6 @@ class Asio(AutotoolsPackage):
license("BSL-1.0")
# As odd-numbered minor versions of asio are not considered stable, they won't be added anymore
version("1.28.2", sha256="5705a0e403017eba276625107160498518838064a6dd7fd8b00b2e30c0ffbdee")
version("1.28.1", sha256="5ff6111ec8cbe73a168d997c547f562713aa7bd004c5c02326f0e9d579a5f2ce")
version("1.28.0", sha256="226438b0798099ad2a202563a83571ce06dd13b570d8fded4840dbc1f97fa328")
version("1.26.0", sha256="935583f86825b7b212479277d03543e0f419a55677fa8cb73a79a927b858a72d")
version("1.24.0", sha256="cbcaaba0f66722787b1a7c33afe1befb3a012b5af3ad7da7ff0f6b8c9b7a8a5b")

View File

@@ -0,0 +1,29 @@
From 7445b29108ea2f0b3f28f947a1ca627575173292 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels <harmenstoppels@gmail.com>
Date: Fri, 7 Aug 2020 13:24:00 +0200
Subject: [PATCH] Remove relative link paths to external libraries
---
src/runtime/core/CMakeLists.txt | 6 ------
1 file changed, 6 deletions(-)
diff --git a/src/runtime/core/CMakeLists.txt b/src/runtime/core/CMakeLists.txt
index 29abffb..f2cd4b4 100644
--- a/src/runtime/core/CMakeLists.txt
+++ b/src/runtime/core/CMakeLists.txt
@@ -109,12 +109,6 @@ target_link_libraries(
${amd_comgr_LIBRARIES}
${LIBELF_LIBRARIES}
${ROCM_LIBRARIES}
- -L${ROCM_LIBRARIES_DIR}/../hsa/lib
- -L${ROCM_LIBRARIES_DIR}
- -Wl,--enable-new-dtags
- -Wl,-rpath,\$ORIGIN
- -Wl,-rpath,\$ORIGIN/../../hsa/lib
- -Wl,-rpath,\$ORIGIN/../../lib
)
# set output dir for .h files
--
2.25.1

View File

@@ -0,0 +1,68 @@
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index ab4533a..9249de2 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -64,7 +64,7 @@ add_subdirectory(device_runtime)
# make examples available in local build
add_custom_command(
OUTPUT examples
- COMMAND /usr/bin/rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../examples .
+ COMMAND rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../examples .
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../examples/*
)
add_custom_target(example ALL DEPENDS examples)
@@ -72,7 +72,7 @@ add_custom_target(example ALL DEPENDS examples)
# make bin available in local build
add_custom_command(
OUTPUT bin
- COMMAND /usr/bin/rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../bin .
+ COMMAND rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../bin .
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../bin/*
)
add_custom_target(script ALL DEPENDS bin)
diff --git a/src/device_runtime/CMakeLists.txt b/src/device_runtime/CMakeLists.txt
index 20e15a8..333cb47 100644
--- a/src/device_runtime/CMakeLists.txt
+++ b/src/device_runtime/CMakeLists.txt
@@ -108,7 +108,7 @@ set (OUTPUT_INC_DIRECTORY ${ATMI_RUNTIME_PATH}/include)
execute_process(COMMAND "/bin/mkdir" "-p" "${OUTPUT_INC_DIRECTORY}")
add_custom_command(
OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi_kl.h
- COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../include/atmi_kl.h ${OUTPUT_INC_DIRECTORY}/atmi_kl.h
+ COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../include/atmi_kl.h ${OUTPUT_INC_DIRECTORY}/atmi_kl.h
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../include/*.h
)
add_custom_target(device_header ALL DEPENDS ${OUTPUT_INC_DIRECTORY}/atmi_kl.h)
diff --git a/src/runtime/core/CMakeLists.txt b/src/runtime/core/CMakeLists.txt
index 132a2f6..034a728 100644
--- a/src/runtime/core/CMakeLists.txt
+++ b/src/runtime/core/CMakeLists.txt
@@ -128,13 +128,13 @@ execute_process(COMMAND "/bin/mkdir" "-p" "${OUTPUT_INC_DIRECTORY}")
add_custom_command(
OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi.h
- COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi.h ${OUTPUT_INC_DIRECTORY}/atmi.h
+ COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi.h ${OUTPUT_INC_DIRECTORY}/atmi.h
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/*.h
)
add_custom_command(
OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi_runtime.h
- COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi_runtime.h ${OUTPUT_INC_DIRECTORY}/atmi_runtime.h
+ COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi_runtime.h ${OUTPUT_INC_DIRECTORY}/atmi_runtime.h
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/*.h
)
diff --git a/src/runtime/interop/hsa/CMakeLists.txt b/src/runtime/interop/hsa/CMakeLists.txt
index f868972..97d8ae8 100644
--- a/src/runtime/interop/hsa/CMakeLists.txt
+++ b/src/runtime/interop/hsa/CMakeLists.txt
@@ -22,7 +22,7 @@ execute_process(COMMAND "/bin/mkdir" "-p" "${OUTPUT_INC_DIRECTORY}")
add_custom_command(
OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi_interop_hsa.h
- COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../../include/atmi_interop_hsa.h ${OUTPUT_INC_DIRECTORY}/atmi_interop_hsa.h
+ COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../../include/atmi_interop_hsa.h ${OUTPUT_INC_DIRECTORY}/atmi_interop_hsa.h
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../../../include/*.h
)

View File

@@ -34,10 +34,94 @@ class Atmi(CMakePackage):
version("5.1.3", sha256="a43448d77705b2b07e1758ffe8035aa6ba146abc2167984e8cb0f1615797b341")
version("5.1.0", sha256="6a758f5a8332e6774cd8e14a4e5ce05e43b1e05298d817b4068c35fa1793d333")
version(
"5.0.2",
sha256="3aea040f5a246539ab118f2183cf3e802a21e0e6215a53025eda77f382341747",
deprecated=True,
)
version(
"5.0.0",
sha256="208c1773170722b60b74357e264e698df5871e9d9d490d64011e6ea76750d9cf",
deprecated=True,
)
version(
"4.5.2",
sha256="c235cfb8bdd89deafecf9123264217b8cc5577a5469e3e1f24587fa820d0792e",
deprecated=True,
)
version(
"4.5.0",
sha256="64eeb0244cedae99db7dfdb365e0ad624106cc1090a531f94885ae81e254aabf",
deprecated=True,
)
version(
"4.3.1",
sha256="4497fa6d33547b946e2a51619f2777ec36e9cff1b07fd534eb8a5ef0d8e30650",
deprecated=True,
)
version(
"4.3.0",
sha256="1cbe0e9258ce7cce7b7ccc288335dffbac821ceb745c4f3fd48e2a258abada89",
deprecated=True,
)
version(
"4.2.0",
sha256="c1c89c00d2dc3e764c63b2e51ff7fd5c06d5881ed56aed0adf639582d3389585",
deprecated=True,
)
version(
"4.1.0",
sha256="b31849f86c79f90466a9d67f0a28a93c1675181e38e2a5f571ffc963e4b06f5f",
deprecated=True,
)
version(
"4.0.0",
sha256="8a2e5789ee7165aff0f0669eecd23ac0a5c8a5bfbc1acd9380fe9a8ed5bffe3a",
deprecated=True,
)
version(
"3.10.0",
sha256="387e87c622ec334d3ba7a2f4f015ea9a219712722f4c56c1ef572203d0d072ea",
deprecated=True,
)
version(
"3.9.0",
sha256="0a305e85bab210dd9a0410aa01d46227e00b59141e4675c50d731ad1232ab828",
deprecated=True,
)
version(
"3.8.0",
sha256="039f0c2b369d0dbc01000754893d9210828f4cb9b36c3e70da8c3819b131c933",
deprecated=True,
)
version(
"3.7.0",
sha256="8df08489a10ee04cea911811393e0e7d91bd437fc1fd81a23a4e7ab924a974f3",
deprecated=True,
)
version(
"3.5.0",
sha256="3fb57d2e583fab82bd0582d0c2bccff059ca91122c18ac49a7770a8bb041a37b",
deprecated=True,
)
depends_on("cmake@3:", type="build")
depends_on("rsync")
for ver in [
"3.5.0",
"3.7.0",
"3.8.0",
"3.9.0",
"3.10.0",
"4.0.0",
"4.1.0",
"4.2.0",
"4.3.0",
"4.3.1",
"4.5.0",
"4.5.2",
"5.0.0",
"5.0.2",
"5.1.0",
"5.1.3",
"5.2.0",
@@ -50,15 +134,18 @@ class Atmi(CMakePackage):
"5.5.0",
"5.5.1",
]:
depends_on(f"comgr@{ver}", type="link", when=f"@{ver}")
depends_on(f"hsa-rocr-dev@{ver}", type="link", when=f"@{ver}")
depends_on("elf", type="link", when=f"@{ver}")
depends_on("comgr@" + ver, type="link", when="@" + ver)
depends_on("hsa-rocr-dev@" + ver, type="link", when="@" + ver)
depends_on("elf", type="link", when="@" + ver)
for ver in ["5.5.0", "5.5.1"]:
depends_on(f"rocm-core@{ver}", when=f"@{ver}")
depends_on("rocm-core@" + ver, when="@" + ver)
root_cmakelists_dir = "src"
patch("0001-Remove-relative-link-paths-to-external-libraries.patch", when="@3.5.0")
# Removing direct reference to /usr/bin/rsync for rsync command.
patch("0002-Remove-usr-bin-rsync-reference.patch", when="@4.0.0:5.0.0")
# Reset the installation path and remove direct reference to rsync.
patch("0002-Remove-usr-bin-rsync-reference-5.2.0.patch", when="@5.0.2:5.2.0")
# Remove direct reference to /usr/bin/rsync path for rsync command

View File

@@ -29,11 +29,3 @@ class AwscliV2(PythonPackage):
depends_on("py-python-dateutil@2.1:2", type=("build", "run"))
depends_on("py-jmespath@0.7.1:1.0", type=("build", "run"))
depends_on("py-urllib3@1.25.4:1.26", type=("build", "run"))
variant("examples", default=True, description="Install code examples")
@run_after("install")
@when("~examples")
def post_install(self):
examples_dir = join_path(python_purelib, "awscli", "examples")
remove_directory_contents(examples_dir)

View File

@@ -1,12 +0,0 @@
diff --color=auto --color=auto -Naur a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java
--- a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 1980-01-01 00:00:00
+++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 2024-02-15 13:36:37
@@ -143,7 +143,7 @@
LabelConstants.EXPERIMENTAL_EXTERNAL_PATH_PREFIX.getRelative(
execPath.relativeTo(execRoot.getParentDirectory()));
} else {
- absolutePathProblems.add(execPathFragment.getPathString());
+ // absolutePathProblems.add(execPathFragment.getPathString());
continue;
}
}

View File

@@ -18,23 +18,10 @@ class Bazel(Package):
homepage = "https://bazel.build/"
url = "https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-dist.zip"
maintainers("LydDeb")
tags = ["build-tools"]
license("Apache-2.0")
version("7.0.2", sha256="dea2b90575d43ef3e41c402f64c2481844ecbf0b40f8548b75a204a4d504e035")
version("7.0.1", sha256="596b13e071d27c43343ec8f5d263cb5312fafe7ef8702401f7ed492f182f4e6c")
version("7.0.0", sha256="477e54f6374001f439a9471ba1de9d7824daf129db95510849ecc5e19ce88170")
version("6.5.0", sha256="fc89da919415289f29e4ff18a5e01270ece9a6fe83cb60967218bac4a3bb3ed2")
version("6.4.0", sha256="bd88ff602c8bbb29ee82ba2a6b12ad092d51ec668c6577f9628f18e48ff4e51e")
version("6.3.2", sha256="8cd7feac58193be2bcba451ba6688a46824d37ca6359ff58e0d44eb98f042948")
version("6.3.1", sha256="2676319e86c5aeab142dccd42434364a33aa330a091c13562b7de87a10e68775")
version("6.3.0", sha256="902198981b1d26112fc05913e79f1b3e9772c3f95594caf85619d041ba06ede0")
version("6.2.1", sha256="4cf4d264bff388ee0012735728630d23832d3c9d021383b2fadceadb0775dd6b")
version("6.2.0", sha256="f1e8f788637ac574d471d619d2096baaca04a19b57a034399e079633db441945")
version("6.1.2", sha256="6fb3ee22fe9fa86d82e173572d504c089f10825d749725592626e090b38c9679")
version("6.1.1", sha256="6b900f26d676c7eca1d2e7dff9b71890dabd3ff59cab2a2d2178bc8a0395342a")
version("6.1.0", sha256="c4b85675541cf66ee7cb71514097fdd6c5fc0e02527243617a4f20ca6b4f2932")
version("6.0.0", sha256="7bc0c5145c19a56d82a08fce6908c5e1a0e75e4fbfb3b6f12b4deae7f4b38cbc")
@@ -46,7 +33,6 @@ class Bazel(Package):
version("5.1.1", sha256="7f5d3bc1d344692b2400f3765fd4b5c0b636eb4e7a8a7b17923095c7b56a4f78")
version("5.1.0", sha256="4de301f509fc6d0cbc697b2017384ecdc94df8f36245bbcbedc7ea6780acc9f5")
version("5.0.0", sha256="072dd62d237dbc11e0bac02e118d8c2db4d0ba3ba09f1a0eb1e2a460fb8419db")
version("4.2.4", sha256="d5ba2ef28da5275f22e832aaa7f9319c61ea5db9b6a3e23b28a6a64ad03078f3")
version("4.2.3", sha256="b0e84d0538f3ec2b95a49bae31a5066f0967281a3ca99965016fbe178acd2d3d")
version("4.2.2", sha256="9981d0d53a356c4e87962847750a97c9e8054e460854748006c80f0d7e2b2d33")
version("4.2.1", sha256="12ea7aa11e2bdb12de1dceb9939a22e96f5a480437cb17c123379d8e0fdf5e82")
@@ -80,6 +66,7 @@ class Bazel(Package):
)
# https://bazel.build/install/compile-source#bootstrap-unix-prereq
depends_on("java", type=("build", "run"))
depends_on("java@11", when="@5.3:", type=("build", "run"))
depends_on("java@8,11", when="@3.3:5.2", type=("build", "run"))
depends_on("java@8", when="@0.6:3.2", type=("build", "run"))
@@ -97,11 +84,10 @@ class Bazel(Package):
patch("compile-0.29.patch")
# Disable dependency search
patch("cppcompileaction-7.0.0.patch", when="@7: +nodepfail")
patch("cppcompileaction-0.3.2.patch", when="@:6 +nodepfail")
patch("cppcompileaction-0.3.2.patch", when="+nodepfail")
# https://github.com/bazelbuild/bazel/issues/17956
patch("apple-clang-14.0.3.patch", when="@:5.4.0,6.0:6.1.1")
patch("apple-clang-14.0.3.patch", when="@:5.4.0,6.0:6.1")
# https://github.com/bazelbuild/bazel/issues/17958
patch(
@@ -134,39 +120,6 @@ class Bazel(Package):
executables = ["^bazel$"]
# Download resources to perform offline build with bazel.
# The following URLs and sha256 are in the file distdir_deps.bzl at the root of bazel sources.
resource_dictionary = {}
resource_dictionary["bazel_skylib"] = {
"url": "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.1/bazel-skylib-1.0.1.tar.gz",
"sha256": "f1c8360c01fcf276778d3519394805dc2a71a64274a3a0908bc9edff7b5aebc8",
"when": "@4:6",
}
resource_dictionary["zulu_11_56_19"] = {
"url": "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.56.19-ca-jdk11.0.15-linux_x64.tar.gz",
"sha256": "e064b61d93304012351242bf0823c6a2e41d9e28add7ea7f05378b7243d34247",
"when": "@6",
}
resource_dictionary["zulu_11_50_19"] = {
"url": "https://mirror.bazel.build/openjdk/azul-zulu11.50.19-ca-jdk11.0.12/zulu11.50.19-ca-jdk11.0.12-linux_x64.tar.gz",
"sha256": "b8e8a63b79bc312aa90f3558edbea59e71495ef1a9c340e38900dd28a1c579f3",
"when": "@5",
}
resource_dictionary["zulu_11_37_17"] = {
"url": "https://mirror.bazel.build/openjdk/azul-zulu11.37.17-ca-jdk11.0.6/zulu11.37.17-ca-jdk11.0.6-linux_x64.tar.gz",
"sha256": "360626cc19063bc411bfed2914301b908a8f77a7919aaea007a977fa8fb3cde1",
"when": "@4",
}
for resource_name in resource_dictionary.keys():
resource(
when=resource_dictionary[resource_name]["when"],
name=resource_name,
url=resource_dictionary[resource_name]["url"],
sha256=resource_dictionary[resource_name]["sha256"],
destination="archive",
expand=False,
)
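The resource() entries above are staged next to the main Bazel source so the build can run offline; in the setup_build_environment hunk that follows, one of the two variants turns each staged archive directory into a --distdir flag appended to EXTRA_BAZEL_ARGS. A minimal standalone sketch of that flag assembly (paths and job count are hypothetical):
# Standalone sketch: assembling the --distdir portion of EXTRA_BAZEL_ARGS
# from staged resource archive directories.
archive_dirs = [
    "/tmp/spack-stage/archive/bazel_skylib",     # hypothetical staged paths
    "/tmp/spack-stage/archive/zulu_11_56_19",
]
args = "--color=no --verbose_failures --jobs={0}".format(8)
for path in archive_dirs:
    args += " --distdir={0}".format(path)
print(args)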
@classmethod
def determine_version(cls, exe):
output = Executable(exe)("version", output=str, error=str)
@@ -179,24 +132,14 @@ def setup_build_environment(self, env):
env.set("BAZEL_LINKOPTS", "")
env.set("BAZEL_LINKLIBS", "-lstdc++")
# .WARNING: Option 'host_javabase' is deprecated
# Use local java installation
args = "--color=no --define=ABSOLUTE_JAVABASE={0} --verbose_failures --jobs={1}".format(
self.spec["java"].prefix, make_jobs
env.set(
"EXTRA_BAZEL_ARGS",
# Spack's logs don't handle colored output well
"--color=no --host_javabase=@local_jdk//:jdk"
# Enable verbose output for failures
" --verbose_failures --jobs={0}".format(make_jobs),
)
resource_stages = self.stage[1:]
for _resource in resource_stages:
try:
resource_name = _resource.resource.name
if self.spec.satisfies(self.resource_dictionary[resource_name]["when"]):
archive_path = _resource.source_path
args += " --distdir={0}".format(archive_path)
except AttributeError:
continue
env.set("EXTRA_BAZEL_ARGS", args)
@run_before("install")
def bootstrap(self):
bash = which("bash")

View File

@@ -1,13 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4750d9a..7b10b57 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -5,7 +5,7 @@ if (CMAKE_VERSION VERSION_LESS 3.9.0)
cmake_policy(SET CMP0042 OLD)
endif()
-set(CMAKE_CXX_STANDARD 11)
+set(CMAKE_CXX_STANDARD 11 CACHE STRING "")
set(CMAKE_CXX_STANDARD_REQUIRED ON)
include(GenerateExportHeader)

View File

@@ -22,9 +22,7 @@ class Bdsim(CMakePackage):
license("GPL-3.0-or-later")
version("master", branch="master")
version("1.7.6", sha256="92f53aa0a9fbd3cafd218f9e58ae4d1e7115733e641191c1658243fefb436600")
version("1.7.0", sha256="713ce3c9d94f340ca774ce1803e0c4f992b904dbc28ce4129713abe883e98683")
version("develop", branch="develop")
version("1.6.0", sha256="e3241d2d097cb4e22249e315c1474da9b3657b9c6893232d9f9e543a5323f717")
depends_on("cmake")
@@ -34,10 +32,3 @@ class Bdsim(CMakePackage):
depends_on("clhep")
depends_on("flex")
depends_on("bison")
patch("c++-standard.patch", when="@:1.7.6")
def cmake_args(self):
args = []
args.append(f"-DCMAKE_CXX_STANDARD={self.spec['root'].variants['cxxstd'].value}")
return args

View File

@@ -17,7 +17,6 @@ class Bedtools2(Package):
license("MIT")
version("2.31.1", sha256="79a1ba318d309f4e74bfa74258b73ef578dccb1045e270998d7fe9da9f43a50e")
version("2.31.0", sha256="183cf9a96aabc50ef4bd557a53fd01557a123c05a0dc87651371878f357439ec")
version("2.30.0", sha256="c575861ec746322961cd15d8c0b532bb2a19333f1cf167bbff73230a7d67302f")
version("2.29.2", sha256="bc2f36b5d4fc9890c69f607d54da873032628462e88c545dd633d2c787a544a5")
@@ -32,7 +31,7 @@ class Bedtools2(Package):
depends_on("xz", when="@2.29:")
depends_on("python", type="build")
patch("bedtools-gcc13.patch", level=1, when="@2.27:2.31.0%gcc@13:")
patch("bedtools-gcc13.patch", level=1, when="@2.27:%gcc@13:")
def install(self, spec, prefix):
make("prefix=%s" % prefix, "install")

View File

@@ -44,24 +44,14 @@ class Benchmark(CMakePackage):
description="The build type to build",
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel", "Coverage"),
)
variant(
"performance_counters",
default=True,
when="@1.5.4:",
description="Enable performance counters provided by libpfm",
)
depends_on("cmake@2.8.11:", type="build", when="@:1.1.0")
depends_on("cmake@2.8.12:", type="build", when="@1.2.0:1.4")
depends_on("cmake@3.5.1:", type="build", when="@1.5.0:")
depends_on("libpfm4", type=("build", "link"), when="+performance_counters")
def cmake_args(self):
# No need for testing for the install
args = [
self.define("BENCHMARK_ENABLE_TESTING", False),
self.define_from_variant("BENCHMARK_ENABLE_LIBPFM", "performance_counters"),
]
args = ["-DBENCHMARK_ENABLE_TESTING=OFF"]
return args
def patch(self):
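Both cmake_args styles in the hunk above ultimately hand CMake plain -D flags; self.define() and self.define_from_variant() are Spack CMakePackage helpers that generate them. A simplified standalone sketch of the boolean case (the real helpers also handle strings, paths, and multi-valued variants):
# Standalone sketch of the helper's effect for boolean values only.
def define(name, value):
    return "-D{0}:BOOL={1}".format(name, "ON" if value else "OFF")

print(define("BENCHMARK_ENABLE_TESTING", False))   # -DBENCHMARK_ENABLE_TESTING:BOOL=OFF
print(define("BENCHMARK_ENABLE_LIBPFM", True))     # -DBENCHMARK_ENABLE_LIBPFM:BOOL=ON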

View File

@@ -16,7 +16,6 @@ class Bfs(MakefilePackage):
license("0BSD")
version("3.1.1", sha256="d73f345c1021e0630e0db930a3fa68dd1f968833037d8471ee1096e5040bf91b")
version("3.1", sha256="aa6a94231915d3d37e5dd62d194cb58a575a8f45270020f2bdd5ab41e31d1492")
version("3.0.4", sha256="7196f5a624871c91ad051752ea21043c198a875189e08c70ab3167567a72889d")
version("3.0.2", sha256="d3456a9aeecc031064db0dbe012e55a11eb97be88d0ab33a90e570fe66457f92")

View File

@@ -14,8 +14,6 @@ class BigdftAtlab(AutotoolsPackage):
git = "https://gitlab.com/l_sim/bigdft-suite.git"
version("develop", branch="devel")
version("1.9.4", sha256="fa22115e6353e553d2277bf054eb73a4710e92dfeb1ed9c5bf245337187f393d")
version("1.9.3", sha256="f5f3da95d7552219f94366b4d2a524b2beac988fb2921673a65a128f9a8f0489")
version("1.9.2", sha256="dc9e49b68f122a9886fa0ef09970f62e7ba21bb9ab1b86be9b7d7e22ed8fbe0f")
version("1.9.1", sha256="3c334da26d2a201b572579fc1a7f8caad1cbf971e848a3e10d83bc4dc8c82e41")
version("1.9.0", sha256="4500e505f5a29d213f678a91d00a10fef9dc00860ea4b3edf9280f33ed0d1ac8")
@@ -34,7 +32,7 @@ class BigdftAtlab(AutotoolsPackage):
depends_on("mpi", when="+mpi")
depends_on("openbabel", when="+openbabel")
for vers in ["1.9.0", "1.9.1", "1.9.2", "1.9.3", "1.9.4", "develop"]:
for vers in ["1.9.0", "1.9.1", "1.9.2", "develop"]:
depends_on(f"bigdft-futile@{vers}", when=f"@{vers}")
configure_directory = "atlab"

View File

@@ -26,7 +26,6 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
checked_by="tgamblin",
)
version("2.42", sha256="aa54850ebda5064c72cd4ec2d9b056c294252991486350d9a97ab2a6dfdfaf12")
version("2.41", sha256="a4c4bec052f7b8370024e60389e194377f3f48b56618418ea51067f67aaab30b")
version("2.40", sha256="f8298eb153a4b37d112e945aa5cb2850040bcf26a3ea65b5a715c83afe05e48a")
version("2.39", sha256="da24a84fef220102dd24042df06fdea851c2614a5377f86effa28f33b7b16148")

View File

@@ -50,15 +50,15 @@ class Bison(AutotoolsPackage, GNUMirrorPackage):
version("3.0.4", sha256="b67fd2daae7a64b5ba862c66c07c1addb9e6b1b05c5f2049392cfd8a2172952e")
version("2.7", sha256="19bbe7374fd602f7a6654c131c21a15aebdc06cc89493e8ff250cb7f9ed0a831")
variant("color", default=False, description="Enable experimental colored output", when="@3.4:")
# https://lists.gnu.org/archive/html/bug-bison/2019-08/msg00008.html
patch("parallel.patch", when="@3.4.2")
provides("yacc")
depends_on("gettext", when="+color")
depends_on("m4@1.4.6:", type=("build", "run"))
depends_on("gettext", when="@3.4:")
depends_on("diffutils", type="build")
depends_on("m4", type=("build", "run"))
depends_on("perl", type="build")
patch("pgi.patch", when="@3.0.4")
# The NVIDIA compilers do not currently support some GNU builtins.

View File

@@ -18,7 +18,6 @@ class Btop(MakefilePackage, CMakePackage):
license("Apache-2.0")
version("1.3.2", sha256="331d18488b1dc7f06cfa12cff909230816a24c57790ba3e8224b117e3f0ae03e")
version("1.3.0", sha256="375e078ce2091969f0cd14030620bd1a94987451cf7a73859127a786006a32cf")
version("1.2.13", sha256="668dc4782432564c35ad0d32748f972248cc5c5448c9009faeb3445282920e02")
@@ -26,8 +25,6 @@ class Btop(MakefilePackage, CMakePackage):
variant("gpu", default=False, description="Enable GPU support", when="build_system=cmake")
depends_on("cmake@3.24:", type="build", when="@1.3.0: build_system=cmake")
# Fix linking GPU support by adding an explicit "target_link_libraries" to ${CMAKE_DL_LIBS}
patch("link-dl.patch", when="+gpu")

View File

@@ -1,21 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Byfl(CMakePackage):
"""Compiler-based Application Analysis"""
homepage = "https://github.com/lanl/Byfl"
url = "https://github.com/lanl/Byfl/archive/refs/tags/v1.8.0.tar.gz"
maintainers("spakin", "ltang85")
license("BSD-3-Clause")
version("1.8.0", sha256="45a9640ba2d77153a425c72349c18b124754123b30c411707b71abd217bbfce0")
depends_on("llvm@:13.9999")

View File

@@ -69,7 +69,6 @@ def cmake_args(self):
self.define("BUILD_TESTS", self.run_tests),
self.define("BUILD_BENCHMARKS", self.run_tests),
self.define("BUILD_EXAMPLES", self.run_tests),
self.define("BUILD_FUZZERS", self.run_tests),
]
return args

View File

@@ -58,7 +58,7 @@ class Camp(CMakePackage, CudaPackage, ROCmPackage):
depends_on("blt", type="build")
depends_on("blt@0.5.0:0.5.3", type="build", when="@2022.03.0:")
patch("libstdc++-13-missing-header.patch", when="@:2022.10")
patch("gcc-13-missing-header.patch", when="@:2022.10 %gcc@13:")
conflicts("^blt@:0.3.6", when="+rocm")

View File

@@ -18,7 +18,6 @@ class Cgal(CMakePackage):
url = "https://github.com/CGAL/cgal/releases/download/v5.4.1/CGAL-5.4.1.tar.xz"
version("5.6", sha256="dcab9b08a50a06a7cc2cc69a8a12200f8d8f391b9b8013ae476965c10b45161f")
version("5.5.3", sha256="0a04f662693256328b05babfabb5e3a5b7db2f5a58d52e3c520df9d0828ddd73")
version("5.5.2", sha256="b2b05d5616ecc69facdc24417cce0b04fb4321491d107db45103add520e3d8c3")
version("5.4.1", sha256="4c3dd7ee4d36d237111a4d72b6e14170093271595d5b695148532daa95323d76")
version("5.3.2", sha256="af917dbc550388ebcb206f774e610fbdb914d95a4b2932fa952279129103852b")

View File

@@ -7,8 +7,6 @@
from spack.package import *
is_windows = sys.platform == "win32"
class Cgns(CMakePackage):
"""The CFD General Notation System (CGNS) provides a general, portable,
@@ -95,7 +93,7 @@ def cmake_args(self):
]
)
if "+mpi" in spec and not is_windows:
if "+mpi" in spec:
options.extend(
[
"-DCMAKE_C_COMPILER=%s" % spec["mpi"].mpicc,

View File

@@ -29,8 +29,6 @@ class Clingo(CMakePackage):
version("master", branch="master", submodules=True)
version("spack", commit="2a025667090d71b2c9dce60fe924feb6bde8f667", submodules=True)
version("5.7.1", sha256="544b76779676075bb4f557f05a015cbdbfbd0df4b2cc925ad976e86870154d81")
version("5.7.0", sha256="ed5401bda54315184697fd69ff0f15389c62779e812058a5f296ba587ed9c10b")
version("5.6.2", sha256="81eb7b14977ac57c97c905bd570f30be2859eabc7fe534da3cdc65eaca44f5be")
version("5.5.2", sha256="a2a0a590485e26dce18860ac002576232d70accc5bfcb11c0c22e66beb23baa6")

View File

@@ -41,12 +41,83 @@ class Comgr(CMakePackage):
version("5.2.0", sha256="5f63fa93739ee9230756ef93c53019474b6cdddea3b588492d785dae1b08c087")
version("5.1.3", sha256="3078c10e9a852fe8357712a263ad775b15944e083f93a879935c877511066ac9")
version("5.1.0", sha256="1cdcfe5acb768ef50fb0026d4ee7ba01e615251ad3c27bb2593cdcf8c070a894")
version(
"5.0.2",
sha256="20d733f70d8edb573d8c92707f663d7d46dcaff08026cd6addbb83266679f92a",
deprecated=True,
)
version(
"5.0.0",
sha256="da1bbc694bd930a504406eb0a0018c2e317d8b2c136fb2cab8de426870efe9a8",
deprecated=True,
)
version(
"4.5.2",
sha256="e45f387fb6635fc1713714d09364204cd28fea97655b313c857beb1f8524e593",
deprecated=True,
)
version(
"4.5.0",
sha256="03c5880e0922fcff31306f7da2eb9d3a3709d9b5b75b3524dcfae85f4b181678",
deprecated=True,
)
version(
"4.3.1",
sha256="f1d99550383ed7b3a01d304eedc3d86a8e45b271aa5a80b1dd099c22fda3f745",
deprecated=True,
)
version(
"4.3.0",
sha256="f77b505abb474078374701dfc49e651ad3eeec5349ce6edda54549943a3775ee",
deprecated=True,
)
version(
"4.2.0",
sha256="40a1ea50d2aea0cf75c4d17cdd6a7fe44ae999bf0147d24a756ca4675ce24e36",
deprecated=True,
)
version(
"4.1.0",
sha256="ffb625978555c63582aa46857672431793261166aa31761eff4fe5c2cab661ae",
deprecated=True,
)
version(
"4.0.0",
sha256="f389601fb70b2d9a60d0e2798919af9ddf7b8376a2e460141507fe50073dfb31",
deprecated=True,
)
version(
"3.10.0",
sha256="b44ee5805a6236213d758fa4b612bb859d8f774b9b4bdc3a2699bb009dd631bc",
deprecated=True,
)
version(
"3.9.0",
sha256="6600e144d72dadb6d893a3388b42af103b9443755ce556f4e9e205ccd8ec0c83",
deprecated=True,
)
version(
"3.8.0",
sha256="62a35480dfabaa98883d91ed0f7c490daa9bbd424af37e07e5d85a6e8030b146",
deprecated=True,
)
version(
"3.7.0",
sha256="73e56ec3c63dade24ad351e9340e2f8e127694028c1fb7cec5035376bf098432",
deprecated=True,
)
version(
"3.5.0",
sha256="25c963b46a82d76d55b2302e0e18aac8175362656a465549999ad13d07b689b9",
deprecated=True,
)
# Disable the hip compile tests. Spack should not be using
# /opt/rocm, and this breaks the build when /opt/rocm exists.
patch("hip-tests.patch", when="@:4.2.0")
depends_on("cmake@3.13.4:", type="build")
depends_on("cmake@3.2.0:", type="build", when="@:3.8")
depends_on("cmake@3.13.4:", type="build", when="@3.9.0:")
depends_on("zlib-api", type="link")
depends_on("z3", type="link")
@@ -55,6 +126,20 @@ class Comgr(CMakePackage):
depends_on("rocm-cmake@3.5.0:", type="build")
for ver in [
"3.5.0",
"3.7.0",
"3.8.0",
"3.9.0",
"3.10.0",
"4.0.0",
"4.1.0",
"4.2.0",
"4.3.0",
"4.3.1",
"4.5.0",
"4.5.2",
"5.0.0",
"5.0.2",
"5.1.0",
"5.1.3",
"5.2.0",
@@ -75,14 +160,16 @@ class Comgr(CMakePackage):
"master",
]:
# llvm libs are linked statically, so this *could* be a build dep
depends_on(f"llvm-amdgpu@{ver}", when=f"@{ver}")
depends_on("llvm-amdgpu@" + ver, when="@" + ver)
# aomp may not build rocm-device-libs as part of llvm-amdgpu, so make
# that a conditional dependency
depends_on(f"rocm-device-libs@{ver}", when=f"@{ver} ^llvm-amdgpu ~rocm-device-libs")
depends_on(
"rocm-device-libs@" + ver, when="@{0} ^llvm-amdgpu ~rocm-device-libs".format(ver)
)
for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1", "5.7.0", "5.7.1", "6.0.0"]:
depends_on(f"rocm-core@{ver}", when=f"@{ver}")
depends_on("rocm-core@" + ver, when="@" + ver)
root_cmakelists_dir = join_path("lib", "comgr")
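The f-string and string-concatenation spellings that alternate in the dependency hunks above build identical spec strings, so they declare the same version-pinned dependency. A minimal standalone check (the version value is illustrative):
# Standalone sketch: both spellings produce the same Spack spec strings.
ver = "5.5.1"  # illustrative version
assert f"rocm-core@{ver}" == "rocm-core@" + ver
assert f"@{ver}" == "@" + ver
print(f"rocm-core@{ver}", f"@{ver}")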

View File

@@ -75,14 +75,15 @@ def cmake_args(self):
"CMAKE_CXX_COMPILER", "{0}/bin/clang++".format(spec["llvm-amdgpu"].prefix)
),
self.define("CMAKE_C_COMPILER", "{0}/bin/clang".format(spec["llvm-amdgpu"].prefix)),
self.define("HIP_PATH", spec["hip"].prefix),
self.define("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)),
self.define("CMAKE_CXX_FLAGS", "-O3"),
self.define("CMAKE_BUILD_TYPE", "Release"),
]
if "auto" not in self.spec.variants["amdgpu_target"]:
args.append(self.define_from_variant("GPU_TARGETS", "amdgpu_target"))
args.append(self.define_from_variant("AMDGPU_TARGETS", "amdgpu_target"))
if self.spec.satisfies("@5.6.0:"):
args.append(self.define("INSTANCES_ONLY", "ON"))
if self.spec.satisfies("@:5.7"):
args.append(self.define("CMAKE_CXX_FLAGS", "-O3"))
return args
def build(self, spec, prefix):

View File

@@ -384,7 +384,7 @@ def edit(self, pkg, spec, prefix):
}
dflags = ["-DNDEBUG"] if spec.satisfies("@:2023.2") else []
if fftw.name in ("intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"):
if spec["fftw-api"].name in ("intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"):
cppflags = ["-D__FFTW3_MKL", "-I{0}".format(fftw_header_dir)]
else:
cppflags = ["-D__FFTW3", "-I{0}".format(fftw_header_dir)]
@@ -888,8 +888,7 @@ def fix_package_config(self):
content += " " + self.spec["lapack"].libs.ld_flags
content += " " + self.spec["fftw-api"].libs.ld_flags
fftw = self.spec["fftw-api"]
if fftw.name in ["fftw", "amdfftw"] and fftw.satisfies("+openmp"):
if (self.spec["fftw-api"].name == "fftw") and ("+openmp" in self.spec["fftw"]):
content += " -lfftw3_omp"
content += "\n"

View File

@@ -77,6 +77,7 @@ def setup_run_environment(self, env):
env.set("MPIF90", spack_fc)
def setup_dependent_build_environment(self, env, dependent_spec):
self.setup_run_environment(env)
env.set("MPICH_CC", spack_cc)
env.set("MPICH_CXX", spack_cxx)
env.set("MPICH_F77", spack_f77)

View File

@@ -33,6 +33,8 @@ def setup_run_environment(self, env):
env.set("MPIF90", spack_fc)
def setup_dependent_build_environment(self, env, dependent_spec):
self.setup_run_environment(env)
env.set("MPICH_CC", spack_cc)
env.set("MPICH_CXX", spack_cxx)
env.set("MPICH_F77", spack_f77)

View File

@@ -1,38 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Cryodrgn(PythonPackage):
"""CryoDRGN is a neural network based algorithm for heterogeneous cryo-EM reconstruction.
In particular, the method models a continuous distribution over 3D structures by using a
neural network based representation for the volume"""
homepage = "https://cryodrgn.cs.princeton.edu/"
pypi = "cryodrgn/cryodrgn-2.3.0.tar.gz"
license("GPL-3.0-only", checked_by="A-N-Other")
version("2.3.0", sha256="9dd75967fddfa56d6b2fbfc56933c50c9fb994326112513f223e8296adbf0afc")
depends_on("python@3.7:", type=("build", "run"))
depends_on("py-setuptools@61:", type="build")
depends_on("py-setuptools-scm@6.2:", type="build")
depends_on("py-torch@1:", type=("build", "run"))
depends_on("py-pandas@:1", type=("build", "run"))
depends_on("py-numpy", type=("build", "run"))
depends_on("py-matplotlib", type=("build", "run"))
depends_on("py-pyyaml", type=("build", "run"))
depends_on("py-scipy@1.3.1:", type=("build", "run"))
depends_on("py-scikit-learn", type=("build", "run"))
depends_on("py-seaborn@:0.11", type=("build", "run"))
depends_on("py-cufflinks", type=("build", "run"))
depends_on("py-jupyterlab", type=("build", "run"))
depends_on("py-umap-learn", type=("build", "run"))
depends_on("py-ipywidgets@:7", type=("build", "run"))
depends_on("py-healpy", type=("build", "run"))

View File

@@ -381,6 +381,12 @@ def setup_run_environment(self, env):
if "target=ppc64le: platform=linux" in self.spec:
env.set("cuDNN_ROOT", os.path.join(self.prefix, "targets", "ppc64le-linux"))
def setup_dependent_build_environment(self, env, dependent_spec):
self.setup_run_environment(env)
def setup_dependent_run_environment(self, env, dependent_spec):
self.setup_run_environment(env)
def install(self, spec, prefix):
install_tree(".", prefix)

View File

@@ -8,8 +8,6 @@
import re
import sys
from llnl.util.filesystem import windows_sfn
from spack.build_systems.autotools import AutotoolsBuilder
from spack.build_systems.nmake import NMakeBuilder
from spack.package import *
@@ -472,8 +470,7 @@ def nmake_args(self):
# The trailing path separator is REQUIRED for cURL to install
# otherwise cURL's build system will interpret the path as a file
# and the install will fail with ambiguous errors
inst_prefix = self.prefix + "\\"
args.append(f"WITH_PREFIX={windows_sfn(inst_prefix)}")
args.append("WITH_PREFIX=%s" % self.prefix + "\\")
return args
def install(self, pkg, spec, prefix):
@@ -488,7 +485,6 @@ def install(self, pkg, spec, prefix):
env["CC"] = ""
env["CXX"] = ""
winbuild_dir = os.path.join(self.stage.source_path, "winbuild")
winbuild_dir = windows_sfn(winbuild_dir)
with working_dir(winbuild_dir):
nmake("/f", "Makefile.vc", *self.nmake_args(), ignore_quotes=True)
with working_dir(os.path.join(self.stage.source_path, "builds")):

View File

@@ -66,7 +66,6 @@ class Dakota(CMakePackage):
depends_on("perl-data-dumper", type="build", when="@6.12:")
depends_on("boost@:1.68.0", when="@:6.12")
depends_on("boost@1.69.0:", when="@6.18:")
depends_on("boost +filesystem +program_options +regex +serialization +system")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')

View File

@@ -35,7 +35,6 @@ class Datatransferkit(CMakePackage):
variant("serial", default=True, description="enable Serial backend (default)")
variant("shared", default=True, description="enable the build of shared lib")
depends_on("mpi")
depends_on("arborx@1.0:", when="+external-arborx")
depends_on("boost")
depends_on("cmake", type="build")
@@ -57,10 +56,6 @@ def cmake_args(self):
"-DDataTransferKit_ENABLE_EXAMPLES=OFF",
"-DCMAKE_CXX_EXTENSIONS=OFF",
"-DCMAKE_CXX_STANDARD=14",
"-DCMAKE_C_COMPILER=" + spec["mpi"].mpicc,
"-DCMAKE_CXX_COMPILER=" + spec["mpi"].mpicxx,
"-DCMAKE_Fortran_COMPILER=" + spec["mpi"].mpifc,
"-DMPI_BASE_DIR=" + spec["mpi"].prefix,
]
if "+openmp" in spec:

View File

@@ -26,7 +26,6 @@ class Dd4hep(CMakePackage):
license("LGPL-3.0-or-later")
version("master", branch="master")
version("1.28", sha256="b28d671eda0154073873a044a384486e66f1f200065deca99537aa84f07328ad")
version("1.27.2", sha256="09d8acd743d010274562b856d39e2a88aeaf89cf287a4148f52223b0cd960ab2")
version("1.27.1", sha256="e66ae726c0a9a55e5603024a7f8a48ffbc5613ea36e5f892e9a90d87833f92e0")
version("1.27", sha256="51fbd0f91f2511261d9b01e4b3528c658bea1ea1b5d67b25b6812615e782a902")

View File

@@ -19,8 +19,6 @@ class Delly2(MakefilePackage):
license("BSD-3-Clause")
version("1.2.6", sha256="1a71fcc5f2a55649c2104086f3f7163ed58c5868eaf040a25e45c777b0e1abb7")
version("1.1.8", sha256="f72a1143dc71449fc277fc8b3e337a4d35b2fe736f3693a14b1986efa8da4889")
version("1.1.6", sha256="08961e9c81431eb486476fa71eea94941ad24ec1970b71e5a7720623a39bfd2a")
version("0.9.1", tag="v0.9.1", commit="ef1cd626a85cfd1c1b7acfca2b5fd5957f2a05f1")
version("2017-08-03", commit="e32a9cd55c7e3df5a6ae4a91f31a0deb354529fc", deprecated=True)

View File

@@ -45,13 +45,23 @@ class Delphes(CMakePackage):
variant("pythia8", default=True, description="build with pythia8")
variant(
"cxxstd",
default="17",
values=("14", "17"),
multi=False,
description="Use the specified C++ standard when building.",
)
depends_on("cmake", type="build")
depends_on("root")
depends_on("root cxxstd=14", when="cxxstd=14")
depends_on("root cxxstd=17", when="cxxstd=17")
depends_on("pythia8", when="+pythia8")
def cmake_args(self):
args = []
args.append(f"-DCMAKE_CXX_STANDARD={self.spec['root'].variants['cxxstd'].value}")
# C++ Standard
args.append("-DCMAKE_CXX_STANDARD=%s" % self.spec.variants["cxxstd"].value)
return args
def setup_run_environment(self, env):

View File

@@ -47,6 +47,9 @@ def setup_run_environment(self, env):
env.prepend_path("LIBRARY_PATH", self.prefix.linux.lib64)
env.prepend_path("LD_LIBRARY_PATH", self.prefix.linux.lib64)
def setup_dependent_build_environment(self, env, dependent_spec):
self.setup_run_environment(env)
def edit(self, spec, prefix):
# Move contents to dmd/
mkdir = which("mkdir")

View File

@@ -23,7 +23,6 @@ class DoubleBatchedFftLibrary(CMakePackage):
version("main", branch="main")
version("develop", branch="develop")
version("0.5.0", sha256="cbd2ecf039cc40830e57a8af8295abf2083ce3b1a333279a8c17762f41131fff")
version("0.4.0", sha256="f3518012b632c92c2a933d70a040d6b0eee2d631ab6b1881a192a8d1624f242d")
version("0.3.6", sha256="ff163251d77d3c686563141e871c702bf4997c0302d53616add55d6cf9b02d28")

View File

@@ -11,13 +11,11 @@ class Duckdb(CMakePackage):
"""DuckDB is an in-process SQL OLAP Database Management System."""
homepage = "https://duckdb.org"
url = "https://github.com/duckdb/duckdb/archive/refs/tags/v0.9.2.tar.gz"
url = "https://github.com/duckdb/duckdb/archive/refs/tags/v0.8.1.tar.gz"
git = "https://github.com/duckdb/duckdb.git"
license("MIT")
maintainers("glentner")
version("master", branch="master")
version("0.9.2", sha256="afff7bd925a98dc2af4039b8ab2159b0705cbf5e0ee05d97f7bb8dce5f880dc2")
version("0.8.1", sha256="a0674f7e320dc7ebcf51990d7fc1c0e7f7b2c335c08f5953702b5285e6c30694")
version("0.7.1", sha256="67f840f861e5ffbe137d65a8543642d016f900b89dd035492d562ad11acf0e1e")

View File

@@ -1,10 +0,0 @@
--- a/Base/src/cts/CtsApi.hpp 2024-02-10 02:32:48.001444742 +0000
+++ b/Base/src/cts/CtsApi.hpp 2024-02-10 02:33:09.161119010 +0000
@@ -16,6 +16,7 @@
//============================================================================
#include <string>
#include <vector>
+#include <cassert>
#include "CheckPt.hpp"
#include "NodeFwd.hpp"

View File

@@ -68,10 +68,6 @@ class Ecflow(CMakePackage):
# Requirement to use the Python3_EXECUTABLE variable
depends_on("cmake@3.16:", type="build")
# https://github.com/JCSDA/spack-stack/issues/1001
# https://github.com/JCSDA/spack-stack/issues/1009
patch("ctsapi_cassert.patch", when="@5.11.4")
@when("@:4.13.0")
def patch(self):
version = str(self.spec["python"].version[:2])

View File

@@ -10,18 +10,9 @@ class Eigenexa(AutotoolsPackage):
"""EigenExa is a high-performance eigenvalue solver."""
homepage = "https://www.r-ccs.riken.jp/labs/lpnctrt/projects/eigenexa/"
url = "https://www.r-ccs.riken.jp/labs/lpnctrt/projects/eigenexa/EigenExa-2.6.tgz"
version("2.12", sha256="2a33999b09d4434a5ce2fbd18cabbfee1cff0b2a12df7ded1f67127157b08f86")
version("2.11", sha256="87dee8ac13f410a007e82df2688fa7f143883229dac729fd20836f4a28fac43d")
version("2.10", sha256="5b1806e132b191d23680b34fbc286d676ba20f58ee754122087a3ec3cacb8fa3")
version("2.9", sha256="8788922035bf67abf1a7aecf8e30dd7564de387fda4ecd11c6b4cf9259d25990")
version("2.8", sha256="3ee846d4db10336d393738eadab2f1c941dfc8fb501f2a4baf0823f0ff938f56")
version("2.7", sha256="490f3d0217a8c101e66e785229baaba5b4d674508bc9a5aca6cc5fa074f3a8aa")
version(
"2.6",
sha256="a1a4e571a8051443f28e7ea4889272993452a4babd036d2b4dd6b28154302f95",
url="https://www.r-ccs.riken.jp/labs/lpnctrt/projects/eigenexa/EigenExa-2.6.tgz",
)
version("2.6", sha256="a1a4e571a8051443f28e7ea4889272993452a4babd036d2b4dd6b28154302f95")
depends_on("autoconf", type="build")
depends_on("automake", type="build")
@@ -32,16 +23,11 @@ class Eigenexa(AutotoolsPackage):
depends_on("scalapack")
patch("fj_compiler.patch", when="%fj")
patch("gcc_compiler.patch", when="@:2.6.99 %gcc")
patch("gcc_compiler.patch", when="%gcc")
parallel = False
force_autoreconf = True
def url_for_version(self, version):
return "https://www.r-ccs.riken.jp/labs/lpnctrt/projects/eigenexa/EigenExa-{0}.{1}".format(
version, "tar.gz" if version >= Version("2.7") else ".tgz"
)
def setup_build_environment(self, env):
env.set("FC", self.spec["mpi"].mpifc, force=True)
env.set("F77", self.spec["mpi"].mpif77, force=True)
@@ -61,13 +47,6 @@ def setup_build_environment(self, env):
),
)
if self.spec.satisfies("%gcc@10:"):
fflags = "-fallow-argument-mismatch"
if self.spec.satisfies("@:2.8"):
fflags += " -fallow-invalid-boz"
env.set("FCFLAGS", fflags)
env.set("FFLAGS", fflags)
@run_after("install")
def cache_test_sources(self):
"""Save off benchmark files for stand-alone tests."""

View File

@@ -35,6 +35,11 @@ class Elpa(AutotoolsPackage, CudaPackage, ROCmPackage):
version(
"2022.11.001", sha256="75db3ac146f9a6a1598e3418ddcab2be2f40a30ef9ec4c00a3b5d3808c99c430"
)
version(
"2022.11.001.rc2",
sha256="13d67e7d69894c631b48e4fcac905b51c4e41554c7eb4731e98c4e205f0fab9f",
deprecated=True,
)
version(
"2021.11.001", sha256="fb361da6c59946661b73e51538d419028f763d7cb9dacf9d8cd5c9cd3fb7802f"
)
@@ -93,7 +98,7 @@ def headers(self):
# upstream sometimes adds tarball suffixes not part of the internal version
elpa_version = str(self.spec.version)
for vsuffix in ("_bugfix", "-patched"):
for vsuffix in ("_bugfix",):
if elpa_version.endswith(vsuffix): # implementation of py3.9 removesuffix
elpa_version = elpa_version[: -len(vsuffix)]
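The suffix-stripping loop above notes that it reimplements Python 3.9's str.removesuffix; a standalone sketch of the equivalence (the version string is hypothetical):
# Standalone sketch: the loop above vs. str.removesuffix (Python 3.9+).
elpa_version = "2021.11.001_bugfix"          # hypothetical tarball-style version
for vsuffix in ("_bugfix",):
    if elpa_version.endswith(vsuffix):
        elpa_version = elpa_version[: -len(vsuffix)]
assert elpa_version == "2021.11.001_bugfix".removesuffix("_bugfix")
print(elpa_version)                          # 2021.11.001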
@@ -171,16 +176,16 @@ def configure_args(self):
options += self.enable_or_disable("openmp")
# Additional linker search paths and link libs
ldflags = [spec["blas"].libs.search_flags, spec["lapack"].libs.search_flags]
libs = [spec["lapack"].libs.link_flags, spec["blas"].libs.link_flags]
# If using blas with openmp support, link with openmp
# Needed for Spack-provided OneAPI MKL and for many externals
if self.spec["blas"].satisfies("threads=openmp"):
ldflags.append(self.compiler.openmp_flag)
options += [f'LDFLAGS={" ".join(ldflags)}', f'LIBS={" ".join(libs)}']
# if using mkl with openmp support, link with openmp
mkl_openmp_flag = (
self.compiler.openmp_flag
if self.spec.satisfies("^intel-oneapi-mkl threads=openmp")
else ""
)
options += [
"LDFLAGS={0} {1}".format(mkl_openmp_flag, spec["lapack"].libs.search_flags),
"LIBS={0} {1}".format(spec["lapack"].libs.link_flags, spec["blas"].libs.link_flags),
]
if "+mpi" in self.spec:
options += [

Some files were not shown because too many files have changed in this diff.