Compare commits


1 commit

Author: Harmen Stoppels
SHA1: b12d65ce92
Message: spec lookup: separate module
Date: 2025-01-23 18:22:42 +01:00
504 changed files with 3064 additions and 3959 deletions

View File

@@ -40,17 +40,17 @@ jobs:
       # 1: Platforms to build for
       # 2: Base image (e.g. ubuntu:22.04)
       dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                   [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
-                   [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
-                   [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
-                   [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
-                   [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
-                   [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
-                   [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
+                   [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                   [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
+                   [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
+                   [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+                   [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
+                   [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
+                   [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
                    [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                    [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                   [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
-                   [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
+                   [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
+                   [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
       name: Build ${{ matrix.dockerfile[0] }}
       if: github.repository == 'spack/spack'
       steps:

View File

@@ -81,10 +81,6 @@ jobs:
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
-  import-check:
-    needs: [ changes ]
-    uses: ./.github/workflows/import-check.yaml
   all-prechecks:
     needs: [ prechecks ]
     if: ${{ always() }}

View File

@@ -33,4 +33,3 @@ jobs:
         with:
           verbose: true
           fail_ci_if_error: false
-          token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,49 +0,0 @@
-name: import-check
-on:
-  workflow_call:
-jobs:
-  # Check we don't make the situation with circular imports worse
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-    - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        repository: haampie/circular-import-fighter
-        ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Circular import check
-      working-directory: circular-import-fighter
-      run: make -j compare "SPACK_ROOT=../old ../new"

View File

@@ -86,6 +86,66 @@ jobs:
           spack -d bootstrap now --dev
           spack -d style -t black
           spack unit-test -V
+  # Check we don't make the situation with circular imports worse
+  import-check:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: julia-actions/setup-julia@v2
+      with:
+        version: '1.10'
+    - uses: julia-actions/cache@v2
+    # PR: use the base of the PR as the old commit
+    - name: Checkout PR base commit
+      if: github.event_name == 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        ref: ${{ github.event.pull_request.base.sha }}
+        path: old
+    # not a PR: use the previous commit as the old commit
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 2
+        path: old
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      run: git -C old reset --hard HEAD^
+    - name: Checkout new commit
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        path: new
+    - name: Install circular import checker
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        repository: haampie/circular-import-fighter
+        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
+        path: circular-import-fighter
+    - name: Install dependencies
+      working-directory: circular-import-fighter
+      run: make -j dependencies
+    - name: Problematic imports before
+      working-directory: circular-import-fighter
+      run: make SPACK_ROOT=../old SUFFIX=.old
+    - name: Problematic imports after
+      working-directory: circular-import-fighter
+      run: make SPACK_ROOT=../new SUFFIX=.new
+    - name: Compare import cycles
+      working-directory: circular-import-fighter
+      run: |
+        edges_before="$(head -n1 solution.old)"
+        edges_after="$(head -n1 solution.new)"
+        if [ "$edges_after" -gt "$edges_before" ]; then
+          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
+          printf 'previously this was %s\033[0m\n' "$edges_before"
+          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
+          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
+          exit 1
+        else
+          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
+        fi
   # Further style checks from pylint
   pylint:
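The gate in the "Compare import cycles" step above reduces to an integer comparison of the first line of the two solution files. A minimal Python restatement of that logic, assuming the same solution.old/solution.new files (this sketch is not part of the workflow):

    # Sketch: Python equivalent of the shell gate above.
    from pathlib import Path

    # The first line of each solution file holds the number of import
    # edges that would have to be deleted to break all cycles.
    edges_before = int(Path("solution.old").read_text().splitlines()[0])
    edges_after = int(Path("solution.new").read_text().splitlines()[0])

    if edges_after > edges_before:
        raise SystemExit(
            f"Import check failed: {edges_after} imports need to be deleted, "
            f"previously this was {edges_before}"
        )
    print(f"Import check passed: {edges_after} <= {edges_before}")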

View File

@@ -25,6 +25,7 @@ exit 1
 # The code above runs this file with our preferred python interpreter.
 import os
+import os.path
 import sys
 min_python3 = (3, 6)

View File

@@ -36,7 +36,7 @@ packages:
     go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-oneapi-ipp]
-    java: [openjdk, jdk]
+    java: [openjdk, jdk, ibm-java]
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
@@ -73,27 +73,15 @@ packages:
     permissions:
       read: world
       write: user
-  cray-fftw:
-    buildable: false
-  cray-libsci:
-    buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
-  cray-pmi:
-    buildable: false
   egl:
     buildable: false
-  essl:
-    buildable: false
   fujitsu-mpi:
     buildable: false
-  fujitsu-ssl2:
-    buildable: false
   hpcx-mpi:
     buildable: false
-  mpt:
-    buildable: false
   spectrum-mpi:
     buildable: false

View File

@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
   % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
   % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
-  % spack buildcache update-index /opt/bootstrap/bootstrap_cache
 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the

View File

@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
 "environment variables" section lists environment variables that the
 build system uses to pass flags to the compiler and linker.
-^^^^^^^^^^^^^^^^^^^^^^^^^
-Adding flags to configure
-^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Addings flags to configure
+^^^^^^^^^^^^^^^^^^^^^^^^^^
 For most of the flags you encounter, you will want a variant to
 optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:
    def configure_args(self):
        args = []
-       ...
        if self.spec.satisfies("+mpi"):
            args.append("--enable-mpi")
        else:
@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
 .. code-block:: python
    def configure_args(self):
-       args = []
-       ...
-       args.extend(self.enable_or_disable("mpi"))
-       return args
+       return [self.enable_or_disable("mpi")]
 Note that we are explicitly disabling MPI support if it is not
@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
        default=False,
        description="Memchecker support for debugging [degrades performance]"
    )
-   ...
-   def configure_args(self):
-       args = []
-       ...
-       args.extend(self.enable_or_disable("memchecker"))
-       return args
+   config_args.extend(self.enable_or_disable("memchecker"))
 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
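For readers skimming this hunk: both sides of the example rely on ``enable_or_disable``, which expands a boolean variant into ``--enable-<name>``/``--disable-<name>`` configure arguments. A minimal sketch of the documented pattern, with a hypothetical ``Foo`` package:

.. code-block:: python

   class Foo(AutotoolsPackage):
       variant("memchecker", default=False, description="Memchecker support")

       def configure_args(self):
           args = []
           # +memchecker appends "--enable-memchecker",
           # ~memchecker appends "--disable-memchecker".
           args.extend(self.enable_or_disable("memchecker"))
           return args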

View File

@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:
 .. code-block:: python
-   phases = ("configure", "build", "install")
+   phases = ["configure", "build", "install"]
 Similarly, ``cmake`` defines:
 .. code-block:: python
-   phases = ("bootstrap", "build", "install")
+   phases = ["bootstrap", "build", "install"]
 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
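To make the ``phases`` mechanics above concrete: each name listed in ``phases`` corresponds to a method on the package, run in order at install time. A minimal, hypothetical sketch (names are illustrative, not from this diff):

.. code-block:: python

   class MyTool(Package):
       # PackageBase runs these methods in this order.
       phases = ("configure", "build", "install")

       def configure(self, spec, prefix):
           configure("--prefix={0}".format(prefix))

       def build(self, spec, prefix):
           make()

       def install(self, spec, prefix):
           make("install")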

View File

@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
 * ``.linux_neoverse_n1``
 * ``.linux_neoverse_v1``
 * ``.linux_neoverse_v2``
+* ``.linux_power``
 * ``.linux_skylake``
 * ``.linux_x86_64``
 * ``.linux_x86_64_v4``

View File

@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
 explicit or default configuration settings (e.g., a different version of
 Spack or for a different user account).
-Environments created from a manifest will copy any included configs
-from relative paths inside the environment. Relative paths from
-outside the environment will cause errors, and absolute paths will be
-kept absolute. For example, if ``spack.yaml`` includes:
-
-.. code-block:: yaml
-
-   spack:
-     include: [./config.yaml]
-
-then the created environment will have its own copy of the file
-``config.yaml`` copied from the location in the original environment.
 Create an environment from a ``spack.lock`` file using:
 .. code-block:: console
@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
 and ignore the create-specific flags.
 .. code-block:: console
    $ spack env activate --create -p myenv
    # ...
    # [creates if myenv does not exist yet]
@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment
 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
 will configure Spack to install the package from local source.
-If a version is not provided from the command line interface then spack
-will automatically pick the highest version the package has defined.
 This means any infinity versions (``develop``, ``main``, ``stable``) will be
 preferred in this selection process.
@@ -448,9 +435,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
 is to check if ``mtime`` is newer than the installation.
-A custom check can be created by overriding the ``detect_dev_src_change`` method
-in your package class. This is particularly useful for projects using custom spack repo's
-to drive development and want to optimize performance.
 Spack ensures that all instances of a
 developed package in the environment are concretized to match the
@@ -466,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.
 If the package being developed supports out-of-source builds then users can use the
-``--build_directory`` flag to control the location and name of the build directory.
 This is a shortcut to set the ``package_attributes:build_directory`` in the
 ``packages`` configuration (see :ref:`assigning-package-attributes`).
+The supplied location will become the build-directory for that package in all future builds.

View File

@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os
+import os.path
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit

View File

@@ -668,7 +668,7 @@ def copy(src, dest, _permissions=False):
         _permissions (bool): for internal use only
     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -679,7 +679,7 @@ def copy(src, dest, _permissions=False):
     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise IOError("No such file or directory: '{0}'".format(src))
     if len(files) > 1 and not os.path.isdir(dest):
         raise ValueError(
             "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -710,7 +710,7 @@ def install(src, dest):
     dest (str): the destination file or directory
     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -748,7 +748,7 @@ def copy_tree(
         _permissions (bool): for internal use only
     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     if _permissions:
@@ -762,7 +762,7 @@ def copy_tree(
     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise IOError("No such file or directory: '{0}'".format(src))
     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all
@@ -843,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
     ignore (typing.Callable): function indicating which files to ignore
     Raises:
-        OSError: if *src* does not match any files or directories
+        IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@@ -1472,7 +1472,7 @@ def set_executable(path):
 def recursive_mtime_greater_than(path: str, time: float) -> bool:
     """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
     # use bfs order to increase likelihood of early return
-    queue: Deque[str] = collections.deque([path])
+    queue: Deque[str] = collections.deque()
     if os.stat(path).st_mtime > time:
         return True
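For context on the last hunk: the left-hand version seeds the BFS queue with the root path so that the traversal loop (not shown in this excerpt) actually visits children. A standalone sketch of the pattern, with a hypothetical function name rather than the diff's code:

    import collections
    import os

    def any_mtime_newer(path: str, time: float) -> bool:
        # Seed the queue with the root; with an empty deque the loop
        # below would never run and children would never be visited.
        queue = collections.deque([path])
        while queue:
            current = queue.popleft()
            if os.stat(current).st_mtime > time:
                return True  # early return, helped by BFS order
            if os.path.isdir(current):
                queue.extend(os.path.join(current, name) for name in os.listdir(current))
        return False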

View File

@@ -308,7 +308,7 @@ class LinkTree:
     def __init__(self, source_root):
         if not os.path.exists(source_root):
-            raise OSError("No such file or directory: '%s'", source_root)
+            raise IOError("No such file or directory: '%s'", source_root)
         self._root = source_root

View File

@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:
             return True
-        except OSError as e:
+        except IOError as e:
             # EAGAIN and EACCES == locked by another process (so try again)
             if e.errno not in (errno.EAGAIN, errno.EACCES):
                 raise

View File

@@ -918,7 +918,7 @@ def _writer_daemon(
             try:
                 if stdin_file.read(1) == "v":
                     echo = not echo
-            except OSError as e:
+            except IOError as e:
                 # If SIGTTIN is ignored, the system gives EIO
                 # to let the caller know the read failed b/c it
                 # was in the bg. Ignore that too.
@@ -1013,7 +1013,7 @@ def wrapped(*args, **kwargs):
         while True:
             try:
                 return function(*args, **kwargs)
-            except OSError as e:
+            except IOError as e:
                 if e.errno == errno.EINTR:
                     continue
                 raise
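Background for the recurring ``IOError``/``OSError`` swaps in these hunks: since Python 3.3, ``IOError`` is an alias of ``OSError``, and the url/socket errors caught elsewhere in this diff are ``OSError`` subclasses, so catching ``OSError`` subsumes all of them. A quick interpreter check:

    import socket
    import urllib.error

    assert IOError is OSError                          # alias since Python 3.3
    assert issubclass(urllib.error.URLError, OSError)  # URLError derives from OSError
    assert issubclass(socket.timeout, OSError)         # socket.timeout is TimeoutError, an OSError subclass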

View File

@@ -10,7 +10,7 @@
 import spack.util.git
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "1.0.0.dev0"
+__version__ = "0.24.0.dev0"
 spack_version = __version__

View File

@@ -5,7 +5,6 @@
 import codecs
 import collections
 import concurrent.futures
-import contextlib
 import copy
 import hashlib
 import io
@@ -92,9 +91,6 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
-INDEX_HASH_FILE = "index.json.hash"
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.
@@ -506,7 +502,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
     scheme = urllib.parse.urlparse(mirror_url).scheme
     if scheme != "oci" and not web_util.url_exists(
-        url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
     ):
         return False
@@ -708,7 +704,7 @@ def _read_specs_and_push_index(
     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
+    index_json_path = os.path.join(temp_dir, "index.json")
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
@@ -718,14 +714,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)
     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
+    index_hash_path = os.path.join(temp_dir, "index.json.hash")
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)
     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
+        url_util.join(cache_prefix, "index.json"),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
@@ -733,7 +729,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, INDEX_HASH_FILE),
+        url_util.join(cache_prefix, "index.json.hash"),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -802,7 +798,7 @@ def url_read_method(url):
     try:
         _, _, spec_file = web_util.read_from_url(url)
         contents = codecs.getreader("utf-8")(spec_file).read()
-    except (web_util.SpackWebError, OSError) as e:
+    except web_util.SpackWebError as e:
         tty.error(f"Error reading specfile: {url}: {e}")
     return contents
@@ -1789,7 +1785,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)
     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
+    index_json_path = os.path.join(tmpdir, "index.json")
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
@@ -2010,7 +2006,7 @@ def fetch_url_to_mirror(url):
     # Download the config = spec.json and the relevant tarball
     try:
-        manifest = json.load(response)
+        manifest = json.loads(response.read())
         spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
         tarball_digest = spack.oci.image.Digest.from_string(
             manifest["layers"][-1]["digest"]
@@ -2271,24 +2267,6 @@ def relocate_package(spec: spack.spec.Spec) -> None:
             with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
                 codesign("-fs-", tmp_binary)
-    install_manifest = os.path.join(
-        spec.prefix,
-        spack.store.STORE.layout.metadata_dir,
-        spack.store.STORE.layout.manifest_file_name,
-    )
-    if not os.path.exists(install_manifest):
-        spec_id = spec.format("{name}/{hash:7}")
-        tty.warn("No manifest file in tarball for spec %s" % spec_id)
-    # overwrite old metadata with new
-    if spec.spliced:
-        # rewrite spec on disk
-        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
-        # de-cache the install manifest
-        with contextlib.suppress(FileNotFoundError):
-            os.unlink(install_manifest)
 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
     stagepath = os.path.dirname(filename)
@@ -2455,6 +2433,15 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
     except Exception as e:
         shutil.rmtree(spec.prefix, ignore_errors=True)
         raise e
+    else:
+        manifest_file = os.path.join(
+            spec.prefix,
+            spack.store.STORE.layout.metadata_dir,
+            spack.store.STORE.layout.manifest_file_name,
+        )
+        if not os.path.exists(manifest_file):
+            spec_id = spec.format("{name}/{hash:7}")
+            tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
             shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2559,6 +2546,10 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
+    if spec.spliced:  # overwrite old metadata with new
+        spack.store.STORE.layout.write_spec(
+            spec, spack.store.STORE.layout.spec_file_path(spec)
+        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
@@ -2596,14 +2587,11 @@ def try_direct_fetch(spec, mirrors=None):
     )
     try:
         _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
-        specfile_contents = codecs.getreader("utf-8")(fs).read()
         specfile_is_signed = True
-    except (web_util.SpackWebError, OSError) as e1:
+    except web_util.SpackWebError as e1:
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            specfile_contents = codecs.getreader("utf-8")(fs).read()
-            specfile_is_signed = False
-        except (web_util.SpackWebError, OSError) as e2:
+        except web_util.SpackWebError as e2:
             tty.debug(
                 f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                 e1,
@@ -2613,6 +2601,7 @@ def try_direct_fetch(spec, mirrors=None):
                 f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
             )
             continue
+    specfile_contents = codecs.getreader("utf-8")(fs).read()
     # read the spec from the build cache file. All specs in build caches
     # are concrete (as they are built) so we need to mark this spec
@@ -2706,9 +2695,8 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
     try:
         _, _, json_file = web_util.read_from_url(keys_index)
-        json_index = sjson.load(json_file)
-    except (web_util.SpackWebError, OSError, ValueError) as url_err:
-        # TODO: avoid repeated request
+        json_index = sjson.load(codecs.getreader("utf-8")(json_file))
+    except web_util.SpackWebError as url_err:
         if web_util.url_exists(keys_index):
             tty.error(
                 f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2955,14 +2943,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):
     def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-            remote_hash = response.read(64)
-        except OSError:
+        except (TimeoutError, urllib.error.URLError):
             return None
         # Validate the hash
+        remote_hash = response.read(64)
         if not re.match(rb"[a-f\d]{64}$", remote_hash):
             return None
         return remote_hash.decode("utf-8")
@@ -2976,17 +2964,17 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except OSError as e:
-            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
         try:
             result = codecs.getreader("utf-8")(response).read()
-        except (ValueError, OSError) as e:
-            raise FetchIndexError(f"Remote index {url_index} is invalid") from e
+        except ValueError as e:
+            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
         computed_hash = compute_hash(result)
@@ -3020,7 +3008,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
         headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
         try:
@@ -3030,12 +3018,12 @@ def conditional_fetch(self) -> FetchIndexResult:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
             raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except OSError as e:  # URLError, socket.timeout, etc.
+        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError(f"Could not fetch index {url}", e) from e
        try:
            result = codecs.getreader("utf-8")(response).read()
-        except (ValueError, OSError) as e:
+        except ValueError as e:
            raise FetchIndexError(f"Remote index {url} is invalid", e) from e
        headers = response.headers
@@ -3067,11 +3055,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
-        except OSError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
        try:
-            manifest = json.load(response)
+            manifest = json.loads(response.read())
        except Exception as e:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
@@ -3086,16 +3074,14 @@ def conditional_fetch(self) -> FetchIndexResult:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
        # Otherwise fetch the blob / index.json
-        try:
-            response = self.urlopen(
-                urllib.request.Request(
-                    url=self.ref.blob_url(index_digest),
-                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
-                )
-            )
-            result = codecs.getreader("utf-8")(response).read()
-        except (OSError, ValueError) as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+        response = self.urlopen(
+            urllib.request.Request(
+                url=self.ref.blob_url(index_digest),
+                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+            )
+        )
+        result = codecs.getreader("utf-8")(response).read()
        # Make sure the blob we download has the advertised hash
        if compute_hash(result) != index_digest.digest:

View File

@@ -5,7 +5,7 @@
 import fnmatch
 import glob
 import importlib
-import os
+import os.path
 import re
 import sys
 import sysconfig

View File

@@ -27,9 +27,9 @@
 class ClingoBootstrapConcretizer:
     def __init__(self, configuration):
         self.host_platform = spack.platforms.host()
-        self.host_os = self.host_platform.default_operating_system()
+        self.host_os = self.host_platform.operating_system("frontend")
         self.host_target = archspec.cpu.host().family
-        self.host_architecture = spack.spec.ArchSpec.default_arch()
+        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
         self.host_architecture.target = str(self.host_target)
         self.host_compiler = self._valid_compiler_or_raise()
         self.host_python = self.python_external_spec()

View File

@@ -4,7 +4,7 @@
 """Manage configuration swapping for bootstrapping purposes"""
 import contextlib
-import os
+import os.path
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence
@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 def _add_compilers_if_missing() -> None:
-    arch = spack.spec.ArchSpec.default_arch()
+    arch = spack.spec.ArchSpec.frontend_arch()
     if not spack.compilers.compilers_for_arch(arch):
         spack.compilers.find_compilers()

View File

@@ -25,6 +25,7 @@
 import functools
 import json
 import os
+import os.path
 import sys
 import uuid
 from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -45,7 +46,6 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
-import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller
@@ -97,12 +97,8 @@ def __init__(self, conf: ConfigDictionary) -> None:
     self.name = conf["name"]
     self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
-    # Check for relative paths, and turn them into absolute paths
-    # root is the metadata_dir
-    maybe_url = conf["info"]["url"]
-    if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
-        maybe_url = os.path.join(self.metadata_dir, maybe_url)
-    self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
+    # Promote (relative) paths to file urls
+    self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
 @property
 def mirror_scope(self) -> spack.config.InternalConfigScope:

View File

@@ -301,13 +301,11 @@ def clean_environment():
     env.unset("CPLUS_INCLUDE_PATH")
     env.unset("OBJC_INCLUDE_PATH")
-    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
-    env.set("CONFIG_SITE", os.devnull)
     env.unset("CMAKE_PREFIX_PATH")
     env.unset("PYTHONPATH")
     env.unset("R_HOME")
     env.unset("R_ENVIRON")
     env.unset("LUA_PATH")
     env.unset("LUA_CPATH")

View File

@@ -6,9 +6,7 @@
 import llnl.util.filesystem as fs
 import spack.directives
-import spack.spec
 import spack.util.executable
-import spack.util.prefix
 from .autotools import AutotoolsBuilder, AutotoolsPackage
@@ -19,18 +17,19 @@ class AspellBuilder(AutotoolsBuilder):
     to the Aspell extensions.
     """
-    def configure(
-        self,
-        pkg: "AspellDictPackage",  # type: ignore[override]
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ):
+    def configure(self, pkg, spec, prefix):
         aspell = spec["aspell"].prefix.bin.aspell
         prezip = spec["aspell"].prefix.bin.prezip
         destdir = prefix
-        sh = spack.util.executable.Executable("/bin/sh")
-        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")
+        sh = spack.util.executable.which("sh")
+        sh(
+            "./configure",
+            "--vars",
+            "ASPELL={0}".format(aspell),
+            "PREZIP={0}".format(prezip),
+            "DESTDIR={0}".format(destdir),
+        )
 # Aspell dictionaries install their bits into their prefix.lib

View File

@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path
 import stat
 import subprocess
 from typing import Callable, List, Optional, Set, Tuple, Union
@@ -533,7 +534,7 @@ def build_directory(self) -> str:
         return build_dir
     @spack.phase_callbacks.run_before("autoreconf")
-    def _delete_configure_to_force_update(self) -> None:
+    def delete_configure_to_force_update(self) -> None:
         if self.force_autoreconf:
             fs.force_remove(self.configure_abs_path)
@@ -546,7 +547,7 @@ def autoreconf_search_path_args(self) -> List[str]:
         return _autoreconf_search_path_args(self.spec)
     @spack.phase_callbacks.run_after("autoreconf")
-    def _set_configure_or_die(self) -> None:
+    def set_configure_or_die(self) -> None:
         """Ensure the presence of a "configure" script, or raise. If the "configure"
         is found, a module level attribute is set.
@@ -570,7 +571,10 @@ def configure_args(self) -> List[str]:
         return []
     def autoreconf(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Not needed usually, configure should be already there"""
@@ -599,7 +603,10 @@ def autoreconf(
         self.pkg.module.autoreconf(*autoreconf_args)
     def configure(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "configure", with the arguments specified by the builder and an
         appropriately set prefix.
@@ -612,7 +619,10 @@ def configure(
         pkg.module.configure(*options)
     def build(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         # See https://autotools.io/automake/silent.html
@@ -622,7 +632,10 @@ def build(
         pkg.module.make(*params)
     def install(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
@@ -819,7 +832,7 @@ def installcheck(self) -> None:
         self.pkg._if_make_target_execute("installcheck")
     @spack.phase_callbacks.run_after("install")
-    def _remove_libtool_archives(self) -> None:
+    def remove_libtool_archives(self) -> None:
         """Remove all .la files in prefix sub-folders if the package sets
         ``install_libtool_archives`` to be False.
         """

View File

@@ -10,8 +10,6 @@
 import llnl.util.tty as tty
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from .cmake import CMakeBuilder, CMakePackage
@@ -332,9 +330,7 @@ def initconfig_package_entries(self):
     """This method is to be overwritten by the package"""
     return []
-    def initconfig(
-        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def initconfig(self, pkg, spec, prefix):
         cache_entries = (
             self.std_initconfig_entries()
             + self.initconfig_compiler_entries()

View File

@@ -7,8 +7,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
@@ -83,16 +81,12 @@ def check_args(self):
     def setup_build_environment(self, env):
         env.set("CARGO_HOME", self.stage.path)
-    def build(
-        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
-    def install(
-        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Copy build files into package prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree("out", prefix)

View File

@@ -454,22 +454,18 @@ def cmake_args(self) -> List[str]:
         return []
     def cmake(
-        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Runs ``cmake`` in the build directory"""
-        if spec.is_develop:
-            # skip cmake phase if it is an incremental develop build
-
-            # Determine the files that will re-run CMake that are generated from a successful
-            # configure step based on state
-            primary_generator = _extract_primary_generator(self.generator)
-            configure_artifact = "Makefile"
-            if primary_generator == "Ninja":
-                configure_artifact = "ninja.build"
-
-            if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
-                return
+        # skip cmake phase if it is an incremental develop build
+        if spec.is_develop and os.path.isfile(
+            os.path.join(self.build_directory, "CMakeCache.txt")
+        ):
+            return
         options = self.std_cmake_args
         options += self.cmake_args()
@@ -478,7 +474,10 @@ def cmake(
         pkg.module.cmake(*options)
     def build(
-        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
@@ -489,7 +488,10 @@ def build(
             pkg.module.ninja(*self.build_targets)
     def install(
-        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):

View File

@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
     """Auxiliary class which contains CUDA variant, dependencies and conflicts
     and is meant to unify and facilitate its usage.
-    Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
+    Maintainers: ax3l, Rombur, davidbeckingsale
     """
     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
         "89",
         "90",
         "90a",
-        "100",
-        "100a",
-        "101",
-        "101a",
-        "120",
-        "120a",
     )
     # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     # CUDA version vs Architecture
     # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
     # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
-    # Tesla support:
     depends_on("cuda@:6.0", when="cuda_arch=10")
     depends_on("cuda@:6.5", when="cuda_arch=11")
     depends_on("cuda@2.1:6.5", when="cuda_arch=12")
     depends_on("cuda@2.1:6.5", when="cuda_arch=13")
-    # Fermi support:
     depends_on("cuda@3.0:8.0", when="cuda_arch=20")
     depends_on("cuda@3.2:8.0", when="cuda_arch=21")
-    # Kepler support:
     depends_on("cuda@5.0:10.2", when="cuda_arch=30")
     depends_on("cuda@5.0:10.2", when="cuda_arch=32")
    depends_on("cuda@5.0:11.8", when="cuda_arch=35")
    depends_on("cuda@6.5:11.8", when="cuda_arch=37")
-    # Maxwell support:
    depends_on("cuda@6.0:", when="cuda_arch=50")
    depends_on("cuda@6.5:", when="cuda_arch=52")
    depends_on("cuda@6.5:", when="cuda_arch=53")
-    # Pascal support:
    depends_on("cuda@8.0:", when="cuda_arch=60")
    depends_on("cuda@8.0:", when="cuda_arch=61")
    depends_on("cuda@8.0:", when="cuda_arch=62")
-    # Volta support:
    depends_on("cuda@9.0:", when="cuda_arch=70")
-    # Turing support:
    depends_on("cuda@9.0:", when="cuda_arch=72")
    depends_on("cuda@10.0:", when="cuda_arch=75")
-    # Ampere support:
    depends_on("cuda@11.0:", when="cuda_arch=80")
    depends_on("cuda@11.1:", when="cuda_arch=86")
    depends_on("cuda@11.4:", when="cuda_arch=87")
-    # Ada support:
    depends_on("cuda@11.8:", when="cuda_arch=89")
-    # Hopper support:
    depends_on("cuda@12.0:", when="cuda_arch=90")
    depends_on("cuda@12.0:", when="cuda_arch=90a")
-    # Blackwell support:
-    depends_on("cuda@12.8:", when="cuda_arch=100")
-    depends_on("cuda@12.8:", when="cuda_arch=100a")
-    depends_on("cuda@12.8:", when="cuda_arch=101")
-    depends_on("cuda@12.8:", when="cuda_arch=101a")
-    depends_on("cuda@12.8:", when="cuda_arch=120")
-    depends_on("cuda@12.8:", when="cuda_arch=120a")
    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
    # (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
-    conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
-    conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -7,8 +7,6 @@
 import spack.directives
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
@@ -50,8 +48,3 @@ class GenericBuilder(BuilderWithDefaults):
     # unconditionally perform any post-install phase tests
     spack.phase_callbacks.run_after("install")(execute_install_time_tests)
-    def install(
-        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
-        raise NotImplementedError

View File
@@ -7,9 +7,7 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
-from spack.directives import build_system, depends_on
+from spack.directives import build_system, extends
 from spack.multimethod import when
 from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -28,7 +26,9 @@ class GoPackage(spack.package_base.PackageBase):
     build_system("go")
     with when("build_system=go"):
-        depends_on("go", type="build")
+        # TODO: this seems like it should be depends_on, see
+        # setup_dependent_build_environment in go for why I kept it like this
+        extends("go@1.14:", type="build")
 @spack.builder.builder("go")
@@ -71,7 +71,6 @@ class GoBuilder(BuilderWithDefaults):
     def setup_build_environment(self, env):
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
-        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
     @property
     def build_directory(self):
@@ -82,31 +81,19 @@ def build_directory(self):
     def build_args(self):
         """Arguments for ``go build``."""
         # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return [
-            "-p",
-            str(self.pkg.module.make_jobs),
-            "-modcacherw",
-            "-ldflags",
-            "-s -w",
-            "-o",
-            f"{self.pkg.name}",
-        ]
+        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
     @property
     def check_args(self):
         """Argument for ``go test`` during check phase"""
         return []
-    def build(
-        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.go("build", *self.build_args)
-    def install(
-        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install built binaries into prefix bin."""
         with fs.working_dir(self.build_directory):
             fs.mkdirp(prefix.bin)
View File
@@ -7,9 +7,7 @@
 import spack.builder
 import spack.package_base
-import spack.spec
 import spack.util.executable
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
@@ -57,9 +55,7 @@
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
-    def unpack(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def unpack(self, pkg, spec, prefix):
         if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
             directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
             dirlines = directory.split("\n")
@@ -70,16 +66,15 @@ def unpack(
     def _generate_tree_line(name, prefix):
         return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
-    def generate_luarocks_config(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def generate_luarocks_config(self, pkg, spec, prefix):
         spec = self.pkg.spec
         table_entries = []
         for d in spec.traverse(deptype=("build", "run")):
             if d.package.extends(self.pkg.extendee_spec):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))
-        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
+        path = self._luarocks_config_path()
+        with open(path, "w", encoding="utf-8") as config:
             config.write(
                 """
                 deps_mode="all"
@@ -90,26 +85,23 @@ def generate_luarocks_config(
                     "\n".join(table_entries)
                 )
             )
+        return path
-    def preprocess(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def preprocess(self, pkg, spec, prefix):
         """Override this to preprocess source before building with luarocks"""
         pass
     def luarocks_args(self):
         return []
-    def install(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         rock = "."
         specs = find(".", "*.rockspec", recursive=False)
         if specs:
             rock = specs[0]
         rocks_args = self.luarocks_args()
         rocks_args.append(rock)
-        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
View File
@@ -98,20 +98,29 @@ def build_directory(self) -> str:
         return self.pkg.stage.source_path
     def edit(
-        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Edit the Makefile before calling make. The default is a no-op."""
         pass
     def build(
-        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
    ) -> None:
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)
     def install(
-        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
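Note: both sides keep the same three-phase contract (edit, then make on build_targets, then make on install_targets); only the annotation of pkg changes. A minimal sketch of a package driving these phases, with a hypothetical name, URL, and Makefile variable:

    from spack.package import FileFilter, MakefilePackage, version


    class DemoTool(MakefilePackage):
        """Toy Makefile-based package."""

        homepage = "https://example.com/demo-tool"
        url = "https://example.com/demo-tool-2.1.tar.gz"

        version("2.1", sha256="0" * 64)

        def edit(self, spec, prefix):
            # Runs before `make`; the base implementation is a no-op.
            FileFilter("Makefile").filter(r"^PREFIX\s*=.*", f"PREFIX = {prefix}")

        @property
        def install_targets(self):
            # Passed to `make` during the install phase.
            return ["install"]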
View File
@@ -5,8 +5,6 @@
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 from spack.util.executable import which
@@ -60,20 +58,16 @@
         """List of args to pass to build phase."""
         return []
-    def build(
-        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Compile code and package into a JAR file."""
         with fs.working_dir(self.build_directory):
-            mvn = which("mvn", required=True)
+            mvn = which("mvn")
             if self.pkg.run_tests:
                 mvn("verify", *self.build_args())
             else:
                 mvn("package", "-DskipTests", *self.build_args())
-    def install(
-        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Copy to installation prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree(".", prefix)
View File
@@ -188,7 +188,10 @@ def meson_args(self) -> List[str]:
         return []
     def meson(
-        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run ``meson`` in the build directory"""
         options = []
@@ -201,7 +204,10 @@ def meson(
         pkg.module.meson(*options)
     def build(
-        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the build targets"""
         options = ["-v"]
@@ -210,7 +216,10 @@ def build(
         pkg.module.ninja(*options)
     def install(
-        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
View File
@@ -7,8 +7,6 @@
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts
 from ._checks import BuilderWithDefaults
@@ -101,9 +99,7 @@ def msbuild_install_args(self):
         as `msbuild_args` by default."""
         return self.msbuild_args()
-    def build(
-        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.msbuild(
@@ -112,9 +108,7 @@ def build(
                 self.define_targets(*self.build_targets),
             )
-    def install(
-        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Run "msbuild" on the install targets specified by the builder.
         This is INSTALL by default"""
         with fs.working_dir(self.build_directory):
View File
@@ -7,8 +7,6 @@
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts
 from ._checks import BuilderWithDefaults
@@ -125,9 +123,7 @@ def nmake_install_args(self):
         Individual packages should override to specify NMake args to command line"""
         return []
-    def build(
-        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
@@ -136,9 +132,7 @@ def build(
         with fs.working_dir(self.build_directory):
             pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
-    def install(
-        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Run "nmake" on the install targets specified by the builder.
         This is INSTALL by default"""
         opts = self.std_nmake_args
View File
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when
@@ -44,9 +42,7 @@ class OctaveBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
-    def install(
-        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install the package from the archive file"""
         pkg.module.octave(
             "--quiet",
View File
@@ -10,8 +10,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.install_test import SkipTest, test_part
 from spack.multimethod import when
@@ -151,9 +149,7 @@ def configure_args(self):
         """
         return []
-    def configure(
-        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def configure(self, pkg, spec, prefix):
         """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
         list returned by :py:meth:`~.PerlBuilder.configure_args`.
@@ -177,9 +173,7 @@ def fix_shebang(self):
         repl = "#!/usr/bin/env perl"
         filter_file(pattern, repl, "Build", backup=False)
-    def build(
-        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Builds a Perl package."""
         self.build_executable()
@@ -190,8 +184,6 @@ def check(self):
         """Runs built-in tests of a Perl package."""
         self.build_executable("test")
-    def install(
-        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Installs a Perl package."""
         self.build_executable("install")
View File
@@ -28,7 +28,6 @@
 import spack.repo
 import spack.spec
 import spack.store
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
@@ -264,17 +263,16 @@ def update_external_dependencies(self, extendee_spec=None):
         # Ensure architecture information is present
         if not python.architecture:
             host_platform = spack.platforms.host()
-            host_os = host_platform.default_operating_system()
-            host_target = host_platform.default_target()
+            host_os = host_platform.operating_system("default_os")
+            host_target = host_platform.target("default_target")
             python.architecture = spack.spec.ArchSpec(
                 (str(host_platform), str(host_os), str(host_target))
             )
         else:
             if not python.architecture.platform:
                 python.architecture.platform = spack.platforms.host()
-            platform = spack.platforms.by_name(python.architecture.platform)
             if not python.architecture.os:
-                python.architecture.os = platform.default_operating_system()
+                python.architecture.os = "default_os"
             if not python.architecture.target:
                 python.architecture.target = archspec.cpu.host().family.name
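Note: both versions backfill a (platform, os, target) triple on the external python spec; they differ only in how the OS and target strings are resolved. A small illustration of the ArchSpec constructor used here, with example os/target values:

    import spack.platforms
    import spack.spec

    host_platform = spack.platforms.host()
    # "ubuntu22.04" and "x86_64" are example values; the real code derives
    # them from the platform object or archspec
    arch = spack.spec.ArchSpec((str(host_platform), "ubuntu22.04", "x86_64"))
    print(arch)  # e.g. linux-ubuntu22.04-x86_64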
View File
@@ -6,8 +6,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -64,23 +62,17 @@ def qmake_args(self):
         """List of arguments passed to qmake."""
         return []
-    def qmake(
-        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def qmake(self, pkg, spec, prefix):
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
             pkg.module.qmake(*self.qmake_args())
-    def build(
-        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with working_dir(self.build_directory):
             pkg.module.make()
-    def install(
-        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with working_dir(self.build_directory):
             pkg.module.make("install")
View File
@@ -9,8 +9,6 @@
 import llnl.util.tty as tty
 import spack.builder
-import spack.spec
-import spack.util.prefix
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
@@ -76,22 +74,18 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret
-    def install(
-        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install everything from build directory."""
         raco = Executable("raco")
         with fs.working_dir(self.build_directory):
-            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
-            name = pkg.racket_name
-            assert name is not None, "Racket package name is not set"
+            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                name,
+                self.pkg.racket_name,
                 "--deps",
                 "fail",
                 "--ignore-implies",
@@ -107,7 +101,8 @@ def install(
             except ProcessError:
                 args.insert(-2, "--skip-installed")
                 raco(*args)
-                tty.warn(
-                    f"Racket package {name} was already installed, uninstalling via "
-                    "Spack may make someone unhappy!"
-                )
+                msg = (
+                    "Racket package {0} was already installed, uninstalling via "
+                    "Spack may make someone unhappy!"
+                )
+                tty.warn(msg.format(self.pkg.racket_name))
View File
@@ -5,8 +5,6 @@
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, extends, maintainers
 from ._checks import BuilderWithDefaults
@@ -44,9 +42,7 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
-    def build(
-        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Build a Ruby gem."""
         # ruby-rake provides both rake.gemspec and Rakefile, but only
@@ -62,9 +58,7 @@ def build(
         # Some Ruby packages only ship `*.gem` files, so nothing to build
         pass
-    def install(
-        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install a Ruby gem.
         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
View File
@@ -4,8 +4,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -61,9 +59,7 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []
-    def build(
-        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))
@@ -71,9 +67,7 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []
-    def install(
-        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))
View File
@@ -11,8 +11,6 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable
@@ -43,7 +41,6 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
-        depends_on("gmake", type="build")
     @property
     def import_modules(self):
@@ -133,9 +130,7 @@ class SIPBuilder(BuilderWithDefaults):
     build_directory = "build"
-    def configure(
-        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def configure(self, pkg, spec, prefix):
         """Configure the package."""
         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
@@ -153,9 +148,7 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []
-    def build(
-        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Build the package."""
         args = self.build_args()
@@ -166,9 +159,7 @@ def build_args(self):
         """Arguments to pass to build."""
         return []
-    def install(
-        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install the package."""
         args = self.install_args()
View File
@@ -6,8 +6,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
@@ -99,9 +97,7 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
-    def configure(
-        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def configure(self, pkg, spec, prefix):
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()
@@ -112,9 +108,7 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []
-    def build(
-        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Executes the build."""
         args = self.build_args()
@@ -124,9 +118,7 @@ def build_args(self):
         """Arguments to pass to build."""
         return []
-    def install(
-        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Installs the targets on the system."""
         args = self.install_args()
View File
@@ -14,7 +14,8 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.request import Request
+from urllib.error import HTTPError, URLError
+from urllib.request import HTTPHandler, Request, build_opener
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -62,8 +63,6 @@
 PushResult = namedtuple("PushResult", "success url")
-urlopen = web_util.urlopen  # alias for mocking in tests
 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking
@@ -473,9 +472,12 @@ def generate_pipeline(env: ev.Environment, args) -> None:
     # Use all unpruned specs to populate the build group for this set
     cdash_config = cfg.get("cdash")
     if options.cdash_handler and options.cdash_handler.auth_token:
-        options.cdash_handler.populate_buildgroup(
-            [options.cdash_handler.build_name(s) for s in pipeline_specs]
-        )
+        try:
+            options.cdash_handler.populate_buildgroup(
+                [options.cdash_handler.build_name(s) for s in pipeline_specs]
+            )
+        except (SpackError, HTTPError, URLError, TimeoutError) as err:
+            tty.warn(f"Problem populating buildgroup: {err}")
     elif cdash_config:
         # warn only if there was actually a CDash configuration.
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -629,19 +631,29 @@ def download_and_extract_artifacts(url, work_dir):
     if token:
         headers["PRIVATE-TOKEN"] = token
-    request = Request(url, headers=headers, method="GET")
+    opener = build_opener(HTTPHandler)
+
+    request = Request(url, headers=headers)
+    request.get_method = lambda: "GET"
+
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+    response_code = response.getcode()
+
+    if response_code != 200:
+        msg = f"Error response code ({response_code}) in reproduce_ci_job"
+        raise SpackError(msg)
     artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
-    os.makedirs(work_dir, exist_ok=True)
-    try:
-        response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
-        with open(artifacts_zip_path, "wb") as out_file:
-            shutil.copyfileobj(response, out_file)
-    except OSError as e:
-        raise SpackError(f"Error fetching artifacts: {e}")
+    if not os.path.exists(work_dir):
+        os.makedirs(work_dir)
-    with zipfile.ZipFile(artifacts_zip_path) as zip_file:
-        zip_file.extractall(work_dir)
+    with open(artifacts_zip_path, "wb") as out_file:
+        shutil.copyfileobj(response, out_file)
+    zip_file = zipfile.ZipFile(artifacts_zip_path)
+    zip_file.extractall(work_dir)
+    zip_file.close()
     os.remove(artifacts_zip_path)
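Note: the removed lines fetch through a module-level urlopen alias inside try/except, while the added lines hand-build an opener and inspect getcode(). A stdlib-only sketch of the try/except pattern; the function name, token header, and error type are illustrative:

    import shutil
    from urllib.request import Request, urlopen


    def fetch_to_file(url: str, dest: str, token: str = "", timeout: float = 10.0) -> None:
        headers = {"PRIVATE-TOKEN": token} if token else {}
        request = Request(url, headers=headers, method="GET")
        try:
            # urlopen raises HTTPError for non-2xx responses, so no
            # explicit getcode() check is needed
            with urlopen(request, timeout=timeout) as response, open(dest, "wb") as out:
                shutil.copyfileobj(response, out)
        except OSError as e:  # URLError and HTTPError are OSError subclasses
            raise RuntimeError(f"Error fetching artifacts: {e}") from e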
View File
@@ -1,21 +1,23 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import codecs
 import copy
 import json
 import os
 import re
+import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import Request
+from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import memoized
+from llnl.util.lang import Singleton, memoized
 import spack.binary_distribution as bindist
 import spack.config as cfg
@@ -33,11 +35,32 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
+def _urlopen():
+    error_handler = web_util.SpackHTTPDefaultErrorHandler()
+
+    # One opener with HTTPS ssl enabled
+    with_ssl = build_opener(
+        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
+    )
+
+    # One opener with HTTPS ssl disabled
+    without_ssl = build_opener(
+        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
+    )
+
+    # And dynamically dispatch based on the config:verify_ssl.
+    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
+        opener = with_ssl if verify_ssl else without_ssl
+        timeout = timeout or cfg.get("config:connect_timeout", 1)
+        return opener.open(fullurl, data, timeout)
+
+    return dispatch_open
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
+_dyn_mapping_urlopener = Singleton(_urlopen)
-# this exists purely for testing purposes
-_urlopen = web_util.urlopen
 def copy_files_to_artifacts(src, artifacts_dir):
@@ -256,25 +279,26 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)
-    def create_buildgroup(self, headers, url, group_name, group_type):
+    def create_buildgroup(self, opener, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
         enc_data = json.dumps(data).encode("utf-8")
         request = Request(url, data=enc_data, headers=headers)
-        try:
-            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
-        except OSError as e:
-            tty.warn(f"Failed to create CDash buildgroup: {e}")
-            return None
-        try:
-            response_json = json.loads(response_text)
-            return response_json["id"]
-        except (json.JSONDecodeError, KeyError) as e:
-            tty.warn(f"Failed to parse CDash response: {e}")
-            return None
+        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response_code = response.getcode()
+
+        if response_code not in [200, 201]:
+            msg = f"Creating buildgroup failed (response code = {response_code})"
+            tty.warn(msg)
+            return None
+
+        response_text = response.read()
+        response_json = json.loads(response_text)
+        build_group_id = response_json["id"]
+
+        return build_group_id
     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"
@@ -284,11 +308,16 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }
-        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")
+        opener = build_opener(HTTPHandler)
+
+        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(
+            opener, headers, url, f"Latest {self.build_group}", "Latest"
+        )
         if not parent_group_id or not group_id:
-            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
+            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
+            tty.warn(msg)
             return
         data = {
@@ -300,12 +329,15 @@ def populate_buildgroup(self, job_names):
         enc_data = json.dumps(data).encode("utf-8")
-        request = Request(url, data=enc_data, headers=headers, method="PUT")
+        request = Request(url, data=enc_data, headers=headers)
+        request.get_method = lambda: "PUT"
-        try:
-            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
-        except OSError as e:
-            tty.warn(f"Failed to populate CDash buildgroup: {e}")
+        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response_code = response.getcode()
+
+        if response_code != 200:
+            msg = f"Error response code ({response_code}) in populate_buildgroup"
+            tty.warn(msg)
     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI
@@ -703,6 +735,9 @@ def _apply_section(dest, src):
             for value in header.values():
                 value = os.path.expandvars(value)
+            verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
+            timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
             required = mapping.get("require", [])
             allowed = mapping.get("allow", [])
             ignored = mapping.get("ignore", [])
@@ -736,15 +771,19 @@ def job_query(job):
                 endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
             )
             try:
-                response = _urlopen(request)
-                config = json.load(response)
+                response = _dyn_mapping_urlopener(
+                    request, verify_ssl=verify_ssl, timeout=timeout
+                )
             except Exception as e:
                 # For now just ignore any errors from dynamic mapping and continue
                 # This is still experimental, and failures should not stop CI
                 # from running normally
-                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
+                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
+                tty.warn(f"{e}")
                 continue
+            config = json.load(codecs.getreader("utf-8")(response))
             # Strip ignore keys
             if ignored:
                 for key in ignored:
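Note: the added lines pre-build two urllib openers and pick one per request according to verify_ssl, deferring construction through Singleton so the openers exist only once first used. A condensed sketch of that dual-opener dispatch, with the Spack error handler and config lookup omitted:

    import ssl
    from urllib.request import HTTPHandler, HTTPSHandler, build_opener


    def make_dispatch_open():
        with_ssl = build_opener(
            HTTPHandler(), HTTPSHandler(context=ssl.create_default_context())
        )
        without_ssl = build_opener(
            HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context())
        )

        def dispatch_open(url, data=None, timeout=10.0, verify_ssl=True):
            # choose the opener at call time, mirroring config:verify_ssl
            opener = with_ssl if verify_ssl else without_ssl
            return opener.open(url, data, timeout)

        return dispatch_open


    open_url = make_dispatch_open()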
View File
@@ -26,6 +26,7 @@
 import spack.paths
 import spack.repo
 import spack.spec
+import spack.spec_lookup
 import spack.spec_parser
 import spack.store
 import spack.traverse as traverse
@@ -211,7 +212,8 @@ def _concretize_spec_pairs(
 ):
     # Get all the concrete specs
     ret = [
-        concrete or (abstract if abstract.concrete else abstract.lookup_hash())
+        concrete
+        or (abstract if abstract.concrete else spack.spec_lookup.lookup_hash(abstract))
         for abstract, concrete in to_concretize
     ]
View File
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections
-import warnings
 import archspec.cpu
@@ -52,10 +51,10 @@ def setup_parser(subparser):
         "-t", "--target", action="store_true", default=False, help="print only the target"
     )
     parts2.add_argument(
-        "-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
+        "-f", "--frontend", action="store_true", default=False, help="print frontend"
     )
     parts2.add_argument(
-        "-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
+        "-b", "--backend", action="store_true", default=False, help="print backend"
    )
@@ -99,14 +98,15 @@ def arch(parser, args):
         display_targets(archspec.cpu.TARGETS)
         return
+    os_args, target_args = "default_os", "default_target"
     if args.frontend:
-        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
+        os_args, target_args = "frontend", "frontend"
     elif args.backend:
-        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
+        os_args, target_args = "backend", "backend"
     host_platform = spack.platforms.host()
-    host_os = host_platform.default_operating_system()
-    host_target = host_platform.default_target()
+    host_os = host_platform.operating_system(os_args)
+    host_target = host_platform.target(target_args)
     if args.family:
         host_target = host_target.family
     elif args.generic:
View File
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
+import os.path
 import shutil
 import sys
 import tempfile
@@ -436,7 +436,6 @@ def write_metadata(subdir, metadata):
         shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
         shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
         instructions += cmd.format("local-binaries", rel_directory)
-        instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
View File
@@ -4,7 +4,7 @@
 import argparse
-import os
+import os.path
 import textwrap
 from llnl.util.lang import stable_partition
View File
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path
 import llnl.util.tty
View File
@@ -86,8 +86,8 @@ def create_db_tarball(args):
 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.default_operating_system()
-    host_target = host_platform.default_target()
+    host_os = host_platform.operating_system("frontend")
+    host_target = host_platform.target("frontend")
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
View File
@@ -11,6 +11,7 @@
 import spack.cmd
 import spack.environment as ev
 import spack.solver.asp as asp
+import spack.spec_lookup
 import spack.util.spack_json as sjson
 from spack.cmd.common import arguments
@@ -210,7 +211,7 @@ def diff(parser, args):
     specs = []
     for spec in spack.cmd.parse_specs(args.specs):
         # If the spec has a hash, check it before disambiguating
-        spec.replace_hash()
+        spack.spec_lookup.replace_hash(spec)
         if spec.concrete:
             specs.append(spec)
         else:
View File
@@ -110,7 +110,10 @@ def external_find(args):
     # Note that KeyboardInterrupt does not subclass Exception
     # (so CTRL-C will terminate the program as expected).
     skip_msg = "Skipping manifest and continuing with other external checks"
-    if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
+    if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
+        errno.EPERM,
+        errno.EACCES,
+    ]:
         # The manifest file does not have sufficient permissions enabled:
         # print a warning and keep going
         tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
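Note: IOError has been an alias of OSError since Python 3.3 (PEP 3151), so the single isinstance(e, OSError) check on the removed line accepts exactly the same exceptions as the added two-way check:

    assert IOError is OSError  # true on any supported Python

    try:
        open("/nonexistent/path")
    except OSError as e:
        print(type(e).__name__, e.errno)  # FileNotFoundError 2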
View File
@@ -54,6 +54,10 @@
   @m{target=target}              specific <target> processor
   @m{arch=platform-os-target}    shortcut for all three above
+  cross-compiling:
+  @m{os=backend} or @m{os=be}    build for compute node (backend)
+  @m{os=frontend} or @m{os=fe}   build for login node (frontend)
 dependencies:
   ^dependency [constraints]      specify constraints on dependencies
   ^@K{/hash}                     build with a specific installed
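Note: the added help lines document shortcuts that parse like any other arch constraint in a spec string. A minimal illustration, using zlib purely as an example package:

    import spack.spec

    s = spack.spec.Spec("zlib os=frontend")
    print(s.os)  # "frontend" until concretization maps it to a concrete OS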
View File
@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""
 import collections
-import os
+import os.path
 import shutil
 import sys
View File
@@ -177,15 +177,16 @@ def test_run(args):
     matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
     if spec and not matching:
         tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
-        # TODO: Need to write out a log message and/or CDASH Testing
-        # output that package not installed IF continue to process
-        # these issues here.
-
-        # if args.log_format:
-        #     # Proceed with the spec assuming the test process
-        #     # to ensure report package as skipped (e.g., for CI)
-        #     specs_to_test.append(spec)
+        """
+        TODO: Need to write out a log message and/or CDASH Testing
+        output that package not installed IF continue to process
+        these issues here.
+
+        if args.log_format:
+            # Proceed with the spec assuming the test process
+            # to ensure report package as skipped (e.g., for CI)
+            specs_to_test.append(spec)
+        """
     specs_to_test.extend(matching)
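Note: the removed lines keep these notes as comments because a bare triple-quoted string inside a function body (the added form) is an expression statement: it is parsed and evaluated, then discarded, rather than being ignored like a comment:

    def f():
        """Real docstring."""
        "a bare string here is an expression statement, not a comment"
        return 1

    assert f() == 1  # the stray string changes nothing at runtime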
View File
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
+import os.path
 import shutil
 import llnl.util.tty as tty
View File
@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os
+import os.path
 import re
 import sys
View File
@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.default_operating_system()
-        target = host_platform.default_target()
+        operating_system = host_platform.operating_system("default_os")
+        target = host_platform.target("default_target")
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().default_target()
+            target = spack.platforms.host().target("default_target")
         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.default_operating_system()
+            operating_system = host_platform.operating_system("default_os")
     return operating_system, target
View File
@@ -199,10 +199,12 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
         the packages in the list, if True activate 'test' dependencies for all packages.
     """
     from spack.solver.asp import Solver, SpecBuilder
+    from spack.spec_lookup import replace_hash
     if isinstance(spec, str):
         spec = Spec(spec)
-    spec = spec.lookup_hash()
+
+    replace_hash(spec)
     if spec.concrete:
         return spec.copy()
View File
@@ -53,7 +53,6 @@
 import spack.schema.definitions
 import spack.schema.develop
 import spack.schema.env
-import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
@@ -71,7 +70,6 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
-    "env_vars": spack.schema.env_vars.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
View File
@@ -57,7 +57,7 @@ def validate(configuration_file):
     # Set the default value of the concretization strategy to unify and
     # warn if the user explicitly set another value
     env_dict.setdefault("concretizer", {"unify": True})
-    if env_dict["concretizer"]["unify"] is not True:
+    if not env_dict["concretizer"]["unify"] is True:
         warnings.warn(
             '"concretizer:unify" is not set to "true", which means the '
             "generated image may contain different variants of the same "
View File
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Manages the details on the images used in the various stages."""
 import json
-import os
+import os.path
 import shlex
 import sys
View File

@@ -123,15 +123,6 @@
"deprecated_for", "deprecated_for",
) )
#: File where the database is written
INDEX_JSON_FILE = "index.json"
# Verifier file to check last modification of the DB
_INDEX_VERIFIER_FILE = "index_verifier"
# Lockfile for the database
_LOCK_FILE = "lock"
@llnl.util.lang.memoized @llnl.util.lang.memoized
def _getfqdn(): def _getfqdn():
@@ -263,6 +254,18 @@ def from_dict(cls, spec, dictionary):
return InstallRecord(spec, **d) return InstallRecord(spec, **d)
class ForbiddenLockError(SpackError):
"""Raised when an upstream DB attempts to acquire a lock"""
class ForbiddenLock:
def __getattr__(self, name):
raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
def __reduce__(self):
return ForbiddenLock, tuple()
class LockConfiguration(NamedTuple): class LockConfiguration(NamedTuple):
"""Data class to configure locks in Database objects """Data class to configure locks in Database objects
@@ -586,9 +589,9 @@ def __init__(
self.layout = layout self.layout = layout
# Set up layout of database files within the db dir # Set up layout of database files within the db dir
self._index_path = self.database_directory / INDEX_JSON_FILE self._index_path = self.database_directory / "index.json"
self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE self._verifier_path = self.database_directory / "index_verifier"
self._lock_path = self.database_directory / _LOCK_FILE self._lock_path = self.database_directory / "lock"
self.is_upstream = is_upstream self.is_upstream = is_upstream
self.last_seen_verifier = "" self.last_seen_verifier = ""
@@ -603,14 +606,18 @@ def __init__(
# initialize rest of state. # initialize rest of state.
self.db_lock_timeout = lock_cfg.database_timeout self.db_lock_timeout = lock_cfg.database_timeout
tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s") tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
self.lock = lk.Lock( self.lock: Union[ForbiddenLock, lk.Lock]
str(self._lock_path), if self.is_upstream:
default_timeout=self.db_lock_timeout, self.lock = ForbiddenLock()
desc="database", else:
enable=not self.is_upstream and lock_cfg.enable, self.lock = lk.Lock(
) str(self._lock_path),
default_timeout=self.db_lock_timeout,
desc="database",
enable=lock_cfg.enable,
)
self._data: Dict[str, InstallRecord] = {} self._data: Dict[str, InstallRecord] = {}
# For every installed spec we keep track of its install prefix, so that # For every installed spec we keep track of its install prefix, so that
@@ -1034,9 +1041,6 @@ def _write(self, type=None, value=None, traceback=None):
This routine does no locking. This routine does no locking.
""" """
if self.is_upstream:
raise UpstreamDatabaseLockingError("Cannot write to an upstream database")
self._ensure_parent_directories() self._ensure_parent_directories()
# Do not write if exceptions were raised # Do not write if exceptions were raised
@@ -1086,7 +1090,7 @@ def _read(self):
self._state_is_inconsistent = False self._state_is_inconsistent = False
return return
elif self.is_upstream: elif self.is_upstream:
tty.warn(f"upstream not found: {self._index_path}") tty.warn("upstream not found: {0}".format(self._index_path))
def _add( def _add(
self, self,
@@ -1657,24 +1661,38 @@ def query(
""" """
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs] valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
if install_tree not in valid_trees: if install_tree not in valid_trees:
raise ValueError( msg = "Invalid install_tree argument to Database.query()\n"
f"Invalid install_tree argument to Database.query(). Try one of {valid_trees}" msg += f"Try one of {', '.join(valid_trees)}"
tty.error(msg)
return []
upstream_results = []
upstreams = self.upstream_dbs
if install_tree not in ("all", "upstream"):
upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
for upstream_db in upstreams:
# queries for upstream DBs need to *not* lock - we may not
# have permissions to do this and the upstream DBs won't know about
# us anyway (so e.g. they should never uninstall specs)
upstream_results.extend(
upstream_db._query(
query_spec,
predicate_fn=predicate_fn,
installed=installed,
explicit=explicit,
start_date=start_date,
end_date=end_date,
hashes=hashes,
in_buildcache=in_buildcache,
origin=origin,
)
or []
) )
if install_tree == "all": local_results: Set["spack.spec.Spec"] = set()
databases = [self, *self.upstream_dbs] if install_tree in ("all", "local") or self.root == install_tree:
elif install_tree == "upstream": local_results = set(
databases = self.upstream_dbs self.query_local(
elif install_tree == "local" or self.root == install_tree:
databases = [self]
else:
databases = [u for u in self.upstream_dbs if u.root == install_tree]
results: List[spack.spec.Spec] = []
for db in databases:
results.extend(
db.query_local(
query_spec, query_spec,
predicate_fn=predicate_fn, predicate_fn=predicate_fn,
installed=installed, installed=installed,
@@ -1687,13 +1705,8 @@ def query(
) )
) )
# Stable deduplication on dag hash picks local specs over upstreams. results = list(local_results) + list(x for x in upstream_results if x not in local_results)
if len(databases) > 1: results.sort() # type: ignore[call-overload]
results = list(llnl.util.lang.dedupe(results, key=lambda x: x.dag_hash()))
# reduce number of comparisons with slow default __lt__
results.sort(key=lambda s: s.name)
results.sort()
return results return results
def query_one( def query_one(
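In the query() variant that walks local and upstream databases uniformly, local databases come first in the list, and the combined results are deduplicated on DAG hash; a stable first-wins dedupe therefore keeps local specs and drops upstream duplicates. A self-contained sketch of that idea (a hypothetical re-implementation, not llnl.util.lang.dedupe itself):

    from typing import Callable, Iterable, Iterator, TypeVar

    T = TypeVar("T")

    def dedupe(items: Iterable[T], key: Callable[[T], object]) -> Iterator[T]:
        seen = set()
        for item in items:
            k = key(item)
            if k not in seen:  # first occurrence wins, so local beats upstream
                seen.add(k)
                yield item

    records = [("abc123", "local"), ("def456", "local"), ("abc123", "upstream")]
    print(list(dedupe(records, key=lambda r: r[0])))
    # [('abc123', 'local'), ('def456', 'local')]

The two-pass sort relies on stability for the same reason: Timsort exploits the long ordered runs left by the cheap name-key sort, so the final sort needs far fewer calls to the slow default __lt__, as the inline comment notes.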
View File
@@ -15,6 +15,7 @@
import glob import glob
import itertools import itertools
import os import os
import os.path
import pathlib import pathlib
import re import re
import sys import sys
View File
@@ -7,6 +7,7 @@
import collections import collections
import concurrent.futures import concurrent.futures
import os import os
import os.path
import re import re
import sys import sys
import traceback import traceback
View File
@@ -32,7 +32,7 @@ class OpenMpi(Package):
""" """
import collections import collections
import collections.abc import collections.abc
import os import os.path
import re import re
from typing import Any, Callable, List, Optional, Tuple, Type, Union from typing import Any, Callable, List, Optional, Tuple, Type, Union
View File
@@ -581,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
# Check if the target path lexists # Check if the target path lexists
try: try:
st = os.lstat(new_root) st = os.lstat(new_root)
except OSError: except (IOError, OSError):
return return
# Empty directories are fine # Empty directories are fine
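This hunk, like several below, switches between except (IOError, OSError) and except OSError. Nothing is lost by the shorter form: since Python 3.3 (PEP 3151), IOError is merely an alias of OSError, as a quick check confirms:

    # IOError has been an alias of OSError since Python 3.3, so naming both
    # in one except tuple was redundant.
    assert IOError is OSError

    try:
        open("/nonexistent/path")
    except OSError as e:
        print(type(e).__name__)  # FileNotFoundError, an OSError subclass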
@@ -861,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
): ):
try: try:
shutil.rmtree(old_root) shutil.rmtree(old_root)
except OSError as e: except (IOError, OSError) as e:
msg = "Failed to remove old view at %s\n" % old_root msg = "Failed to remove old view at %s\n" % old_root
msg += str(e) msg += str(e)
tty.warn(msg) tty.warn(msg)
@@ -2554,7 +2554,7 @@ def is_latest_format(manifest):
try: try:
with open(manifest, encoding="utf-8") as f: with open(manifest, encoding="utf-8") as f:
data = syaml.load(f) data = syaml.load(f)
except OSError: except (OSError, IOError):
return True return True
top_level_key = _top_level_key(data) top_level_key = _top_level_key(data)
changed = spack.schema.env.update(data[top_level_key]) changed = spack.schema.env.update(data[top_level_key])
@@ -2634,32 +2634,6 @@ def _ensure_env_dir():
shutil.copy(envfile, target_manifest) shutil.copy(envfile, target_manifest)
# Copy relative path includes that live inside the environment dir
try:
manifest = EnvironmentManifestFile(environment_dir)
except Exception:
# error handling for bad manifests is handled on other code paths
return
includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes:
if os.path.isabs(include):
continue
abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir:
tty.debug(f"Will not copy relative include from outside environment: {include}")
continue
orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue
fs.touchp(abspath)
shutil.copy(orig_abspath, abspath)
class EnvironmentManifestFile(collections.abc.Mapping): class EnvironmentManifestFile(collections.abc.Mapping):
"""Manages the in-memory representation of a manifest file, and its synchronization """Manages the in-memory representation of a manifest file, and its synchronization
View File
@@ -10,7 +10,6 @@
import spack.environment as ev import spack.environment as ev
import spack.repo import spack.repo
import spack.schema.environment
import spack.store import spack.store
from spack.util.environment import EnvironmentModifications from spack.util.environment import EnvironmentModifications
@@ -157,11 +156,6 @@ def activate(
# MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive) # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
# become PATH variables. # become PATH variables.
# #
env_vars_yaml = env.manifest.configuration.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
try: try:
if view and env.has_view(view): if view and env.has_view(view):
with spack.store.STORE.db.read_transaction(): with spack.store.STORE.db.read_transaction():
@@ -195,10 +189,6 @@ def deactivate() -> EnvironmentModifications:
if active is None: if active is None:
return env_mods return env_mods
env_vars_yaml = active.manifest.configuration.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
active_view = os.getenv(ev.spack_env_view_var) active_view = os.getenv(ev.spack_env_view_var)
if active_view and active.has_view(active_view): if active_view and active.has_view(active_view):
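The env_vars lines in this hunk parse manifest settings into environment modifications on activation and apply the reversed modifications on deactivation. A toy model of that symmetric pattern (hypothetical classes, not Spack's EnvironmentModifications API):

    import os

    class SetEnv:
        def __init__(self, name: str, value: str) -> None:
            self.name, self.value = name, value

        def apply(self) -> None:
            os.environ[self.name] = self.value

        def reversed(self) -> "UnsetEnv":
            return UnsetEnv(self.name)

    class UnsetEnv:
        def __init__(self, name: str) -> None:
            self.name = name

        def apply(self) -> None:
            os.environ.pop(self.name, None)

    mods = [SetEnv("MY_VAR", "1")]
    for m in mods:
        m.apply()             # activate
    for m in mods:
        m.reversed().apply()  # deactivate undoes the activation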
View File
@@ -202,3 +202,10 @@ class MirrorError(SpackError):
def __init__(self, msg, long_msg=None): def __init__(self, msg, long_msg=None):
super().__init__(msg, long_msg) super().__init__(msg, long_msg)
class InvalidHashError(SpecError):
def __init__(self, spec, hash):
msg = f"No spec with hash {hash} could be found to match {spec}."
msg += " Either the hash does not exist, or it does not match other spec constraints."
super().__init__(msg)
View File
@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
if name == target_name: if name == target_name:
return path return path
else: else:
raise OSError('extension "{0}" not found'.format(target_name)) raise IOError('extension "{0}" not found'.format(target_name))
def get_module(cmd_name): def get_module(cmd_name):
View File
@@ -25,6 +25,7 @@
import functools import functools
import http.client import http.client
import os import os
import os.path
import re import re
import shutil import shutil
import urllib.error import urllib.error
@@ -320,15 +321,9 @@ def _fetch_urllib(self, url):
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT}) request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
if os.path.lexists(save_file):
os.remove(save_file)
try: try:
response = web_util.urlopen(request) response = web_util.urlopen(request)
tty.msg(f"Fetching {url}") except (TimeoutError, urllib.error.URLError) as e:
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
except OSError as e:
# clean up archive on failure. # clean up archive on failure.
if self.archive_file: if self.archive_file:
os.remove(self.archive_file) os.remove(self.archive_file)
@@ -336,6 +331,14 @@ def _fetch_urllib(self, url):
os.remove(save_file) os.remove(save_file)
raise FailedDownloadError(e) from e raise FailedDownloadError(e) from e
tty.msg(f"Fetching {url}")
if os.path.lexists(save_file):
os.remove(save_file)
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
# Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
# mirror that is broken, leading to spurious download failures. In that case it's helpful # mirror that is broken, leading to spurious download failures. In that case it's helpful
# for users to know which URL was actually fetched. # for users to know which URL was actually fetched.
@@ -532,16 +535,11 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
@_needs_stage @_needs_stage
def fetch(self): def fetch(self):
file = self.stage.save_filename file = self.stage.save_filename
tty.msg(f"Fetching {self.url}")
if os.path.lexists(file):
os.remove(file)
try: try:
response = self._urlopen(self.url) response = self._urlopen(self.url)
tty.msg(f"Fetching {self.url}") except (TimeoutError, urllib.error.URLError) as e:
with open(file, "wb") as f:
shutil.copyfileobj(response, f)
except OSError as e:
# clean up archive on failure. # clean up archive on failure.
if self.archive_file: if self.archive_file:
os.remove(self.archive_file) os.remove(self.archive_file)
@@ -549,6 +547,12 @@ def fetch(self):
os.remove(file) os.remove(file)
raise FailedDownloadError(e) from e raise FailedDownloadError(e) from e
if os.path.lexists(file):
os.remove(file)
with open(file, "wb") as f:
shutil.copyfileobj(response, f)
class VCSFetchStrategy(FetchStrategy): class VCSFetchStrategy(FetchStrategy):
"""Superclass for version control system fetch strategies. """Superclass for version control system fetch strategies.
View File
@@ -427,7 +427,7 @@ def needs_file(spec, file):
try: try:
with open(manifest_file, "r", encoding="utf-8") as f: with open(manifest_file, "r", encoding="utf-8") as f:
manifest = s_json.load(f) manifest = s_json.load(f)
except OSError: except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file. # if we can't load it, assume it doesn't know about the file.
manifest = {} manifest = {}
return test_path in manifest return test_path in manifest
@@ -831,7 +831,7 @@ def get_spec_from_file(filename):
try: try:
with open(filename, "r", encoding="utf-8") as f: with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f) return spack.spec.Spec.from_yaml(f)
except OSError: except IOError:
return None return None
View File
@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
with open(filepath, "rb") as f: with open(filepath, "rb") as f:
elf = parse_elf(f, interpreter=True, dynamic_section=True) elf = parse_elf(f, interpreter=True, dynamic_section=True)
return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp) return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
except (OSError, ElfParsingError): except (IOError, OSError, ElfParsingError):
return False return False
View File
@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
# Only look at executable, non-symlink files. # Only look at executable, non-symlink files.
try: try:
st = os.lstat(path) st = os.lstat(path)
except OSError: except (IOError, OSError):
continue continue
if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe: if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:
View File
@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# Include build dependencies if pkg is going to be built from sources, or # Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested. # if build deps are explicitly requested.
if include_build_deps or not ( if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
): ):
depflag |= dt.BUILD depflag |= dt.BUILD
if self.run_tests(pkg): if self.run_tests(pkg):
View File
@@ -14,6 +14,7 @@
import io import io
import operator import operator
import os import os
import os.path
import pstats import pstats
import re import re
import shlex import shlex
@@ -163,7 +164,7 @@ def format_help_sections(self, level):
# lazily add all commands to the parser when needed. # lazily add all commands to the parser when needed.
add_all_commands(self) add_all_commands(self)
# Print help on subcommands in neatly formatted sections. """Print help on subcommands in neatly formatted sections."""
formatter = self._get_formatter() formatter = self._get_formatter()
# Create a list of subcommand actions. Argparse internals are nasty! # Create a list of subcommand actions. Argparse internals are nasty!
@@ -728,7 +729,7 @@ def _compatible_sys_types():
with the current host. with the current host.
""" """
host_platform = spack.platforms.host() host_platform = spack.platforms.host()
host_os = str(host_platform.default_operating_system()) host_os = str(host_platform.operating_system("default_os"))
host_target = archspec.cpu.host() host_target = archspec.cpu.host()
compatible_targets = [host_target] + host_target.ancestors compatible_targets = [host_target] + host_target.ancestors
View File
@@ -2,6 +2,7 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
import os.path
from typing import Optional from typing import Optional
import llnl.url import llnl.url
View File
@@ -64,7 +64,7 @@ def from_local_path(path: str):
@staticmethod @staticmethod
def from_url(url: str): def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL.""" """Create an anonymous mirror by URL. This method validates the URL."""
if urllib.parse.urlparse(url).scheme not in supported_url_schemes: if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError( raise ValueError(
f'"{url}" is not a valid mirror URL. ' f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}." f"Scheme must be one of {supported_url_schemes}."
View File
@@ -2,6 +2,7 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
import os.path
import traceback import traceback
import llnl.util.tty as tty import llnl.util.tty as tty
View File
@@ -31,7 +31,7 @@
import copy import copy
import datetime import datetime
import inspect import inspect
import os import os.path
import re import re
import string import string
from typing import List, Optional from typing import List, Optional
View File
@@ -4,7 +4,7 @@
import collections import collections
import itertools import itertools
import os import os.path
from typing import Dict, List, Optional, Tuple from typing import Dict, List, Optional, Tuple
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
View File
@@ -5,7 +5,7 @@
"""This module implements the classes necessary to generate Tcl """This module implements the classes necessary to generate Tcl
non-hierarchical modules. non-hierarchical modules.
""" """
import os import os.path
from typing import Dict, Optional, Tuple from typing import Dict, Optional, Tuple
import spack.config import spack.config
View File
@@ -7,7 +7,6 @@
import base64 import base64
import json import json
import re import re
import socket
import time import time
import urllib.error import urllib.error
import urllib.parse import urllib.parse
@@ -383,7 +382,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication.""" """Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector() opener = urllib.request.OpenerDirector()
for handler in [ for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(), urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()), urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(), spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -412,7 +410,7 @@ def wrapper(*args, **kwargs):
for i in range(retries): for i in range(retries):
try: try:
return f(*args, **kwargs) return f(*args, **kwargs)
except OSError as e: except (urllib.error.URLError, TimeoutError) as e:
# Retry on internal server errors, and rate limit errors # Retry on internal server errors, and rate limit errors
# Potentially this could take into account the Retry-After header # Potentially this could take into account the Retry-After header
# if registries support it # if registries support it
@@ -422,10 +420,9 @@ def wrapper(*args, **kwargs):
and (500 <= e.code < 600 or e.code == 429) and (500 <= e.code < 600 or e.code == 429)
) )
or ( or (
isinstance(e, urllib.error.URLError) isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
and isinstance(e.reason, socket.timeout)
) )
or isinstance(e, socket.timeout) or isinstance(e, TimeoutError)
): ):
# Exponential backoff # Exponential backoff
sleep(2**i) sleep(2**i)
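The except OSError variant of this retry wrapper covers the (urllib.error.URLError, TimeoutError) tuple on the other side, because both of those are OSError subclasses. A reduced, runnable sketch of the backoff loop (the HTTP status-code checks from the hunk are elided):

    import functools
    from time import sleep

    def default_retry(f, retries: int = 3):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            for i in range(retries):
                try:
                    return f(*args, **kwargs)
                except OSError:
                    if i == retries - 1:
                        raise  # out of attempts
                    sleep(2**i)  # exponential backoff: 1s, 2s, 4s, ...
        return wrapper

    attempts = {"n": 0}

    def flaky() -> str:
        attempts["n"] += 1
        if attempts["n"] < 3:
            raise TimeoutError("transient failure")  # an OSError subclass
        return "ok"

    print(default_retry(flaky)())  # succeeds on the third attempt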
View File
@@ -2,64 +2,31 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
# flake8: noqa: F401, E402 # flake8: noqa: F401
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from """spack.util.package is a set of useful build tools and directives for packages.
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""
Everything in this module is automatically imported into Spack package files.
"""
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
from shutil import move, rmtree from shutil import move, rmtree
from spack.error import InstallError, NoHeadersError, NoLibrariesError
# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd
# import most common types used in packages # import most common types used in packages
from typing import Dict, List, Optional from typing import Dict, List, Optional
import llnl.util.filesystem
class tty: from llnl.util.filesystem import *
import llnl.util.tty as _tty
debug = _tty.debug
error = _tty.error
info = _tty.info
msg = _tty.msg
warn = _tty.warn
from llnl.util.filesystem import (
FileFilter,
FileList,
HeaderList,
LibraryList,
ancestor,
can_access,
change_sed_delimiter,
copy,
copy_tree,
filter_file,
find,
find_all_headers,
find_first,
find_headers,
find_libraries,
find_system_libraries,
force_remove,
force_symlink,
install,
install_tree,
is_exe,
join_path,
keep_modification_time,
library_extensions,
mkdirp,
remove_directory_contents,
remove_linked_tree,
rename,
set_executable,
set_install_permissions,
touch,
working_dir,
)
from llnl.util.symlink import symlink from llnl.util.symlink import symlink
import spack.util.executable
# These props will be overridden when the build env is set up.
from spack.build_environment import MakeExecutable from spack.build_environment import MakeExecutable
from spack.build_systems.aspell_dict import AspellDictPackage from spack.build_systems.aspell_dict import AspellDictPackage
from spack.build_systems.autotools import AutotoolsPackage from spack.build_systems.autotools import AutotoolsPackage
@@ -109,24 +76,7 @@ class tty:
from spack.builder import BaseBuilder from spack.builder import BaseBuilder
from spack.config import determine_number_of_jobs from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import ( from spack.directives import *
build_system,
can_splice,
conditional,
conflicts,
depends_on,
extends,
license,
maintainers,
patch,
provides,
redistribute,
requires,
resource,
variant,
version,
)
from spack.error import InstallError, NoHeadersError, NoLibrariesError
from spack.install_test import ( from spack.install_test import (
SkipTest, SkipTest,
cache_extra_test_sources, cache_extra_test_sources,
@@ -136,28 +86,26 @@ class tty:
install_test_root, install_test_root,
test_part, test_part,
) )
from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
from spack.mixins import filter_compiler_wrappers from spack.mixins import filter_compiler_wrappers
from spack.multimethod import default_args, when from spack.multimethod import default_args, when
from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes from spack.package_base import (
from spack.package_completions import ( DependencyConflictError,
bash_completion_path, build_system_flags,
fish_completion_path, env_flags,
zsh_completion_path, flatten_dependencies,
inject_flags,
install_dependency_symlinks,
on_package_attributes,
) )
from spack.package_completions import *
from spack.phase_callbacks import run_after, run_before from spack.phase_callbacks import run_after, run_before
from spack.spec import Spec from spack.spec import InvalidSpecDetected, Spec
from spack.util.environment import EnvironmentModifications from spack.util.executable import *
from spack.util.executable import Executable, ProcessError, which, which_string
from spack.util.filesystem import fix_darwin_install_name from spack.util.filesystem import fix_darwin_install_name
from spack.util.prefix import Prefix
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.version import Version, ver from spack.version import Version, ver
# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd
# These are just here for editor support; they may be set when the build env is set up. # These are just here for editor support; they may be set when the build env is set up.
configure: Executable configure: Executable
make_jobs: int make_jobs: int
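One side of this hunk also wraps llnl.util.tty in a class tty: namespace rather than importing the module itself into the package API. A standalone illustration of that trick, substituting the stdlib logging module for llnl.util.tty:

    # A class body can import a module and re-bind selected functions,
    # exposing them under one name without leaking the module import.
    class log:
        import logging as _logging

        debug = _logging.debug
        info = _logging.info
        warning = _logging.warning
        error = _logging.error

    log.warning("behaves like a read-only namespace")  # WARNING:root:...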
View File
@@ -30,6 +30,7 @@
import llnl.util.filesystem as fsys import llnl.util.filesystem as fsys
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized from llnl.util.lang import classproperty, memoized
from llnl.util.link_tree import LinkTree
import spack.compilers import spack.compilers
import spack.config import spack.config
@@ -66,6 +67,10 @@
] ]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE] FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
#: Filename for the Spack build/install log. #: Filename for the Spack build/install log.
_spack_build_logfile = "spack-build-out.txt" _spack_build_logfile = "spack-build-out.txt"
@@ -697,6 +702,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Verbosity level, preserved across installs. #: Verbosity level, preserved across installs.
_verbose = None _verbose = None
#: index of patches by sha256 sum, built lazily
_patches_by_hash = None
#: Package homepage where users can find more information about the package #: Package homepage where users can find more information about the package
homepage: Optional[str] = None homepage: Optional[str] = None
@@ -2284,6 +2292,19 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags build_system_flags = PackageBase.build_system_flags
def install_dependency_symlinks(pkg, spec, prefix):
"""
Execute a dummy install and flatten dependencies.
This routine can be used in a ``package.py`` definition by setting
``install = install_dependency_symlinks``.
This feature comes in handy for creating a common location for the
the installation of third-party libraries.
"""
flatten_dependencies(spec, prefix)
def use_cray_compiler_names(): def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names.""" """Compiler names for builds that rely on cray compiler names."""
os.environ["CC"] = "cc" os.environ["CC"] = "cc"
@@ -2292,6 +2313,23 @@ def use_cray_compiler_names():
os.environ["F77"] = "ftn" os.environ["F77"] = "ftn"
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
name = dep.name
dep_path = spack.store.STORE.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + "/" + name)
conflict = dep_files.find_conflict(flat_dir + "/" + name)
if conflict:
raise DependencyConflictError(conflict)
dep_files.merge(flat_dir + "/" + name)
def possible_dependencies( def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]], *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True, transitive: bool = True,
View File
@@ -4,6 +4,7 @@
import hashlib import hashlib
import os import os
import os.path
import pathlib import pathlib
import sys import sys
from typing import Any, Dict, Optional, Tuple, Type, Union from typing import Any, Dict, Optional, Tuple, Type, Union
View File
@@ -52,7 +52,8 @@ def use_platform(new_platform):
import spack.config import spack.config
assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform' msg = '"{0}" must be an instance of Platform'
assert isinstance(new_platform, Platform), msg.format(new_platform)
original_host_fn = host original_host_fn = host
View File
@@ -1,22 +1,42 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details. # Copyright Spack Project Developers. See COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import warnings
from typing import Optional from typing import Optional
import archspec.cpu import archspec.cpu
import llnl.util.lang import llnl.util.lang
import spack.error
class NoPlatformError(spack.error.SpackError):
def __init__(self):
msg = "Could not determine a platform for this machine"
super().__init__(msg)
@llnl.util.lang.lazy_lexicographic_ordering @llnl.util.lang.lazy_lexicographic_ordering
class Platform: class Platform:
"""Platform is an abstract class extended by subclasses. """Platform is an abstract class extended by subclasses.
To add a new type of platform (such as cray_xe), create a subclass and set all the
class attributes such as priority, front_target, back_target, front_os, back_os.
Platform also contains a priority class attribute. A lower number signifies higher Platform also contains a priority class attribute. A lower number signifies higher
priority. These numbers are arbitrarily set and can be changed though often there priority. These numbers are arbitrarily set and can be changed though often there
isn't much need unless a new platform is added and the user wants that to be isn't much need unless a new platform is added and the user wants that to be
detected first. detected first.
Targets are created inside the platform subclasses. Most architecture (like linux,
and darwin) will have only one target family (x86_64) but in the case of Cray
machines, there is both a frontend and backend processor. The user can specify
which targets are present on front-end and back-end architecture.
Depending on the platform, operating systems are either autodetected or are
set. The user can set the frontend and backend operating setting by the class
attributes front_os and back_os. The operating system will be responsible for
compiler detection.
""" """
# Subclass sets number. Controls detection order # Subclass sets number. Controls detection order
@@ -25,72 +45,82 @@ class Platform:
#: binary formats used on this platform; used by relocation logic #: binary formats used on this platform; used by relocation logic
binary_formats = ["elf"] binary_formats = ["elf"]
default: str front_end: Optional[str] = None
default_os: str back_end: Optional[str] = None
default: Optional[str] = None # The default back end target.
front_os: Optional[str] = None
back_os: Optional[str] = None
default_os: Optional[str] = None
reserved_targets = ["default_target", "frontend", "fe", "backend", "be"] reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
reserved_oss = ["default_os", "frontend", "fe", "backend", "be"] reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
deprecated_names = ["frontend", "fe", "backend", "be"]
def __init__(self, name): def __init__(self, name):
self.targets = {} self.targets = {}
self.operating_sys = {} self.operating_sys = {}
self.name = name self.name = name
self._init_targets()
def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None: def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
"""Used by the platform specific subclass to list available targets.
Raises an error if the platform specifies a name
that is reserved by spack as an alias.
"""
if name in Platform.reserved_targets: if name in Platform.reserved_targets:
msg = f"{name} is a spack reserved alias and cannot be the name of a target" msg = "{0} is a spack reserved alias and cannot be the name of a target"
raise ValueError(msg) raise ValueError(msg.format(name))
self.targets[name] = target self.targets[name] = target
def _init_targets(self): def _add_archspec_targets(self):
self.default = archspec.cpu.host().name
for name, microarchitecture in archspec.cpu.TARGETS.items(): for name, microarchitecture in archspec.cpu.TARGETS.items():
self.add_target(name, microarchitecture) self.add_target(name, microarchitecture)
def target(self, name): def target(self, name):
"""This is a getter method for the target dictionary
that handles defaulting based on the values provided by default,
front-end, and back-end. This can be overwritten
by a subclass for which we want to provide further aliasing options.
"""
# TODO: Check if we can avoid using strings here
name = str(name) name = str(name)
if name in Platform.deprecated_names: if name == "default_target":
warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
if name in Platform.reserved_targets:
name = self.default name = self.default
elif name == "frontend" or name == "fe":
name = self.front_end
elif name == "backend" or name == "be":
name = self.back_end
return self.targets.get(name, None) return self.targets.get(name, None)
def add_operating_system(self, name, os_class): def add_operating_system(self, name, os_class):
if name in Platform.reserved_oss + Platform.deprecated_names: """Add the operating_system class object into the
msg = f"{name} is a spack reserved alias and cannot be the name of an OS" platform.operating_sys dictionary.
raise ValueError(msg) """
if name in Platform.reserved_oss:
msg = "{0} is a spack reserved alias and cannot be the name of an OS"
raise ValueError(msg.format(name))
self.operating_sys[name] = os_class self.operating_sys[name] = os_class
def default_target(self):
return self.target(self.default)
def default_operating_system(self):
return self.operating_system(self.default_os)
def operating_system(self, name): def operating_system(self, name):
if name in Platform.deprecated_names: if name == "default_os":
warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
if name in Platform.reserved_oss:
name = self.default_os name = self.default_os
if name == "frontend" or name == "fe":
name = self.front_os
if name == "backend" or name == "be":
name = self.back_os
return self.operating_sys.get(name, None) return self.operating_sys.get(name, None)
def setup_platform_environment(self, pkg, env): def setup_platform_environment(self, pkg, env):
"""Platform-specific build environment modifications. """Subclass can override this method if it requires any
platform-specific build environment modifications.
This method is meant to be overridden by subclasses, when needed.
""" """
pass pass
@classmethod @classmethod
def detect(cls): def detect(cls):
"""Returns True if the host platform is detected to be the current Platform class, """Return True if the the host platform is detected to be the current
False otherwise. Platform class, False otherwise.
Derived classes are responsible for implementing this method. Derived classes are responsible for implementing this method.
""" """
@@ -105,7 +135,11 @@ def __str__(self):
def _cmp_iter(self): def _cmp_iter(self):
yield self.name yield self.name
yield self.default yield self.default
yield self.front_end
yield self.back_end
yield self.default_os yield self.default_os
yield self.front_os
yield self.back_os
def targets(): def targets():
for t in sorted(self.targets.values()): for t in sorted(self.targets.values()):
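In the variant of this hunk that folds the frontend/fe/backend/be aliases into deprecated_names, the aliases still resolve, but they emit a warning and map to the default target. A condensed sketch with a hypothetical stand-in class:

    import warnings

    class MiniPlatform:
        reserved_targets = ("default_target", "frontend", "fe", "backend", "be")
        deprecated_names = ("frontend", "fe", "backend", "be")

        def __init__(self, default: str, targets: dict) -> None:
            self.default = default
            self.targets = targets

        def target(self, name: str):
            if name in self.deprecated_names:
                warnings.warn(
                    f"target={name} is deprecated, use target={self.default} instead"
                )
            if name in self.reserved_targets:
                name = self.default  # every alias resolves to the default
            return self.targets.get(name)

    p = MiniPlatform("x86_64", {"x86_64": "generic x86_64"})
    print(p.target("fe"))  # warns, then returns the default target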
View File
@@ -1,7 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details. # Copyright Spack Project Developers. See COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os.path
def slingshot_network(): def slingshot_network():
View File
@@ -4,6 +4,8 @@
import platform as py_platform import platform as py_platform
import archspec.cpu
from spack.operating_systems.mac_os import MacOs from spack.operating_systems.mac_os import MacOs
from spack.version import Version from spack.version import Version
@@ -17,8 +19,18 @@ class Darwin(Platform):
def __init__(self): def __init__(self):
super().__init__("darwin") super().__init__("darwin")
self._add_archspec_targets()
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
mac_os = MacOs() mac_os = MacOs()
self.default_os = str(mac_os) self.default_os = str(mac_os)
self.front_os = str(mac_os)
self.back_os = str(mac_os)
self.add_operating_system(str(mac_os), mac_os) self.add_operating_system(str(mac_os), mac_os)
@classmethod @classmethod
View File
@@ -3,6 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform import platform
import archspec.cpu
from spack.operating_systems.freebsd import FreeBSDOs from spack.operating_systems.freebsd import FreeBSDOs
from ._platform import Platform from ._platform import Platform
@@ -13,8 +15,18 @@ class FreeBSD(Platform):
def __init__(self): def __init__(self):
super().__init__("freebsd") super().__init__("freebsd")
self._add_archspec_targets()
# Get specific default
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
os = FreeBSDOs() os = FreeBSDOs()
self.default_os = str(os) self.default_os = str(os)
self.front_os = self.default_os
self.back_os = self.default_os
self.add_operating_system(str(os), os) self.add_operating_system(str(os), os)
@classmethod @classmethod
View File
@@ -3,6 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform import platform
import archspec.cpu
from spack.operating_systems.linux_distro import LinuxDistro from spack.operating_systems.linux_distro import LinuxDistro
from ._platform import Platform from ._platform import Platform
@@ -13,8 +15,18 @@ class Linux(Platform):
def __init__(self): def __init__(self):
super().__init__("linux") super().__init__("linux")
self._add_archspec_targets()
# Get specific default
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
linux_dist = LinuxDistro() linux_dist = LinuxDistro()
self.default_os = str(linux_dist) self.default_os = str(linux_dist)
self.front_os = self.default_os
self.back_os = self.default_os
self.add_operating_system(str(linux_dist), linux_dist) self.add_operating_system(str(linux_dist), linux_dist)
@classmethod @classmethod
View File
@@ -16,19 +16,31 @@ class Test(Platform):
if platform.system().lower() == "darwin": if platform.system().lower() == "darwin":
binary_formats = ["macho"] binary_formats = ["macho"]
if platform.machine() == "arm64":
front_end = "aarch64"
back_end = "m1"
default = "m1"
else:
front_end = "x86_64"
back_end = "core2"
default = "core2"
front_os = "redhat6"
back_os = "debian6"
default_os = "debian6" default_os = "debian6"
default = "m1" if platform.machine() == "arm64" else "core2"
def __init__(self, name=None): def __init__(self, name=None):
name = name or "test" name = name or "test"
super().__init__(name) super().__init__(name)
self.add_operating_system("debian6", spack.operating_systems.OperatingSystem("debian", 6)) self.add_target(self.default, archspec.cpu.TARGETS[self.default])
self.add_operating_system("redhat6", spack.operating_systems.OperatingSystem("redhat", 6)) self.add_target(self.front_end, archspec.cpu.TARGETS[self.front_end])
def _init_targets(self): self.add_operating_system(
targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2") self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
for t in targets: )
self.add_target(t, archspec.cpu.TARGETS[t]) self.add_operating_system(
self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
)
@classmethod @classmethod
def detect(cls): def detect(cls):
View File
@@ -4,6 +4,8 @@
import platform import platform
import archspec.cpu
from spack.operating_systems.windows_os import WindowsOs from spack.operating_systems.windows_os import WindowsOs
from ._platform import Platform from ._platform import Platform
@@ -14,8 +16,18 @@ class Windows(Platform):
def __init__(self): def __init__(self):
super().__init__("windows") super().__init__("windows")
self._add_archspec_targets()
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
windows_os = WindowsOs() windows_os = WindowsOs()
self.default_os = str(windows_os) self.default_os = str(windows_os)
self.front_os = str(windows_os)
self.back_os = str(windows_os)
self.add_operating_system(str(windows_os), windows_os) self.add_operating_system(str(windows_os), windows_os)
@classmethod @classmethod
View File
@@ -14,6 +14,7 @@
import inspect import inspect
import itertools import itertools
import os import os
import os.path
import random import random
import re import re
import shutil import shutil
@@ -1041,7 +1042,7 @@ def _read_config(self) -> Dict[str, str]:
return yaml_data["repo"] return yaml_data["repo"]
except OSError: except IOError:
tty.die(f"Error reading {self.config_file} when opening {self.root}") tty.die(f"Error reading {self.config_file} when opening {self.root}")
def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase": def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
@@ -1369,7 +1370,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
if subdir != packages_dir_name: if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n") config.write(f" subdirectory: '{subdir}'\n")
except OSError as e: except (IOError, OSError) as e:
# try to clean up. # try to clean up.
if existed: if existed:
shutil.rmtree(config_path, ignore_errors=True) shutil.rmtree(config_path, ignore_errors=True)
Some files were not shown because too many files have changed in this diff.