Compare commits
1 commit
hs/fix/qua...
packages/a

| Author | SHA1 | Date |
|---|---|---|
|  | 2eadde1c97 |  |
.github/workflows/coverage.yml (vendored, 2 changed lines)
@@ -29,7 +29,7 @@ jobs:
       - run: coverage xml

       - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303
+        uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
         with:
           verbose: true
           fail_ci_if_error: false
@@ -2,6 +2,6 @@ black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
-mypy==1.11.2
+mypy==1.8.0
 types-six==1.17.0.20241205
 vermin==1.6.0
.github/workflows/valid-style.yml (vendored, 6 changed lines)
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.13'
+          python-version: '3.11'
           cache: 'pip'
       - name: Install Python Packages
         run: |
@@ -39,7 +39,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.13'
+          python-version: '3.11'
           cache: 'pip'
       - name: Install Python packages
         run: |
@@ -58,7 +58,7 @@ jobs:
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.13'
+      python_version: '3.11'
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
@@ -25,6 +25,7 @@ exit 1
 # The code above runs this file with our preferred python interpreter.

 import os
+import os.path
 import sys

 min_python3 = (3, 6)
@@ -36,7 +36,7 @@ packages:
     go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-oneapi-ipp]
-    java: [openjdk, jdk]
+    java: [openjdk, jdk, ibm-java]
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
@@ -73,27 +73,15 @@ packages:
     permissions:
       read: world
       write: user
-  cray-fftw:
-    buildable: false
-  cray-libsci:
-    buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
-  cray-pmi:
-    buildable: false
   egl:
     buildable: false
-  essl:
-    buildable: false
   fujitsu-mpi:
     buildable: false
-  fujitsu-ssl2:
-    buildable: false
   hpcx-mpi:
     buildable: false
-  mpt:
-    buildable: false
   spectrum-mpi:
     buildable: false
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
    % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
    % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
-   % spack buildcache update-index /opt/bootstrap/bootstrap_cache

 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:

 .. code-block:: python

-   phases = ("configure", "build", "install")
+   phases = ["configure", "build", "install"]

 Similarly, ``cmake`` defines:

 .. code-block:: python

-   phases = ("bootstrap", "build", "install")
+   phases = ["bootstrap", "build", "install"]

 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
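Aside: the ``phases`` attribute is consumed by name, which is why changing it from a tuple to a list is behavior-preserving. A minimal sketch of that dispatch pattern (illustrative only; ``FakePackage`` and ``run_phases`` are not Spack code):

```python
class FakePackage:
    """Illustrative stand-in for a Spack package class."""

    phases = ["configure", "build", "install"]

    def configure(self):
        print("configuring")

    def build(self):
        print("building")

    def install(self):
        print("installing")


def run_phases(pkg):
    # Dispatch each phase by name, in the order the package lists them.
    for phase in pkg.phases:
        getattr(pkg, phase)()


run_phases(FakePackage())  # configuring / building / installing
```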
@@ -25,23 +25,14 @@ These settings can be overridden in ``etc/spack/config.yaml`` or
 The location where Spack will install packages and their dependencies.
 Default is ``$spack/opt/spack``.

----------------
-``projections``
----------------
+---------------------------------------------------
+``install_hash_length`` and ``install_path_scheme``
+---------------------------------------------------

-.. warning::
-
-   Modifying projections of the install tree is strongly discouraged.
-
-By default Spack installs all packages into a unique directory relative to the install
-tree root with the following layout:
-
-.. code-block::
-
-   {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
-
-In very rare cases, it may be necessary to reduce the length of this path. For example,
-very old versions of the Intel compiler are known to segfault when input paths are too long:
+The default Spack installation path can be very long and can create problems
+for scripts with hardcoded shebangs. Additionally, when using the Intel
+compiler, and if there is also a long list of dependencies, the compiler may
+segfault. If you see the following:

 .. code-block:: console
@@ -49,25 +40,36 @@ very old versions of the Intel compiler are known to segfault when input paths a
    ** Segmentation violation signal raised. **
    Access violation or stack overflow. Please contact Intel Support for assistance.

-Another case is Python and R packages with many runtime dependencies, which can result
-in very large ``PYTHONPATH`` and ``R_LIBS`` environment variables. This can cause the
-``execve`` system call to fail with ``E2BIG``, preventing processes from starting.
+it may be because variables containing dependency specs may be too long. There
+are two parameters to help with long path names. Firstly, the
+``install_hash_length`` parameter can set the length of the hash in the
+installation path from 1 to 32. The default path uses the full 32 characters.

-For this reason, Spack allows users to modify the installation layout through custom
-projections. For example
+Secondly, it is also possible to modify the entire installation
+scheme. By default Spack uses
+``{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}``
+where the tokens that are available for use in this directive are the
+same as those understood by the :meth:`~spack.spec.Spec.format`
+method. Using this parameter it is possible to use a different package
+layout or reduce the depth of the installation paths. For example

 .. code-block:: yaml

    config:
-     install_tree:
-       root: $spack/opt/spack
-       projections:
-         all: "{name}/{version}/{hash:16}"
+     install_path_scheme: '{name}/{version}/{hash:7}'

-would install packages into sub-directories using only the package name, version and a
-hash length of 16 characters.
+would install packages into sub-directories using only the package
+name, version and a hash length of 7 characters.

-Notice that reducing the hash length increases the likelihood of hash collisions.
+When using either parameter to set the hash length it only affects the
+representation of the hash in the installation directory. You
+should be aware that the smaller the hash length the more likely
+naming conflicts will occur. These parameters are independent of those
+used to configure module names.
+
+.. warning:: Modifying the installation hash length or path scheme after
+             packages have been installed will prevent Spack from being
+             able to find the old installation directories.

 --------------------
 ``build_stage``
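Aside: the warning that shorter hashes make collisions more likely can be made concrete with a birthday-bound estimate. A sketch (the function and numbers are illustrative, not from Spack):

```python
import math


def collision_probability(n_specs: int, hash_chars: int) -> float:
    # Birthday bound: chance of at least one collision among n_specs
    # hashes truncated to hash_chars base-16 characters.
    buckets = 16 ** hash_chars
    return 1.0 - math.exp(-n_specs * (n_specs - 1) / (2.0 * buckets))


# With ~10,000 installed specs, a 7-character hash already collides with
# noticeable probability, while 16 characters is effectively collision-free.
print(f"{collision_probability(10_000, 7):.0%}")   # ~17%
print(f"{collision_probability(10_000, 16):.1e}")  # ~2.7e-12
```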
@@ -543,10 +543,10 @@ With either interpreter you can run a single command:

 .. code-block:: console

-   $ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
+   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
    ...

-   $ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
+   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
    Out[1]: ...

 or a file:
@@ -456,13 +456,14 @@ For instance, the following config options,

     tcl:
       all:
         suffixes:
-          ^python@3: 'python{^python.version.up_to_2}'
+          ^python@3: 'python{^python.version}'
           ^openblas: 'openblas'

-will add a ``python3.12`` to module names of packages compiled with Python 3.12, and similarly for
-all specs depending on ``python@3``. This is useful to know which version of Python a set of Python
-extensions is associated with. Likewise, the ``openblas`` string is attached to any program that
-has openblas in the spec, most likely via the ``+blas`` variant specification.
+will add a ``python-3.12.1`` version string to any packages compiled with
+Python matching the spec, ``python@3``. This is useful to know which
+version of Python a set of Python extensions is associated with. Likewise, the
+``openblas`` string is attached to any program that has openblas in the spec,
+most likely via the ``+blas`` variant specification.

 The most heavyweight solution to module naming is to change the entire
 naming convention for module files. This uses the projections format
@@ -4,7 +4,7 @@ sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
 python-levenshtein==0.26.1
 docutils==0.21.2
-pygments==2.19.1
+pygments==2.18.0
 urllib3==2.3.0
 pytest==8.3.4
 isort==5.13.2
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os
+import os.path
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit
@@ -75,6 +75,7 @@
     "install_tree",
     "is_exe",
     "join_path",
+    "last_modification_time_recursive",
     "library_extensions",
     "mkdirp",
     "partition_path",
@@ -1469,36 +1470,15 @@ def set_executable(path):


 @system_path_filter
-def recursive_mtime_greater_than(path: str, time: float) -> bool:
-    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
-    # use bfs order to increase likelihood of early return
-    queue: Deque[str] = collections.deque([path])
-
-    if os.stat(path).st_mtime > time:
-        return True
-
-    while queue:
-        current = queue.popleft()
-
-        try:
-            entries = os.scandir(current)
-        except OSError:
-            continue
-
-        with entries:
-            for entry in entries:
-                try:
-                    st = entry.stat(follow_symlinks=False)
-                except OSError:
-                    continue
-
-                if st.st_mtime > time:
-                    return True
-
-                if entry.is_dir(follow_symlinks=False):
-                    queue.append(entry.path)
-
-    return False
+def last_modification_time_recursive(path):
+    path = os.path.abspath(path)
+    times = [os.stat(path).st_mtime]
+    times.extend(
+        os.lstat(os.path.join(root, name)).st_mtime
+        for root, dirs, files in os.walk(path)
+        for name in dirs + files
+    )
+    return max(times)


 @system_path_filter
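Aside on the two implementations above: the base branch's version is a predicate that can stop at the first entry newer than the cutoff, while the version restored here stats every entry to compute a maximum. A minimal standalone sketch of the early-return strategy (illustrative, not Spack code):

```python
import os


def mtime_newer_than(path: str, cutoff: float) -> bool:
    # Early-return variant: stop at the first entry newer than the cutoff
    # instead of stat-ing the whole tree to compute a maximum.
    if os.stat(path).st_mtime > cutoff:
        return True
    for root, dirs, files in os.walk(path):
        for name in dirs + files:
            try:
                if os.lstat(os.path.join(root, name)).st_mtime > cutoff:
                    return True
            except OSError:
                continue  # skip entries that disappear mid-walk
    return False
```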
@@ -1760,7 +1740,8 @@ def find(


 def _log_file_access_issue(e: OSError, path: str) -> None:
-    tty.debug(f"find must skip {path}: {e}")
+    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
+    tty.debug(f"find must skip {path}: {errno_name} {e}")


 def _file_id(s: os.stat_result) -> Tuple[int, int]:
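Aside: ``errno.errorcode`` is a standard-library mapping from error numbers to their symbolic names, which is what makes the new debug message more readable. A tiny standalone example:

```python
import errno

e = OSError(errno.EACCES, "permission denied")
name = errno.errorcode.get(e.errno, "UNKNOWN")
print(f"find must skip /some/path: {name} {e}")
# find must skip /some/path: EACCES [Errno 13] permission denied
```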
@@ -1356,8 +1356,14 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):

     def _compare_extra_attribute(_expected, _detected, *, _spec):
         result = []
+        # Check items are of the same type
+        if not isinstance(_detected, type(_expected)):
+            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+            _details = [f"{_detected} was detected instead"]
+            return [error_cls(summary=_summary, details=_details)]
+
         # If they are string expected is a regex
-        if isinstance(_expected, str) and isinstance(_detected, str):
+        if isinstance(_expected, str):
             try:
                 _regex = re.compile(_expected)
             except re.error:
@@ -1373,7 +1379,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 _details = [f"{_detected} does not match the regex"]
                 return [error_cls(summary=_summary, details=_details)]

-        elif isinstance(_expected, dict) and isinstance(_detected, dict):
+        if isinstance(_expected, dict):
             _not_detected = set(_expected.keys()) - set(_detected.keys())
             if _not_detected:
                 _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
@@ -1388,10 +1394,6 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 result.extend(
                     _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                 )
-        else:
-            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
-            _details = [f"{_detected} was detected instead"]
-            return [error_cls(summary=_summary, details=_details)]

         return result
@@ -5,7 +5,6 @@
 import codecs
 import collections
 import concurrent.futures
-import contextlib
 import copy
 import hashlib
 import io
@@ -24,7 +23,7 @@
 import urllib.request
 import warnings
 from contextlib import closing
-from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -92,9 +91,6 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2


-INDEX_HASH_FILE = "index.json.hash"
-
-
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.
@@ -506,7 +502,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
         scheme = urllib.parse.urlparse(mirror_url).scheme

         if scheme != "oci" and not web_util.url_exists(
-            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
         ):
             return False
@@ -595,18 +591,32 @@ def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
     f.seek(0)


-def specs_to_relocate(spec: spack.spec.Spec) -> List[spack.spec.Spec]:
-    """Return the set of specs that may be referenced in the install prefix of the provided spec.
-    We currently include non-external transitive link and direct run dependencies."""
-    specs = [
+def deps_to_relocate(spec):
+    """Return the transitive link and direct run dependencies of the spec.
+
+    This is a special traversal for dependencies we need to consider when relocating a package.
+
+    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
+    we need to rewrite those locations when dependencies are in a different place at install time
+    than they were at build time.
+
+    This traversal covers transitive link dependencies and direct run dependencies because:
+
+    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
+       dependency libraries.
+    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
+       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
+
+    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
+    """
+    deps = [
         s
         for s in itertools.chain(
-            spec.traverse(root=True, deptype="link", order="breadth", key=traverse.by_dag_hash),
-            spec.dependencies(deptype="run"),
+            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
         if not s.external
     ]
-    return list(llnl.util.lang.dedupe(specs, key=lambda s: s.dag_hash()))
+    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())


 def get_buildinfo_dict(spec):
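Aside: both versions of this function rely on the same idiom, chaining two traversals and keeping the first spec seen per DAG hash. A generic sketch of that idiom (illustrative; ``dedupe_by_key`` is a stand-in for ``llnl.util.lang.dedupe``):

```python
from typing import Callable, Iterable, Iterator, TypeVar

T = TypeVar("T")


def dedupe_by_key(items: Iterable[T], key: Callable[[T], object]) -> Iterator[T]:
    # Keep only the first item seen per key, preserving order.
    seen = set()
    for item in items:
        k = key(item)
        if k not in seen:
            seen.add(k)
            yield item


# Same shape as the call above: specs chained from two traversals,
# deduplicated by DAG hash (here modeled as (name, hash) tuples).
specs = [("zlib", "aaa111"), ("cmake", "bbb222"), ("zlib", "aaa111")]
print(list(dedupe_by_key(specs, key=lambda s: s[1])))
# [('zlib', 'aaa111'), ('cmake', 'bbb222')]
```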
@@ -620,7 +630,7 @@ def get_buildinfo_dict(spec):
         # "relocate_binaries": [],
         # "relocate_links": [],
         "hardlinks_deduped": True,
-        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in specs_to_relocate(spec)},
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
     }
@@ -673,24 +683,19 @@ def sign_specfile(key: str, specfile_path: str) -> str:


 def _read_specs_and_push_index(
-    file_list: List[str],
-    read_method: Callable,
-    cache_prefix: str,
-    db: BuildCacheDatabase,
-    temp_dir: str,
-    concurrency: int,
+    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
 ):
     """Read all the specs listed in the provided list, using thread given thread parallelism,
     generate the index, and push it to the mirror.

     Args:
-        file_list: List of urls or file paths pointing at spec files to read
+        file_list (list(str)): List of urls or file paths pointing at spec files to read
         read_method: A function taking a single argument, either a url or a file path,
             and which reads the spec file at that location, and returns the spec.
-        cache_prefix: prefix of the build cache on s3 where index should be pushed.
+        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
         db: A spack database used for adding specs and then writing the index.
-        temp_dir: Location to write index.json and hash for pushing
-        concurrency: Number of parallel processes to use when fetching
+        temp_dir (str): Location to write index.json and hash for pushing
+        concurrency (int): Number of parallel processes to use when fetching
     """
     for file in file_list:
         contents = read_method(file)
@@ -708,7 +713,7 @@ def _read_specs_and_push_index(

     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
+    index_json_path = os.path.join(temp_dir, "index.json")
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
@@ -718,14 +723,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)

     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
+    index_hash_path = os.path.join(temp_dir, "index.json.hash")
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)

     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
+        url_util.join(cache_prefix, "index.json"),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
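Aside: these hunks write an ``index.json`` next to an ``index.json.hash`` companion, so clients can fetch the tiny hash file first and skip downloading an unchanged index. A minimal sketch of that pattern, assuming SHA-256 (which the 64-hex-character validation in ``get_remote_hash`` further down suggests ``compute_hash`` produces):

```python
import hashlib
import os
import tempfile


def write_index_and_hash(index_string: str, out_dir: str) -> str:
    # Write the index and a small companion file holding its digest.
    digest = hashlib.sha256(index_string.encode("utf-8")).hexdigest()
    with open(os.path.join(out_dir, "index.json"), "w", encoding="utf-8") as f:
        f.write(index_string)
    with open(os.path.join(out_dir, "index.json.hash"), "w", encoding="utf-8") as f:
        f.write(digest)
    return digest


with tempfile.TemporaryDirectory() as d:
    print(write_index_and_hash('{"database": {}}', d))
```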
@@ -733,7 +738,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, INDEX_HASH_FILE),
+        url_util.join(cache_prefix, "index.json.hash"),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -802,7 +807,7 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except (web_util.SpackWebError, OSError) as e:
+        except web_util.SpackWebError as e:
             tty.error(f"Error reading specfile: {url}: {e}")
         return contents
@@ -870,12 +875,9 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
     tty.debug(f"Retrieving spec descriptor files from {url} to build index")

     db = BuildCacheDatabase(tmpdir)
-    db._write()

     try:
-        _read_specs_and_push_index(
-            file_list, read_fn, url, db, str(db.database_directory), concurrency
-        )
+        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
     except Exception as e:
         raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e
@@ -1110,7 +1112,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E


 def prefixes_to_relocate(spec):
-    prefixes = [s.prefix for s in specs_to_relocate(spec)]
+    prefixes = [s.prefix for s in deps_to_relocate(spec)]
     prefixes.append(spack.hooks.sbang.sbang_install_path())
     prefixes.append(str(spack.store.STORE.layout.root))
     return prefixes
@@ -1789,7 +1791,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)

     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
+    index_json_path = os.path.join(tmpdir, "index.json")
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
@@ -2010,7 +2012,7 @@ def fetch_url_to_mirror(url):

             # Download the config = spec.json and the relevant tarball
             try:
-                manifest = json.load(response)
+                manifest = json.loads(response.read())
                 spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                 tarball_digest = spack.oci.image.Digest.from_string(
                     manifest["layers"][-1]["digest"]
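Aside: for a file-like HTTP response these two calls are equivalent; ``json.load`` simply reads the stream itself. A self-contained demonstration:

```python
import io
import json

payload = b'{"config": {"digest": "sha256:abc"}, "layers": []}'

# json.load consumes a file-like object directly ...
manifest_a = json.load(io.BytesIO(payload))
# ... while json.loads needs the bytes read out first; the result is identical.
manifest_b = json.loads(io.BytesIO(payload).read())
assert manifest_a == manifest_b
```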
@@ -2137,9 +2139,10 @@ def fetch_url_to_mirror(url):


 def dedupe_hardlinks_if_necessary(root, buildinfo):
-    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
-    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
-    when relocating."""
+    """Updates a buildinfo dict for old archives that did
+    not dedupe hardlinks. De-duping hardlinks is necessary
+    when relocating files in parallel and in-place. This
+    means we must preserve inodes when relocating."""

     # New archives don't need this.
     if buildinfo.get("hardlinks_deduped", False):
@@ -2168,48 +2171,65 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         buildinfo[key] = new_list


-def relocate_package(spec: spack.spec.Spec) -> None:
-    """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
-    spec_prefix = str(spec.prefix)
-    buildinfo = read_buildinfo_file(spec_prefix)
+def relocate_package(spec):
+    """
+    Relocate the given package
+    """
+    workdir = str(spec.prefix)
+    buildinfo = read_buildinfo_file(workdir)
+    new_layout_root = str(spack.store.STORE.layout.root)
+    new_prefix = str(spec.prefix)
+    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
+    new_spack_prefix = str(spack.paths.prefix)
+
+    old_sbang_install_path = None
+    if "sbang_install_path" in buildinfo:
+        old_sbang_install_path = str(buildinfo["sbang_install_path"])
     old_layout_root = str(buildinfo["buildpath"])
+    old_spack_prefix = str(buildinfo.get("spackprefix"))
+    old_rel_prefix = buildinfo.get("relative_prefix")
+    old_prefix = os.path.join(old_layout_root, old_rel_prefix)
+    rel = buildinfo.get("relative_rpaths", False)

-    # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
-    if buildinfo.get("relative_rpaths", False):
-        tty.warn(
-            f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
-        )
-
-    # In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
-    # prefixes were not unique.
+    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
+    # were not unique.
     if "hash_to_prefix" in buildinfo:
         hash_to_old_prefix = buildinfo["hash_to_prefix"]
     elif "prefix_to_hash" in buildinfo:
-        hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
+        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
     else:
-        raise NewLayoutException(
-            "Package tarball was created from an install prefix with a different directory layout "
-            "and an older buildcache create implementation. It cannot be relocated."
-        )
+        hash_to_old_prefix = dict()

-    prefix_to_prefix: Dict[str, str] = {}
+    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
+        msg = "Package tarball was created from an install "
+        msg += "prefix with a different directory layout and an older "
+        msg += "buildcache create implementation. It cannot be relocated."
+        raise NewLayoutException(msg)

-    if "sbang_install_path" in buildinfo:
-        old_sbang_install_path = str(buildinfo["sbang_install_path"])
-        prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
+    # Spurious replacements (e.g. sbang) will cause issues with binaries
+    # For example, the new sbang can be longer than the old one.
+    # Hence 2 dictionaries are maintained here.
+    prefix_to_prefix_text = collections.OrderedDict()
+    prefix_to_prefix_bin = collections.OrderedDict()

-    # First match specific prefix paths. Possibly the *local* install prefix of some dependency is
-    # in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
-    # the new spack store root.
+    if old_sbang_install_path:
+        install_path = spack.hooks.sbang.sbang_install_path()
+        prefix_to_prefix_text[old_sbang_install_path] = install_path

-    # If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
-    # to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
-    # is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
-    # for nodes not in the build_spec without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals in
-    # the context of the relevant root spec. This ensures that the analog for a spec s is the spec
-    # that s replaced when we spliced.
-    relocation_specs = specs_to_relocate(spec)
+    # First match specific prefix paths. Possibly the *local* install prefix
+    # of some dependency is in an upstream, so we cannot assume the original
+    # spack store root can be mapped uniformly to the new spack store root.
+    #
+    # If the spec is spliced, we need to handle the simultaneous mapping
+    # from the old install_tree to the new install_tree and from the build_spec
+    # to the spliced spec.
+    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
+    # by checking for spliced-in nodes by checking for nodes not in the build_spec
+    # without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
+    # in the context of the relevant root spec. This ensures that the analog for a spec s
+    # is the spec that s replaced when we spliced.
+    relocation_specs = deps_to_relocate(spec)
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
     for s in relocation_specs:
         analog = s
@@ -2228,66 +2248,98 @@ def relocate_package(spec: spack.spec.Spec) -> None:
         lookup_dag_hash = analog.dag_hash()
         if lookup_dag_hash in hash_to_old_prefix:
             old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)

     # Only then add the generic fallback of install prefix -> install prefix.
-    prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
+    prefix_to_prefix_text[old_prefix] = new_prefix
+    prefix_to_prefix_bin[old_prefix] = new_prefix
+    prefix_to_prefix_text[old_layout_root] = new_layout_root
+    prefix_to_prefix_bin[old_layout_root] = new_layout_root

-    # Delete identity mappings from prefix_to_prefix
-    prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
+    # This is vestigial code for the *old* location of sbang. Previously,
+    # sbang was a bash script, and it lived in the spack prefix. It is
+    # now a POSIX script that lives in the install prefix. Old packages
+    # will have the old sbang location in their shebangs.
+    orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
+    new_sbang = spack.hooks.sbang.sbang_shebang_line()
+    prefix_to_prefix_text[orig_sbang] = new_sbang

-    # If there's nothing to relocate, we're done.
-    if not prefix_to_prefix:
-        return
+    tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))

-    for old, new in prefix_to_prefix.items():
-        tty.debug(f"Relocating: {old} => {new}.")
+    # Old archives maybe have hardlinks repeated.
+    dedupe_hardlinks_if_necessary(workdir, buildinfo)

-    # Old archives may have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)
+    def is_backup_file(file):
+        return file.endswith("~")

     # Text files containing the prefix text
-    textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
-    binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
-    links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
+    text_names = list()
+    for filename in buildinfo["relocate_textfiles"]:
+        text_name = os.path.join(workdir, filename)
+        # Don't add backup files generated by filter_file during install step.
+        if not is_backup_file(text_name):
+            text_names.append(text_name)

-    platform = spack.platforms.by_name(spec.platform)
-    if "macho" in platform.binary_formats:
-        relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
-    elif "elf" in platform.binary_formats:
-        relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
+    # If we are not installing back to the same install tree do the relocation
+    if old_prefix != new_prefix:
+        files_to_relocate = [
+            os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
+        ]
+        # If the buildcache was not created with relativized rpaths
+        # do the relocation of path in binaries
+        platform = spack.platforms.by_name(spec.platform)
+        if "macho" in platform.binary_formats:
+            relocate.relocate_macho_binaries(
+                files_to_relocate,
+                old_layout_root,
+                new_layout_root,
+                prefix_to_prefix_bin,
+                rel,
+                old_prefix,
+                new_prefix,
+            )
+        elif "elf" in platform.binary_formats and not rel:
+            # The new ELF dynamic section relocation logic only handles absolute to
+            # absolute relocation.
+            relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
+        elif "elf" in platform.binary_formats and rel:
+            relocate.relocate_elf_binaries(
+                files_to_relocate,
+                old_layout_root,
+                new_layout_root,
+                prefix_to_prefix_bin,
+                rel,
+                old_prefix,
+                new_prefix,
+            )

-    relocate.relocate_links(links, prefix_to_prefix)
-    relocate.relocate_text(textfiles, prefix_to_prefix)
-    changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
+        # Relocate links to the new install prefix
+        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
+        relocate.relocate_links(links, prefix_to_prefix_bin)

-    # Add ad-hoc signatures to patched macho files when on macOS.
-    if "macho" in platform.binary_formats and sys.platform == "darwin":
-        codesign = which("codesign")
-        if not codesign:
-            return
-        for binary in changed_files:
-            # preserve the original inode by running codesign on a copy
-            with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-                codesign("-fs-", tmp_binary)
+        # For all buildcaches
+        # relocate the install prefixes in text files including dependencies
+        relocate.relocate_text(text_names, prefix_to_prefix_text)

-    install_manifest = os.path.join(
-        spec.prefix,
-        spack.store.STORE.layout.metadata_dir,
-        spack.store.STORE.layout.manifest_file_name,
-    )
-    if not os.path.exists(install_manifest):
-        spec_id = spec.format("{name}/{hash:7}")
-        tty.warn("No manifest file in tarball for spec %s" % spec_id)
+        # relocate the install prefixes in binary files including dependencies
+        changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

-    # overwrite old metadata with new
-    if spec.spliced:
-        # rewrite spec on disk
-        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
+        # Add ad-hoc signatures to patched macho files when on macOS.
+        if "macho" in platform.binary_formats and sys.platform == "darwin":
+            codesign = which("codesign")
+            if not codesign:
+                return
+            for binary in changed_files:
+                # preserve the original inode by running codesign on a copy
+                with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+                    codesign("-fs-", tmp_binary)

-    # de-cache the install manifest
-    with contextlib.suppress(FileNotFoundError):
-        os.unlink(install_manifest)
+    # If we are installing back to the same location
+    # relocate the sbang location if the spack directory changed
+    else:
+        if old_spack_prefix != new_spack_prefix:
+            relocate.relocate_text(text_names, prefix_to_prefix_text)


 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
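Aside: in both variants, specific dependency prefixes are inserted into the mapping before the generic store-root fallback, because replacements are applied in insertion order. A sketch of why that ordering matters (the paths are hypothetical):

```python
def apply_prefix_map(text: str, prefix_to_prefix: dict) -> str:
    # Replacements run in insertion order (dicts preserve it), so the
    # specific entries must be added before the generic store-root fallback.
    for old, new in prefix_to_prefix.items():
        text = text.replace(old, new)
    return text


mapping = {
    # specific: a dependency that lives in an upstream store
    "/old/store/zlib-abcdefg": "/upstream/store/zlib-abcdefg",
    # generic fallback, added last
    "/old/store": "/new/store",
}
print(apply_prefix_map("prefix=/old/store/zlib-abcdefg", mapping))
# prefix=/upstream/store/zlib-abcdefg  (not /new/store/zlib-abcdefg)
```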
@@ -2455,6 +2507,15 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
         except Exception as e:
             shutil.rmtree(spec.prefix, ignore_errors=True)
             raise e
+        else:
+            manifest_file = os.path.join(
+                spec.prefix,
+                spack.store.STORE.layout.metadata_dir,
+                spack.store.STORE.layout.manifest_file_name,
+            )
+            if not os.path.exists(manifest_file):
+                spec_id = spec.format("{name}/{hash:7}")
+                tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
             shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2559,6 +2620,10 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
+    if spec.spliced:  # overwrite old metadata with new
+        spack.store.STORE.layout.write_spec(
+            spec, spack.store.STORE.layout.spec_file_path(spec)
+        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
@@ -2596,14 +2661,11 @@ def try_direct_fetch(spec, mirrors=None):
         )
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
-            specfile_contents = codecs.getreader("utf-8")(fs).read()
             specfile_is_signed = True
-        except (web_util.SpackWebError, OSError) as e1:
+        except web_util.SpackWebError as e1:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-                specfile_contents = codecs.getreader("utf-8")(fs).read()
-                specfile_is_signed = False
-            except (web_util.SpackWebError, OSError) as e2:
+            except web_util.SpackWebError as e2:
                 tty.debug(
                     f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                     e1,
@@ -2613,6 +2675,7 @@ def try_direct_fetch(spec, mirrors=None):
                     f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                 )
                 continue
+        specfile_contents = codecs.getreader("utf-8")(fs).read()

         # read the spec from the build cache file. All specs in build caches
         # are concrete (as they are built) so we need to mark this spec
@@ -2706,9 +2769,8 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

     try:
         _, _, json_file = web_util.read_from_url(keys_index)
-        json_index = sjson.load(json_file)
-    except (web_util.SpackWebError, OSError, ValueError) as url_err:
-        # TODO: avoid repeated request
+        json_index = sjson.load(codecs.getreader("utf-8")(json_file))
+    except web_util.SpackWebError as url_err:
         if web_util.url_exists(keys_index):
             tty.error(
                 f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2955,14 +3017,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

     def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-            remote_hash = response.read(64)
-        except OSError:
+        except (TimeoutError, urllib.error.URLError):
             return None

         # Validate the hash
+        remote_hash = response.read(64)
         if not re.match(rb"[a-f\d]{64}$", remote_hash):
             return None
         return remote_hash.decode("utf-8")
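Aside: the ``re.match(rb"[a-f\d]{64}$", ...)`` check guards against a server returning something other than a SHA-256 hex digest, for example an HTML error page served with a 200 status. A standalone sketch of the same shape check:

```python
import re


def looks_like_sha256(data: bytes) -> bool:
    # Exactly 64 lowercase hex characters, nothing else.
    return re.match(rb"[a-f\d]{64}$", data) is not None


assert looks_like_sha256(b"a" * 64)
assert not looks_like_sha256(b"<html>503 Service Unavailable</html>")
```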
@@ -2976,17 +3038,17 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")

         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except OSError as e:
-            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

         try:
             result = codecs.getreader("utf-8")(response).read()
-        except (ValueError, OSError) as e:
-            raise FetchIndexError(f"Remote index {url_index} is invalid") from e
+        except ValueError as e:
+            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e

         computed_hash = compute_hash(result)
@@ -3020,7 +3082,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
         headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

         try:
@@ -3030,12 +3092,12 @@ def conditional_fetch(self) -> FetchIndexResult:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
             raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except OSError as e:  # URLError, socket.timeout, etc.
+        except (TimeoutError, urllib.error.URLError) as e:
             raise FetchIndexError(f"Could not fetch index {url}", e) from e

         try:
             result = codecs.getreader("utf-8")(response).read()
-        except (ValueError, OSError) as e:
+        except ValueError as e:
             raise FetchIndexError(f"Remote index {url} is invalid", e) from e

         headers = response.headers
@@ -3067,11 +3129,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except OSError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
             raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

         try:
-            manifest = json.load(response)
+            manifest = json.loads(response.read())
         except Exception as e:
             raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
@@ -3086,16 +3148,14 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

         # Otherwise fetch the blob / index.json
-        try:
-            response = self.urlopen(
-                urllib.request.Request(
-                    url=self.ref.blob_url(index_digest),
-                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
-                )
-            )
-            result = codecs.getreader("utf-8")(response).read()
-        except (OSError, ValueError) as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+        response = self.urlopen(
+            urllib.request.Request(
+                url=self.ref.blob_url(index_digest),
+                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+            )
+        )
+        result = codecs.getreader("utf-8")(response).read()

         # Make sure the blob we download has the advertised hash
         if compute_hash(result) != index_digest.digest:
|||||||
@@ -5,14 +5,12 @@
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import glob
|
import glob
|
||||||
import importlib
|
import importlib
|
||||||
import os
|
import os.path
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import sysconfig
|
import sysconfig
|
||||||
import warnings
|
import warnings
|
||||||
from typing import Optional, Sequence, Union
|
from typing import Dict, Optional, Sequence, Union
|
||||||
|
|
||||||
from typing_extensions import TypedDict
|
|
||||||
|
|
||||||
import archspec.cpu
|
import archspec.cpu
|
||||||
|
|
||||||
@@ -20,17 +18,13 @@
 from llnl.util import tty

 import spack.platforms
-import spack.spec
 import spack.store
 import spack.util.environment
 import spack.util.executable

 from .config import spec_for_current_python

-
-class QueryInfo(TypedDict, total=False):
-    spec: spack.spec.Spec
-    command: spack.util.executable.Executable
+QueryInfo = Dict[str, "spack.spec.Spec"]


 def _python_import(module: str) -> bool:
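Aside: the base branch's ``TypedDict`` gives per-key value types that the plain ``Dict`` alias cannot express, notably for the "command" entry, whose value is not a spec. A minimal sketch (placeholder ``object`` types stand in for the real Spack classes):

```python
from typing import TypedDict  # typing_extensions.TypedDict on Python < 3.8


class QueryInfoSketch(TypedDict, total=False):
    spec: object     # placeholder for spack.spec.Spec
    command: object  # placeholder for spack.util.executable.Executable


info: QueryInfoSketch = {}
info["spec"] = "zlib@1.3"  # a type checker validates keys and value types
```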
@@ -217,9 +211,7 @@ def _executables_in_store(
     ):
         spack.util.environment.path_put_first("PATH", [bin_dir])
         if query_info is not None:
-            query_info["command"] = spack.util.executable.which(
-                *executables, path=bin_dir, required=True
-            )
+            query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
             query_info["spec"] = concrete_spec
         return True
     return False
@@ -27,9 +27,9 @@
 class ClingoBootstrapConcretizer:
     def __init__(self, configuration):
         self.host_platform = spack.platforms.host()
-        self.host_os = self.host_platform.default_operating_system()
+        self.host_os = self.host_platform.operating_system("frontend")
         self.host_target = archspec.cpu.host().family
-        self.host_architecture = spack.spec.ArchSpec.default_arch()
+        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
         self.host_architecture.target = str(self.host_target)
         self.host_compiler = self._valid_compiler_or_raise()
         self.host_python = self.python_external_spec()
@@ -4,7 +4,7 @@
 """Manage configuration swapping for bootstrapping purposes"""

 import contextlib
-import os
+import os.path
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence
@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:


 def _add_compilers_if_missing() -> None:
-    arch = spack.spec.ArchSpec.default_arch()
+    arch = spack.spec.ArchSpec.frontend_arch()
     if not spack.compilers.compilers_for_arch(arch):
         spack.compilers.find_compilers()
@@ -25,6 +25,7 @@
 import functools
 import json
 import os
+import os.path
 import sys
 import uuid
 from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -33,10 +34,8 @@
 from llnl.util.lang import GroupedExceptionHandler

 import spack.binary_distribution
-import spack.concretize
 import spack.config
 import spack.detection
-import spack.error
 import spack.mirrors.mirror
 import spack.platforms
 import spack.spec
@@ -45,17 +44,10 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
-import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller

-from ._common import (
-    QueryInfo,
-    _executables_in_store,
-    _python_import,
-    _root_spec,
-    _try_import_from_store,
-)
+from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
 from .clingo import ClingoBootstrapConcretizer
 from .config import spack_python_interpreter, spec_for_current_python
@@ -97,12 +89,8 @@ def __init__(self, conf: ConfigDictionary) -> None:
         self.name = conf["name"]
         self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

-        # Check for relative paths, and turn them into absolute paths
-        # root is the metadata_dir
-        maybe_url = conf["info"]["url"]
-        if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
-            maybe_url = os.path.join(self.metadata_dir, maybe_url)
-        self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
+        # Promote (relative) paths to file urls
+        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url

     @property
     def mirror_scope(self) -> spack.config.InternalConfigScope:
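The removed branch anchors a relative path against the metadata directory before treating it as a mirror URL. The same idea in a standalone sketch; the helper name here is hypothetical, and the scheme check stands in for Spack's own spack.util.url.is_path_instead_of_url:

import os.path
from urllib.parse import urlparse

def promote_to_url(maybe_url: str, root: str) -> str:
    # Treat anything without a URL scheme as a filesystem path.
    if not urlparse(maybe_url).scheme:
        if not os.path.isabs(maybe_url):
            maybe_url = os.path.join(root, maybe_url)
        return "file://" + maybe_url
    return maybe_url

print(promote_to_url("mirrors/bootstrap", "/opt/spack/metadata"))
# file:///opt/spack/metadata/mirrors/bootstrap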
@@ -146,7 +134,7 @@ class BuildcacheBootstrapper(Bootstrapper):

     def __init__(self, conf) -> None:
         super().__init__(conf)
-        self.last_search: Optional[QueryInfo] = None
+        self.last_search: Optional[ConfigDictionary] = None
         self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

     @staticmethod
@@ -223,14 +211,14 @@ def _install_and_test(
             for _, pkg_hash, pkg_sha256 in item["binaries"]:
                 self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

-            info: QueryInfo = {}
+            info: ConfigDictionary = {}
             if test_fn(query_spec=abstract_spec, query_info=info):
                 self.last_search = info
                 return True
         return False

     def try_import(self, module: str, abstract_spec_str: str) -> bool:
-        info: QueryInfo
+        info: ConfigDictionary
         test_fn, info = functools.partial(_try_import_from_store, module), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
             return True
@@ -243,7 +231,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
         return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

     def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
-        info: QueryInfo
+        info: ConfigDictionary
         test_fn, info = functools.partial(_executables_in_store, executables), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
             self.last_search = info
@@ -261,11 +249,11 @@ class SourceBootstrapper(Bootstrapper):

     def __init__(self, conf) -> None:
         super().__init__(conf)
-        self.last_search: Optional[QueryInfo] = None
+        self.last_search: Optional[ConfigDictionary] = None
         self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

     def try_import(self, module: str, abstract_spec_str: str) -> bool:
-        info: QueryInfo = {}
+        info: ConfigDictionary = {}
         if _try_import_from_store(module, abstract_spec_str, query_info=info):
             self.last_search = info
             return True
@@ -282,10 +270,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
             bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
             concrete_spec = bootstrapper.concretize()
         else:
-            abstract_spec = spack.spec.Spec(
+            concrete_spec = spack.spec.Spec(
                 abstract_spec_str + " ^" + spec_for_current_python()
             )
-            concrete_spec = spack.concretize.concretize_one(abstract_spec)
+            concrete_spec.concretize()

         msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
         tty.debug(msg.format(module, abstract_spec_str))
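Both sides of this change do the same job, turning an abstract spec into a concrete one; they differ only in calling convention. The newer spack.concretize.concretize_one is functional (it returns a new concrete object), while the older Spec.concretize mutates in place. The same design trade-off in a neutral, self-contained sketch that is not Spack code:

from dataclasses import dataclass, replace

@dataclass(frozen=True)
class Spec:
    name: str
    version: str = ""

def concretize_one(abstract: Spec) -> Spec:
    # Functional style: return a new, fully determined object.
    return replace(abstract, version=abstract.version or "1.0")

s = concretize_one(Spec("zlib"))
print(s)  # Spec(name='zlib', version='1.0')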
@@ -300,7 +288,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
         return False

     def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
-        info: QueryInfo = {}
+        info: ConfigDictionary = {}
         if _executables_in_store(executables, abstract_spec_str, query_info=info):
             self.last_search = info
             return True
@@ -311,7 +299,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
         # might reduce compilation time by a fair amount
         _add_externals_if_missing()

-        concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
+        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
         msg = "[BOOTSTRAP] Try installing '{0}' from sources"
         tty.debug(msg.format(abstract_spec_str))
         with spack.config.override(self.mirror_scope):
@@ -328,9 +316,11 @@ def create_bootstrapper(conf: ConfigDictionary):
     return _bootstrap_methods[btype](conf)


-def source_is_enabled(conf: ConfigDictionary) -> bool:
-    """Returns true if the source is not enabled for bootstrapping"""
-    return spack.config.get("bootstrap:trusted").get(conf["name"], False)
+def source_is_enabled_or_raise(conf: ConfigDictionary):
+    """Raise ValueError if the source is not enabled for bootstrapping"""
+    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
+    if not trusted.get(name, False):
+        raise ValueError("source is not trusted")


 def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -360,23 +350,24 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
     exception_handler = GroupedExceptionHandler()

     for current_config in bootstrapping_sources():
-        if not source_is_enabled(current_config):
-            continue
         with exception_handler.forward(current_config["name"], Exception):
-            if create_bootstrapper(current_config).try_import(module, abstract_spec):
+            source_is_enabled_or_raise(current_config)
+            current_bootstrapper = create_bootstrapper(current_config)
+            if current_bootstrapper.try_import(module, abstract_spec):
                 return

+    assert exception_handler, (
+        f"expected at least one exception to have been raised at this point: "
+        f"while bootstrapping {module}"
+    )
     msg = f'cannot bootstrap the "{module}" Python module '
     if abstract_spec:
         msg += f'from spec "{abstract_spec}" '
-    if not exception_handler:
-        msg += ": no bootstrapping sources are enabled"
-    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
+    if tty.is_debug():
         msg += exception_handler.grouped_message(with_tracebacks=True)
     else:
         msg += exception_handler.grouped_message(with_tracebacks=False)
-    msg += "\nRun `spack --backtrace ...` for more detailed errors"
+    msg += "\nRun `spack --debug ...` for more detailed errors"
     raise ImportError(msg)
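The pattern in this loop, try each source, record the failure, and raise one combined error only if every source fails, can be sketched without Spack's GroupedExceptionHandler. This is an illustrative reimplementation under assumed semantics, not the llnl.util.lang original:

import contextlib
from typing import List

class GroupedErrors:
    def __init__(self) -> None:
        self.errors: List[str] = []

    def __bool__(self) -> bool:
        return bool(self.errors)

    @contextlib.contextmanager
    def forward(self, name: str):
        # Record the failure under the source's name instead of raising.
        try:
            yield
        except Exception as e:
            self.errors.append(f"{name}: {e}")

handler = GroupedErrors()
for source in ("github-actions", "spack-install"):
    with handler.forward(source):
        raise ValueError("source is not trusted")  # simulate a failing source
assert handler, "expected at least one recorded failure"
print("\n".join(handler.errors))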
@@ -414,9 +405,8 @@ def ensure_executables_in_path_or_raise(
     exception_handler = GroupedExceptionHandler()

     for current_config in bootstrapping_sources():
-        if not source_is_enabled(current_config):
-            continue
         with exception_handler.forward(current_config["name"], Exception):
+            source_is_enabled_or_raise(current_config)
             current_bootstrapper = create_bootstrapper(current_config)
             if current_bootstrapper.try_search_path(executables, abstract_spec):
                 # Additional environment variables needed
@@ -424,7 +414,6 @@ def ensure_executables_in_path_or_raise(
                     current_bootstrapper.last_search["spec"],
                     current_bootstrapper.last_search["command"],
                 )
-                assert cmd is not None, "expected an Executable"
                 cmd.add_default_envmod(
                     spack.user_environment.environment_modifications_for_specs(
                         concrete_spec, set_package_py_globals=False
@@ -432,17 +421,18 @@ def ensure_executables_in_path_or_raise(
                 )
                 return cmd

+    assert exception_handler, (
+        f"expected at least one exception to have been raised at this point: "
+        f"while bootstrapping {executables_str}"
+    )
     msg = f"cannot bootstrap any of the {executables_str} executables "
     if abstract_spec:
         msg += f'from spec "{abstract_spec}" '
-    if not exception_handler:
-        msg += ": no bootstrapping sources are enabled"
-    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
+    if tty.is_debug():
         msg += exception_handler.grouped_message(with_tracebacks=True)
     else:
         msg += exception_handler.grouped_message(with_tracebacks=False)
-    msg += "\nRun `spack --backtrace ...` for more detailed errors"
+    msg += "\nRun `spack --debug ...` for more detailed errors"
     raise RuntimeError(msg)
@@ -63,6 +63,7 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:

 def _core_requirements() -> List[RequiredResponseType]:
     _core_system_exes = {
+        "make": _missing("make", "required to build software from sources"),
         "patch": _missing("patch", "required to patch source code before building"),
         "tar": _missing("tar", "required to manage code archives"),
         "gzip": _missing("gzip", "required to compress/decompress code archives"),
@@ -44,19 +44,7 @@
 from enum import Flag, auto
 from itertools import chain
 from multiprocessing.connection import Connection
-from typing import (
-    Callable,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    TextIO,
-    Tuple,
-    Type,
-    Union,
-    overload,
-)
+from typing import Callable, Dict, List, Optional, Set, Tuple

 import archspec.cpu
@@ -158,128 +146,48 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):


 class MakeExecutable(Executable):
-    """Special callable executable object for make so the user can specify parallelism options
-    on a per-invocation basis.
+    """Special callable executable object for make so the user can specify
+    parallelism options on a per-invocation basis. Specifying
+    'parallel' to the call will override whatever the package's
+    global setting is, so you can either default to true or false and
+    override particular calls. Specifying 'jobs_env' to a particular
+    call will name an environment variable which will be set to the
+    parallelism level (without affecting the normal invocation with
+    -j).
     """

-    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
-        super().__init__(name)
+    def __init__(self, name, jobs, **kwargs):
+        supports_jobserver = kwargs.pop("supports_jobserver", True)
+        super().__init__(name, **kwargs)
         self.supports_jobserver = supports_jobserver
         self.jobs = jobs

-    @overload
-    def __call__(
-        self,
-        *args: str,
-        parallel: bool = ...,
-        jobs_env: Optional[str] = ...,
-        jobs_env_supports_jobserver: bool = ...,
-        fail_on_error: bool = ...,
-        ignore_errors: Union[int, Sequence[int]] = ...,
-        ignore_quotes: Optional[bool] = ...,
-        timeout: Optional[int] = ...,
-        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
-        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
-        input: Optional[TextIO] = ...,
-        output: Union[Optional[TextIO], str] = ...,
-        error: Union[Optional[TextIO], str] = ...,
-        _dump_env: Optional[Dict[str, str]] = ...,
-    ) -> None: ...
-
-    @overload
-    def __call__(
-        self,
-        *args: str,
-        parallel: bool = ...,
-        jobs_env: Optional[str] = ...,
-        jobs_env_supports_jobserver: bool = ...,
-        fail_on_error: bool = ...,
-        ignore_errors: Union[int, Sequence[int]] = ...,
-        ignore_quotes: Optional[bool] = ...,
-        timeout: Optional[int] = ...,
-        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
-        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
-        input: Optional[TextIO] = ...,
-        output: Union[Type[str], Callable] = ...,
-        error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
-        _dump_env: Optional[Dict[str, str]] = ...,
-    ) -> str: ...
-
-    @overload
-    def __call__(
-        self,
-        *args: str,
-        parallel: bool = ...,
-        jobs_env: Optional[str] = ...,
-        jobs_env_supports_jobserver: bool = ...,
-        fail_on_error: bool = ...,
-        ignore_errors: Union[int, Sequence[int]] = ...,
-        ignore_quotes: Optional[bool] = ...,
-        timeout: Optional[int] = ...,
-        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
-        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
-        input: Optional[TextIO] = ...,
-        output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
-        error: Union[Type[str], Callable] = ...,
-        _dump_env: Optional[Dict[str, str]] = ...,
-    ) -> str: ...
-
-    def __call__(
-        self,
-        *args: str,
-        parallel: bool = True,
-        jobs_env: Optional[str] = None,
-        jobs_env_supports_jobserver: bool = False,
-        **kwargs,
-    ) -> Optional[str]:
-        """Runs this "make" executable in a subprocess.
-
-        Args:
-            parallel: if False, parallelism is disabled
-            jobs_env: environment variable that will be set to the current level of parallelism
-            jobs_env_supports_jobserver: whether the jobs env supports a job server
-
-        For all the other **kwargs, refer to the base class.
+    def __call__(self, *args, **kwargs):
+        """parallel, and jobs_env from kwargs are swallowed and used here;
+        remaining arguments are passed through to the superclass.
         """
+        parallel = kwargs.pop("parallel", True)
+        jobs_env = kwargs.pop("jobs_env", None)
+        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)

         jobs = get_effective_jobs(
             self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
         )
         if jobs is not None:
-            args = (f"-j{jobs}",) + args
+            args = ("-j{0}".format(jobs),) + args

         if jobs_env:
-            # Caller wants us to set an environment variable to control the parallelism
+            # Caller wants us to set an environment variable to
+            # control the parallelism.
             jobs_env_jobs = get_effective_jobs(
                 self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
             )
             if jobs_env_jobs is not None:
-                extra_env = kwargs.setdefault("extra_env", {})
-                extra_env.update({jobs_env: str(jobs_env_jobs)})
+                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

         return super().__call__(*args, **kwargs)


-class UndeclaredDependencyError(spack.error.SpackError):
-    """Raised if a dependency is invoking an executable through a module global, without
-    declaring a dependency on it.
-    """
-
-
-class DeprecatedExecutable:
-    def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
-        self.pkg = pkg
-        self.exe = exe
-        self.exe_pkg = exe_pkg
-
-    def __call__(self, *args, **kwargs):
-        raise UndeclaredDependencyError(
-            f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
-        )
-
-    def add_default_env(self, key: str, value: str):
-        self.__call__()
-
-
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
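As the restored docstring describes, parallel and jobs_env are per-invocation controls. A toy stand-in for MakeExecutable, written only to show the kwargs contract (it builds an argv and an env dict instead of spawning make), runnable on its own:

class FakeMake:
    """Toy stand-in for MakeExecutable to illustrate the per-call kwargs."""

    def __init__(self, name, jobs):
        self.name, self.jobs = name, jobs

    def __call__(self, *args, parallel=True, jobs_env=None, **kwargs):
        argv = [self.name]
        if parallel and self.jobs > 1:
            argv.append(f"-j{self.jobs}")      # the -j flag the real class prepends
        argv += list(args)
        env = {jobs_env: str(self.jobs)} if jobs_env else {}
        return argv, env

make = FakeMake("make", 8)
print(make("install", jobs_env="MAKEFLAGS_JOBS"))
# (['make', '-j8', 'install'], {'MAKEFLAGS_JOBS': '8'})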
@@ -301,13 +209,11 @@ def clean_environment():
     env.unset("CPLUS_INCLUDE_PATH")
     env.unset("OBJC_INCLUDE_PATH")

-    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
-    env.set("CONFIG_SITE", os.devnull)
     env.unset("CMAKE_PREFIX_PATH")

     env.unset("PYTHONPATH")
     env.unset("R_HOME")
     env.unset("R_ENVIRON")

     env.unset("LUA_PATH")
     env.unset("LUA_CPATH")
@@ -715,9 +621,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
     module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

-    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
-    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
-    module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
+    # TODO: make these build deps that can be installed if not found.
+    module.make = MakeExecutable("make", jobs)
+    module.gmake = MakeExecutable("gmake", jobs)
+    module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
     # TODO: johnwparent: add package or builder support to define these build tools
     # for now there is no entrypoint for builders to define these on their
     # own
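These assignments are what let a package's build phases call make(...) or ninja(...) as bare module globals. A hypothetical package showing how such an injected global gets used; this sketch only runs inside a Spack build environment, where set_package_py_globals() has injected `make` into the package module:

# Hedged sketch: `make` below is not imported; Spack injects it.
class Hello(MakefilePackage):            # hypothetical package
    def build(self, spec, prefix):
        make("all")                      # resolves to module.make, a MakeExecutable
    def install(self, spec, prefix):
        make("install", f"PREFIX={prefix}", parallel=False)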
@@ -6,9 +6,7 @@
 import llnl.util.filesystem as fs

 import spack.directives
-import spack.spec
 import spack.util.executable
-import spack.util.prefix

 from .autotools import AutotoolsBuilder, AutotoolsPackage
@@ -19,18 +17,19 @@ class AspellBuilder(AutotoolsBuilder):
     to the Aspell extensions.
     """

-    def configure(
-        self,
-        pkg: "AspellDictPackage",  # type: ignore[override]
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ):
+    def configure(self, pkg, spec, prefix):
         aspell = spec["aspell"].prefix.bin.aspell
         prezip = spec["aspell"].prefix.bin.prezip
         destdir = prefix

-        sh = spack.util.executable.Executable("/bin/sh")
-        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")
+        sh = spack.util.executable.which("sh")
+        sh(
+            "./configure",
+            "--vars",
+            "ASPELL={0}".format(aspell),
+            "PREZIP={0}".format(prezip),
+            "DESTDIR={0}".format(destdir),
+        )


 # Aspell dictionaries install their bits into their prefix.lib
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path
 import stat
 import subprocess
 from typing import Callable, List, Optional, Set, Tuple, Union
@@ -355,13 +356,6 @@ def _do_patch_libtool_configure(self) -> None:
         )
         # Support Libtool 2.4.2 and older:
         x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
-        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
-        # -single_module linker flag. A deprecation warning makes it think the default is
-        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
-        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
-        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
-        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
-        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)

     @spack.phase_callbacks.run_after("configure")
     def _do_patch_libtool(self) -> None:
@@ -533,7 +527,7 @@ def build_directory(self) -> str:
         return build_dir

     @spack.phase_callbacks.run_before("autoreconf")
-    def _delete_configure_to_force_update(self) -> None:
+    def delete_configure_to_force_update(self) -> None:
         if self.force_autoreconf:
             fs.force_remove(self.configure_abs_path)
@@ -546,7 +540,7 @@ def autoreconf_search_path_args(self) -> List[str]:
         return _autoreconf_search_path_args(self.spec)

     @spack.phase_callbacks.run_after("autoreconf")
-    def _set_configure_or_die(self) -> None:
+    def set_configure_or_die(self) -> None:
         """Ensure the presence of a "configure" script, or raise. If the "configure"
         is found, a module level attribute is set.
@@ -570,7 +564,10 @@ def configure_args(self) -> List[str]:
         return []

     def autoreconf(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Not needed usually, configure should be already there"""
@@ -599,7 +596,10 @@ def autoreconf(
         self.pkg.module.autoreconf(*autoreconf_args)

     def configure(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "configure", with the arguments specified by the builder and an
         appropriately set prefix.
@@ -612,7 +612,10 @@ def configure(
         pkg.module.configure(*options)

     def build(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         # See https://autotools.io/automake/silent.html
@@ -622,7 +625,10 @@ def build(
         pkg.module.make(*params)

     def install(
-        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
@@ -819,7 +825,7 @@ def installcheck(self) -> None:
         self.pkg._if_make_target_execute("installcheck")

     @spack.phase_callbacks.run_after("install")
-    def _remove_libtool_archives(self) -> None:
+    def remove_libtool_archives(self) -> None:
         """Remove all .la files in prefix sub-folders if the package sets
         ``install_libtool_archives`` to be False.
         """
@@ -10,8 +10,6 @@
 import llnl.util.tty as tty

 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix

 from .cmake import CMakeBuilder, CMakePackage
@@ -295,26 +293,12 @@ def initconfig_hardware_entries(self):
             entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
             entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

-            if spec.satisfies("%gcc"):
-                entries.append(
-                    cmake_cache_string(
-                        "CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
-                    )
-                )
-
         return entries

     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
-        complete_rpath_list = ";".join(
-            [
-                self.pkg.spec.prefix.lib,
-                self.pkg.spec.prefix.lib64,
-                *os.environ.get("SPACK_COMPILER_EXTRA_RPATHS", "").split(":"),
-                *os.environ.get("SPACK_COMPILER_IMPLICIT_RPATHS", "").split(":"),
-            ]
-        )
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
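For reference, cmake_cache_string emits a set(... CACHE STRING ...) line for the generated CMake initial-cache file. A minimal stand-in showing the output shape; the signature is a guess at the interface, not Spack's exact implementation:

def cmake_cache_string(name: str, value: str, comment: str = "") -> str:
    # Mirrors the usual CMake initial-cache syntax:
    #   set(<var> "<value>" CACHE STRING "<doc>")
    return f'set({name} "{value}" CACHE STRING "{comment}")'

print(cmake_cache_string("CMAKE_PREFIX_PATH", "/opt/view;/opt/deps"))
# set(CMAKE_PREFIX_PATH "/opt/view;/opt/deps" CACHE STRING "")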
@@ -323,8 +307,6 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
             cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
-            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
-            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
             self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]
@@ -332,9 +314,7 @@ def initconfig_package_entries(self):
         """This method is to be overwritten by the package"""
         return []

-    def initconfig(
-        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def initconfig(self, pkg, spec, prefix):
         cache_entries = (
             self.std_initconfig_entries()
             + self.initconfig_compiler_entries()
@@ -7,8 +7,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
@@ -73,26 +71,19 @@ def build_directory(self):
     @property
     def build_args(self):
         """Arguments for ``cargo build``."""
-        return ["-j", str(self.pkg.module.make_jobs)]
+        return []

     @property
     def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []

-    def setup_build_environment(self, env):
-        env.set("CARGO_HOME", self.stage.path)
-
-    def build(
-        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

-    def install(
-        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Copy build files into package prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree("out", prefix)
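Outside of Spack, the two phases above amount to a cargo install into a scratch directory followed by a copy into the final prefix. A rough standalone equivalent, assuming cargo is on PATH; paths are illustrative:

import shutil
import subprocess

def build_and_install(src_dir: str, prefix: str) -> None:
    # Phase 1: let cargo compile and stage the binaries under ./out
    subprocess.run(
        ["cargo", "install", "--root", "out", "--path", "."],
        cwd=src_dir, check=True,
    )
    # Phase 2: copy the staged tree into the install prefix
    shutil.copytree(f"{src_dir}/out", prefix, dirs_exist_ok=True)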
@@ -454,7 +454,10 @@ def cmake_args(self) -> List[str]:
         return []

     def cmake(
-        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Runs ``cmake`` in the build directory"""
@@ -471,7 +474,10 @@ def cmake(
         pkg.module.cmake(*options)

     def build(
-        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
@@ -482,7 +488,10 @@ def build(
             pkg.module.ninja(*self.build_targets)

     def install(
-        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
@@ -7,8 +7,6 @@
 import spack.directives
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix

 from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
@@ -50,8 +48,3 @@ class GenericBuilder(BuilderWithDefaults):

     # unconditionally perform any post-install phase tests
     spack.phase_callbacks.run_after("install")(execute_install_time_tests)
-
-    def install(
-        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
-        raise NotImplementedError
@@ -7,9 +7,7 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
-from spack.directives import build_system, depends_on
+from spack.directives import build_system, extends
 from spack.multimethod import when

 from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -28,7 +26,9 @@ class GoPackage(spack.package_base.PackageBase):
     build_system("go")

     with when("build_system=go"):
-        depends_on("go", type="build")
+        # TODO: this seems like it should be depends_on, see
+        # setup_dependent_build_environment in go for why I kept it like this
+        extends("go@1.14:", type="build")


 @spack.builder.builder("go")
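The difference matters because extends lets the go installation participate in setting up the dependent's build environment, while a plain depends_on is only an ordering and linking relationship. A hypothetical package showing the directive in context; the package, URL, and checksum are invented placeholders, and this only evaluates inside a Spack repository:

# Hedged sketch of a package using the GoPackage build system.
class Gopls(GoPackage):                       # hypothetical package
    homepage = "https://example.com/gopls"    # placeholder URL
    version("0.1.0", sha256="0" * 64)         # placeholder checksum

    # build_system("go") on GoPackage pulls in extends("go@1.14:", type="build"),
    # so the go toolchain can inject GO111MODULE etc. into this package's build.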
@@ -71,7 +71,6 @@ class GoBuilder(BuilderWithDefaults):
     def setup_build_environment(self, env):
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
-        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

     @property
     def build_directory(self):
@@ -82,31 +81,19 @@ def build_directory(self):
     def build_args(self):
         """Arguments for ``go build``."""
         # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return [
-            "-p",
-            str(self.pkg.module.make_jobs),
-            "-modcacherw",
-            "-ldflags",
-            "-s -w",
-            "-o",
-            f"{self.pkg.name}",
-        ]
+        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]

     @property
     def check_args(self):
         """Argument for ``go test`` during check phase"""
         return []

-    def build(
-        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.go("build", *self.build_args)

-    def install(
-        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install built binaries into prefix bin."""
         with fs.working_dir(self.build_directory):
             fs.mkdirp(prefix.bin)
@@ -7,9 +7,7 @@

 import spack.builder
 import spack.package_base
-import spack.spec
 import spack.util.executable
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
@@ -57,9 +55,7 @@ class LuaBuilder(spack.builder.Builder):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def unpack(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def unpack(self, pkg, spec, prefix):
         if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
             directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
             dirlines = directory.split("\n")
@@ -70,16 +66,15 @@ def unpack(
     def _generate_tree_line(name, prefix):
         return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

-    def generate_luarocks_config(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def generate_luarocks_config(self, pkg, spec, prefix):
         spec = self.pkg.spec
         table_entries = []
         for d in spec.traverse(deptype=("build", "run")):
             if d.package.extends(self.pkg.extendee_spec):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))

-        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
+        path = self._luarocks_config_path()
+        with open(path, "w", encoding="utf-8") as config:
             config.write(
                 """
                 deps_mode="all"
@@ -90,26 +85,23 @@ def generate_luarocks_config(
                 "\n".join(table_entries)
             )
         )
+        return path

-    def preprocess(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def preprocess(self, pkg, spec, prefix):
         """Override this to preprocess source before building with luarocks"""
         pass

     def luarocks_args(self):
         return []

-    def install(
-        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         rock = "."
         specs = find(".", "*.rockspec", recursive=False)
         if specs:
             rock = specs[0]
         rocks_args = self.luarocks_args()
         rocks_args.append(rock)
-        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)

     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
@@ -98,20 +98,29 @@ def build_directory(self) -> str:
         return self.pkg.stage.source_path

     def edit(
-        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Edit the Makefile before calling make. The default is a no-op."""
         pass

     def build(
-        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)

     def install(
-        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
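The edit/build/install trio above is the whole MakefilePackage contract: edit tweaks the Makefile, then build and install run make against the builder's target lists. A hypothetical package overriding the edit hook; the package is invented, and FileFilter is assumed to be the llnl.util.filesystem text-rewriting helper:

# Hedged sketch of the MakefilePackage phases in use.
class Libfoo(MakefilePackage):             # hypothetical package
    def edit(self, spec, prefix):
        # Point the install target at Spack's prefix before make runs.
        makefile = FileFilter("Makefile")  # assumed llnl.util.filesystem helper
        makefile.filter(r"^PREFIX\s*=.*", f"PREFIX = {prefix}")

    # build() and install() are inherited: `make`, then `make install`.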
@@ -5,8 +5,6 @@

 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 from spack.util.executable import which
@@ -60,20 +58,16 @@ def build_args(self):
         """List of args to pass to build phase."""
         return []

-    def build(
-        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Compile code and package into a JAR file."""
         with fs.working_dir(self.build_directory):
-            mvn = which("mvn", required=True)
+            mvn = which("mvn")
             if self.pkg.run_tests:
                 mvn("verify", *self.build_args())
             else:
                 mvn("package", "-DskipTests", *self.build_args())

-    def install(
-        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Copy to installation prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree(".", prefix)
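One behavioral nuance in this hunk: with required=True, which raises when the executable is missing, whereas the plain form returns None and defers the failure to the call site. A tiny stand-in for that contract, built on the standard library rather than Spack's implementation:

import shutil

def which(name: str, required: bool = False):
    path = shutil.which(name)
    if path is None and required:
        raise RuntimeError(f"executable '{name}' not found in PATH")
    return path

mvn = which("mvn")                 # None if Maven is absent; fails later, obscurely
mvn = which("mvn", required=True)  # fails here, with a clear message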
@@ -188,7 +188,10 @@ def meson_args(self) -> List[str]:
         return []

     def meson(
-        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Run ``meson`` in the build directory"""
         options = []
@@ -201,7 +204,10 @@ def meson(
         pkg.module.meson(*options)

     def build(
-        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the build targets"""
         options = ["-v"]
@@ -210,7 +216,10 @@ def build(
         pkg.module.ninja(*options)

     def install(
-        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+        self,
+        pkg: spack.package_base.PackageBase,
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
@@ -7,8 +7,6 @@

 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts

 from ._checks import BuilderWithDefaults
@@ -101,9 +99,7 @@ def msbuild_install_args(self):
         as `msbuild_args` by default."""
         return self.msbuild_args()

-    def build(
-        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.msbuild(
@@ -112,9 +108,7 @@ def build(
|
|||||||
self.define_targets(*self.build_targets),
|
self.define_targets(*self.build_targets),
|
||||||
)
|
)
|
||||||
|
|
||||||
def install(
|
def install(self, pkg, spec, prefix):
|
||||||
self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
|
||||||
) -> None:
|
|
||||||
"""Run "msbuild" on the install targets specified by the builder.
|
"""Run "msbuild" on the install targets specified by the builder.
|
||||||
This is INSTALL by default"""
|
This is INSTALL by default"""
|
||||||
with fs.working_dir(self.build_directory):
|
with fs.working_dir(self.build_directory):
|
||||||
|
|||||||
@@ -7,8 +7,6 @@
 
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts
 
 from ._checks import BuilderWithDefaults
@@ -125,9 +123,7 @@ def nmake_install_args(self):
         Individual packages should override to specify NMake args to command line"""
         return []
 
-    def build(
-        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
@@ -136,9 +132,7 @@ def build(
         with fs.working_dir(self.build_directory):
             pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
 
-    def install(
-        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Run "nmake" on the install targets specified by the builder.
         This is INSTALL by default"""
         opts = self.std_nmake_args
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when
 
@@ -44,9 +42,7 @@ class OctaveBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
 
-    def install(
-        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install the package from the archive file"""
         pkg.module.octave(
             "--quiet",
@@ -10,11 +10,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
-from spack.directives import build_system, depends_on, extends
+from spack.directives import build_system, extends
 from spack.install_test import SkipTest, test_part
-from spack.multimethod import when
 from spack.util.executable import Executable
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -31,9 +28,7 @@ class PerlPackage(spack.package_base.PackageBase):
 
     build_system("perl")
 
-    with when("build_system=perl"):
-        extends("perl")
-        depends_on("gmake", type="build")
+    extends("perl", when="build_system=perl")
 
     @property
     @memoized
@@ -151,9 +146,7 @@ def configure_args(self):
         """
         return []
 
-    def configure(
-        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def configure(self, pkg, spec, prefix):
         """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
         list returned by :py:meth:`~.PerlBuilder.configure_args`.
@@ -177,9 +170,7 @@ def fix_shebang(self):
             repl = "#!/usr/bin/env perl"
             filter_file(pattern, repl, "Build", backup=False)
 
-    def build(
-        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Builds a Perl package."""
         self.build_executable()
 
@@ -190,8 +181,6 @@ def check(self):
         """Runs built-in tests of a Perl package."""
         self.build_executable("test")
 
-    def install(
-        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Installs a Perl package."""
         self.build_executable("install")
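For orientation (not from the diff): `PerlBuilder.configure` chooses between the two standard Perl build entry points. A reduced, self-contained sketch of that decision with illustrative names; the real builder derives the build method from which file the distribution ships:

    def perl_configure_args(build_method, prefix, extra_args):
        # Makefile.PL takes INSTALL_BASE, Build.PL takes --install_base;
        # both point the installation at the Spack prefix.
        if build_method == "Makefile.PL":
            args = ["Makefile.PL", f"INSTALL_BASE={prefix}"]
        else:
            args = ["Build.PL", f"--install_base={prefix}"]
        return args + list(extra_args)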
@@ -28,7 +28,6 @@
 import spack.repo
 import spack.spec
 import spack.store
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
@@ -264,17 +263,16 @@ def update_external_dependencies(self, extendee_spec=None):
         # Ensure architecture information is present
         if not python.architecture:
             host_platform = spack.platforms.host()
-            host_os = host_platform.default_operating_system()
-            host_target = host_platform.default_target()
+            host_os = host_platform.operating_system("default_os")
+            host_target = host_platform.target("default_target")
             python.architecture = spack.spec.ArchSpec(
                 (str(host_platform), str(host_os), str(host_target))
             )
         else:
             if not python.architecture.platform:
                 python.architecture.platform = spack.platforms.host()
-            platform = spack.platforms.by_name(python.architecture.platform)
             if not python.architecture.os:
-                python.architecture.os = platform.default_operating_system()
+                python.architecture.os = "default_os"
             if not python.architecture.target:
                 python.architecture.target = archspec.cpu.host().family.name
 
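The architecture normalization above can be exercised on its own. A sketch assuming a Spack python session, following the `default_operating_system`/`default_target` spelling from one side of the hunk:

    import spack.platforms
    import spack.spec

    def host_arch_spec():
        # Assemble a (platform, os, target) triple for the running host.
        host_platform = spack.platforms.host()
        host_os = host_platform.default_operating_system()
        host_target = host_platform.default_target()
        return spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))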
@@ -6,8 +6,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -29,7 +27,6 @@ class QMakePackage(spack.package_base.PackageBase):
     build_system("qmake")
 
     depends_on("qmake", type="build", when="build_system=qmake")
-    depends_on("gmake", type="build")
 
 
 @spack.builder.builder("qmake")
@@ -64,23 +61,17 @@ def qmake_args(self):
         """List of arguments passed to qmake."""
         return []
 
-    def qmake(
-        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def qmake(self, pkg, spec, prefix):
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
             pkg.module.qmake(*self.qmake_args())
 
-    def build(
-        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with working_dir(self.build_directory):
             pkg.module.make()
 
-    def install(
-        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with working_dir(self.build_directory):
             pkg.module.make("install")
@@ -94,7 +94,7 @@ def list_url(cls):
         if cls.cran:
             return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
 
-    @lang.classproperty
-    def git(cls):
-        if cls.bioc:
-            return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    @property
+    def git(self):
+        if self.bioc:
+            return f"https://git.bioconductor.org/packages/{self.bioc}"
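The `@lang.classproperty` side of this hunk makes `git` resolvable on the class itself, which matters when fetch URLs are looked up before a package is instantiated. A toy sketch (requires a Spack checkout for `llnl.util.lang`; the Bioconductor name is illustrative):

    from llnl.util import lang

    class RToy:
        bioc = "limma"  # illustrative Bioconductor package name

        @lang.classproperty
        def git(cls):
            if cls.bioc:
                return f"https://git.bioconductor.org/packages/{cls.bioc}"

    print(RToy.git)  # resolves without creating an instance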
@@ -9,8 +9,6 @@
 import llnl.util.tty as tty
 
 import spack.builder
-import spack.spec
-import spack.util.prefix
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
@@ -76,22 +74,18 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret
 
-    def install(
-        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install everything from build directory."""
         raco = Executable("raco")
         with fs.working_dir(self.build_directory):
-            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
-            name = pkg.racket_name
-            assert name is not None, "Racket package name is not set"
+            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                name,
+                self.pkg.racket_name,
                 "--deps",
                 "fail",
                 "--ignore-implies",
@@ -107,7 +101,8 @@ def install(
             except ProcessError:
                 args.insert(-2, "--skip-installed")
                 raco(*args)
-                tty.warn(
-                    f"Racket package {name} was already installed, uninstalling via "
+                msg = (
+                    "Racket package {0} was already installed, uninstalling via "
                     "Spack may make someone unhappy!"
                 )
+                tty.warn(msg.format(self.pkg.racket_name))
@@ -140,7 +140,7 @@ class ROCmPackage(PackageBase):
         when="+rocm",
     )
 
-    depends_on("llvm-amdgpu", type="build", when="+rocm")
+    depends_on("llvm-amdgpu", when="+rocm")
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
 
@@ -5,8 +5,6 @@
 
 import spack.builder
 import spack.package_base
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, extends, maintainers
 
 from ._checks import BuilderWithDefaults
@@ -44,9 +42,7 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
 
-    def build(
-        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Build a Ruby gem."""
 
         # ruby-rake provides both rake.gemspec and Rakefile, but only
@@ -62,9 +58,7 @@ def build(
             # Some Ruby packages only ship `*.gem` files, so nothing to build
             pass
 
-    def install(
-        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install a Ruby gem.
 
         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
@@ -4,8 +4,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -61,9 +59,7 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []
 
-    def build(
-        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))
 
@@ -71,9 +67,7 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []
 
-    def install(
-        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))
 
@@ -11,8 +11,6 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable
@@ -43,7 +41,6 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
-        depends_on("gmake", type="build")
 
     @property
     def import_modules(self):
@@ -133,9 +130,7 @@ class SIPBuilder(BuilderWithDefaults):
 
     build_directory = "build"
 
-    def configure(
-        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def configure(self, pkg, spec, prefix):
         """Configure the package."""
 
         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
@@ -153,9 +148,7 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []
 
-    def build(
-        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Build the package."""
         args = self.build_args()
 
@@ -166,9 +159,7 @@ def build_args(self):
         """Arguments to pass to build."""
         return []
 
-    def install(
-        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Install the package."""
         args = self.install_args()
 
@@ -6,8 +6,6 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
@@ -99,9 +97,7 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
 
-    def configure(
-        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def configure(self, pkg, spec, prefix):
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()
@@ -112,9 +108,7 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []
 
-    def build(
-        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Executes the build."""
         args = self.build_args()
 
@@ -124,9 +118,7 @@ def build_args(self):
         """Arguments to pass to build."""
         return []
 
-    def install(
-        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Installs the targets on the system."""
         args = self.install_args()
 
@@ -14,6 +14,7 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
+from urllib.error import HTTPError, URLError
 from urllib.request import HTTPHandler, Request, build_opener
 
 import llnl.util.filesystem as fs
@@ -471,9 +472,12 @@ def generate_pipeline(env: ev.Environment, args) -> None:
     # Use all unpruned specs to populate the build group for this set
     cdash_config = cfg.get("cdash")
     if options.cdash_handler and options.cdash_handler.auth_token:
-        options.cdash_handler.populate_buildgroup(
-            [options.cdash_handler.build_name(s) for s in pipeline_specs]
-        )
+        try:
+            options.cdash_handler.populate_buildgroup(
+                [options.cdash_handler.build_name(s) for s in pipeline_specs]
+            )
+        except (SpackError, HTTPError, URLError, TimeoutError) as err:
+            tty.warn(f"Problem populating buildgroup: {err}")
     elif cdash_config:
         # warn only if there was actually a CDash configuration.
         tty.warn("Unable to populate buildgroup without CDash credentials")
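The try/except added here implements a best-effort pattern: reporting to CDash must not fail pipeline generation. A generic, self-contained sketch of the same idea:

    from urllib.error import HTTPError, URLError

    def best_effort(call, warn):
        # Run a reporting call; degrade to a warning on network trouble.
        try:
            call()
        except (HTTPError, URLError, TimeoutError) as err:
            warn(f"Problem populating buildgroup: {err}")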
@@ -1,21 +1,23 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import codecs
 import copy
 import json
 import os
 import re
+import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import Request
+from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import memoized
+from llnl.util.lang import Singleton, memoized
 
 import spack.binary_distribution as bindist
 import spack.config as cfg
@@ -33,11 +35,32 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
 
 
+def _urlopen():
+    error_handler = web_util.SpackHTTPDefaultErrorHandler()
+
+    # One opener with HTTPS ssl enabled
+    with_ssl = build_opener(
+        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
+    )
+
+    # One opener with HTTPS ssl disabled
+    without_ssl = build_opener(
+        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
+    )
+
+    # And dynamically dispatch based on the config:verify_ssl.
+    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
+        opener = with_ssl if verify_ssl else without_ssl
+        timeout = timeout or cfg.get("config:connect_timeout", 1)
+        return opener.open(fullurl, data, timeout)
+
+    return dispatch_open
+
+
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-# this exists purely for testing purposes
-_urlopen = web_util.urlopen
+_dyn_mapping_urlopener = Singleton(_urlopen)
 
 
 def copy_files_to_artifacts(src, artifacts_dir):
@@ -256,25 +279,26 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)
 
-    def create_buildgroup(self, headers, url, group_name, group_type):
+    def create_buildgroup(self, opener, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
 
         enc_data = json.dumps(data).encode("utf-8")
 
         request = Request(url, data=enc_data, headers=headers)
 
-        try:
-            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
-        except OSError as e:
-            tty.warn(f"Failed to create CDash buildgroup: {e}")
+        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response_code = response.getcode()
+
+        if response_code not in [200, 201]:
+            msg = f"Creating buildgroup failed (response code = {response_code})"
+            tty.warn(msg)
             return None
 
-        try:
-            response_json = json.loads(response_text)
-            return response_json["id"]
-        except (json.JSONDecodeError, KeyError) as e:
-            tty.warn(f"Failed to parse CDash response: {e}")
-            return None
+        response_text = response.read()
+        response_json = json.loads(response_text)
+        build_group_id = response_json["id"]
+
+        return build_group_id
 
     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"
@@ -284,11 +308,16 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }
 
-        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")
+        opener = build_opener(HTTPHandler)
+
+        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(
+            opener, headers, url, f"Latest {self.build_group}", "Latest"
+        )
 
         if not parent_group_id or not group_id:
-            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
+            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
+            tty.warn(msg)
             return
 
         data = {
@@ -300,12 +329,15 @@ def populate_buildgroup(self, job_names):
 
         enc_data = json.dumps(data).encode("utf-8")
 
-        request = Request(url, data=enc_data, headers=headers, method="PUT")
+        request = Request(url, data=enc_data, headers=headers)
+        request.get_method = lambda: "PUT"
 
-        try:
-            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
-        except OSError as e:
-            tty.warn(f"Failed to populate CDash buildgroup: {e}")
+        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response_code = response.getcode()
+
+        if response_code != 200:
+            msg = f"Error response code ({response_code}) in populate_buildgroup"
+            tty.warn(msg)
 
     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI
@@ -703,6 +735,9 @@ def _apply_section(dest, src):
         for value in header.values():
             value = os.path.expandvars(value)
 
+        verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
+        timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
+
         required = mapping.get("require", [])
         allowed = mapping.get("allow", [])
         ignored = mapping.get("ignore", [])
@@ -736,15 +771,19 @@ def job_query(job):
                 endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
             )
             try:
-                response = _urlopen(request)
-                config = json.load(response)
+                response = _dyn_mapping_urlopener(
+                    request, verify_ssl=verify_ssl, timeout=timeout
+                )
            except Exception as e:
                 # For now just ignore any errors from dynamic mapping and continue
                 # This is still experimental, and failures should not stop CI
                 # from running normally
-                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
+                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
+                tty.warn(f"{e}")
                 continue
 
+            config = json.load(codecs.getreader("utf-8")(response))
+
             # Strip ignore keys
             if ignored:
                 for key in ignored:
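The `Singleton(_urlopen)` wrapper above defers building the SSL contexts until the first dynamic-mapping request. A simplified, call-based sketch of that lazy initialization (the llnl helper itself is attribute-based):

    class LazySingleton:
        """Evaluate a factory on first use, then reuse the result."""

        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __call__(self, *args, **kwargs):
            if self._instance is None:
                self._instance = self._factory()
            return self._instance(*args, **kwargs)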
@@ -171,9 +171,7 @@ def quote_kvp(string: str) -> str:
 
 
 def parse_specs(
-    args: Union[str, List[str]],
-    concretize: bool = False,
-    tests: spack.concretize.TestsType = False,
+    args: Union[str, List[str]], concretize: bool = False, tests: bool = False
 ) -> List[spack.spec.Spec]:
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -185,13 +183,11 @@ def parse_specs(
     if not concretize:
         return specs
 
-    to_concretize: List[spack.concretize.SpecPairInput] = [(s, None) for s in specs]
+    to_concretize = [(s, None) for s in specs]
     return _concretize_spec_pairs(to_concretize, tests=tests)
 
 
-def _concretize_spec_pairs(
-    to_concretize: List[spack.concretize.SpecPairInput], tests: spack.concretize.TestsType = False
-) -> List[spack.spec.Spec]:
+def _concretize_spec_pairs(to_concretize, tests=False):
     """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
 
     Any spec with a concrete spec associated with it will concretize to that spec. Any spec
@@ -202,7 +198,7 @@ def _concretize_spec_pairs(
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or spack.concretize.concretize_one(abstract, tests=tests)]
+        return [concrete or abstract.concretized()]
 
     # Special case if every spec is either concrete or has an abstract hash
     if all(
@@ -254,9 +250,9 @@ def matching_spec_from_env(spec):
     """
     env = ev.active_environment()
     if env:
-        return env.matching_spec(spec) or spack.concretize.concretize_one(spec)
+        return env.matching_spec(spec) or spec.concretized()
     else:
-        return spack.concretize.concretize_one(spec)
+        return spec.concretized()
 
 
 def matching_specs_from_env(specs):
@@ -297,7 +293,7 @@ def disambiguate_spec(
 
 def disambiguate_spec_from_hashes(
     spec: spack.spec.Spec,
-    hashes: Optional[List[str]],
+    hashes: List[str],
     local: bool = False,
     installed: Union[bool, InstallRecordStatus] = True,
     first: bool = False,
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import collections
|
import collections
|
||||||
import warnings
|
|
||||||
|
|
||||||
import archspec.cpu
|
import archspec.cpu
|
||||||
|
|
||||||
@@ -52,10 +51,10 @@ def setup_parser(subparser):
|
|||||||
"-t", "--target", action="store_true", default=False, help="print only the target"
|
"-t", "--target", action="store_true", default=False, help="print only the target"
|
||||||
)
|
)
|
||||||
parts2.add_argument(
|
parts2.add_argument(
|
||||||
"-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
|
"-f", "--frontend", action="store_true", default=False, help="print frontend"
|
||||||
)
|
)
|
||||||
parts2.add_argument(
|
parts2.add_argument(
|
||||||
"-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
|
"-b", "--backend", action="store_true", default=False, help="print backend"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -99,14 +98,15 @@ def arch(parser, args):
|
|||||||
display_targets(archspec.cpu.TARGETS)
|
display_targets(archspec.cpu.TARGETS)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
os_args, target_args = "default_os", "default_target"
|
||||||
if args.frontend:
|
if args.frontend:
|
||||||
warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
|
os_args, target_args = "frontend", "frontend"
|
||||||
elif args.backend:
|
elif args.backend:
|
||||||
warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
|
os_args, target_args = "backend", "backend"
|
||||||
|
|
||||||
host_platform = spack.platforms.host()
|
host_platform = spack.platforms.host()
|
||||||
host_os = host_platform.default_operating_system()
|
host_os = host_platform.operating_system(os_args)
|
||||||
host_target = host_platform.default_target()
|
host_target = host_platform.target(target_args)
|
||||||
if args.family:
|
if args.family:
|
||||||
host_target = host_target.family
|
host_target = host_target.family
|
||||||
elif args.generic:
|
elif args.generic:
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
+import os.path
 import shutil
 import sys
 import tempfile
@@ -14,9 +14,9 @@
 import spack.bootstrap
 import spack.bootstrap.config
 import spack.bootstrap.core
-import spack.concretize
 import spack.config
 import spack.mirrors.utils
+import spack.spec
 import spack.stage
 import spack.util.path
 import spack.util.spack_yaml
@@ -397,7 +397,7 @@ def _mirror(args):
         llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
         # Suppress tty from the call below for terser messages
         llnl.util.tty.set_msg_enabled(False)
-        spec = spack.concretize.concretize_one(spec_str)
+        spec = spack.spec.Spec(spec_str).concretized()
         for node in spec.traverse():
             spack.mirrors.utils.create(mirror_dir, [node])
         llnl.util.tty.set_msg_enabled(True)
@@ -436,7 +436,6 @@ def write_metadata(subdir, metadata):
     shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
-    instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
 
 
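The two concretization idioms these hunks toggle between are equivalent in effect; which one exists depends on the Spack version in play. Sketch, with an arbitrary spec string:

    import spack.concretize
    import spack.spec

    # newer module-level API (left side of the hunks)
    spec = spack.concretize.concretize_one("patchelf")

    # older Spec-method API (right side)
    spec = spack.spec.Spec("patchelf").concretized()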
@@ -16,7 +16,6 @@
 
 import spack.binary_distribution as bindist
 import spack.cmd
-import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.environment as ev
@@ -555,7 +554,8 @@ def check_fn(args: argparse.Namespace):
         tty.msg("No specs provided, exiting.")
         return
 
-    specs = [spack.concretize.concretize_one(s) for s in specs]
+    for spec in specs:
+        spec.concretize()
 
     # Next see if there are any configured binary mirrors
     configured_mirrors = spack.config.get("mirrors", scope=args.scope)
@@ -623,7 +623,7 @@ def save_specfile_fn(args):
     root = specs[0]
 
     if not root.concrete:
-        root = spack.concretize.concretize_one(root)
+        root.concretize()
 
     save_dependency_specfiles(
         root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
@@ -4,7 +4,7 @@
 
 
 import argparse
-import os
+import os.path
 import textwrap
 
 from llnl.util.lang import stable_partition
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path
 
 import llnl.util.tty
 
@@ -86,8 +86,8 @@ def create_db_tarball(args):
 
 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.default_operating_system()
-    host_target = host_platform.default_target()
+    host_os = host_platform.operating_system("frontend")
+    host_target = host_platform.target("frontend")
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
@@ -18,7 +18,6 @@
 from llnl.util.symlink import symlink
 
 import spack.cmd
-import spack.concretize
 import spack.environment as ev
 import spack.installer
 import spack.store
@@ -104,7 +103,7 @@ def deprecate(parser, args):
     )
 
     if args.install:
-        deprecator = spack.concretize.concretize_one(specs[1])
+        deprecator = specs[1].concretized()
     else:
         deprecator = spack.cmd.disambiguate_spec(specs[1], env, local=True)
 
@@ -10,7 +10,6 @@
 import spack.build_environment
 import spack.cmd
 import spack.cmd.common.arguments
-import spack.concretize
 import spack.config
 import spack.repo
 from spack.cmd.common import arguments
@@ -114,8 +113,8 @@ def dev_build(self, args):
     source_path = os.path.abspath(source_path)
 
     # Forces the build to run out of the source directory.
-    spec.constrain(f'dev_path="{source_path}"')
-    spec = spack.concretize.concretize_one(spec)
+    spec.constrain("dev_path=%s" % source_path)
+    spec.concretize()
 
     if spec.installed:
         tty.error("Already installed in %s" % spec.prefix)
@@ -54,6 +54,10 @@
   @m{target=target}              specific <target> processor
   @m{arch=platform-os-target}    shortcut for all three above
 
+  cross-compiling:
+    @m{os=backend} or @m{os=be}  build for compute node (backend)
+    @m{os=frontend} or @m{os=fe} build for login node (frontend)
+
   dependencies:
     ^dependency [constraints]    specify constraints on dependencies
     ^@K{/hash}                   build with a specific installed
@@ -13,7 +13,6 @@
 from llnl.util import lang, tty
 
 import spack.cmd
-import spack.concretize
 import spack.config
 import spack.environment as ev
 import spack.paths
@@ -451,7 +450,7 @@ def concrete_specs_from_file(args):
         else:
             s = spack.spec.Spec.from_json(f)
 
-        concretized = spack.concretize.concretize_one(s)
+        concretized = s.concretized()
         if concretized.dag_hash() != s.dag_hash():
             msg = 'skipped invalid file "{0}". '
             msg += "The file does not contain a concrete spec."
@@ -7,9 +7,9 @@
 
 from llnl.path import convert_to_posix_path
 
-import spack.concretize
 import spack.paths
 import spack.util.executable
+from spack.spec import Spec
 
 description = "generate Windows installer"
 section = "admin"
@@ -65,7 +65,8 @@ def make_installer(parser, args):
     """
     if sys.platform == "win32":
         output_dir = args.output_dir
-        cmake_spec = spack.concretize.concretize_one("cmake")
+        cmake_spec = Spec("cmake")
+        cmake_spec.concretize()
         cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
         cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
         spack_source = args.spack_source
@@ -492,7 +492,7 @@ def extend_with_additional_versions(specs, num_versions):
         mirror_specs = spack.mirrors.utils.get_all_versions(specs)
     else:
         mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
-    mirror_specs = [spack.concretize.concretize_one(x) for x in mirror_specs]
+    mirror_specs = [x.concretized() for x in mirror_specs]
     return mirror_specs
 
 
@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""
 
 import collections
-import os
+import os.path
 import shutil
 import sys
 
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import os
+import os.path
 import shutil
 
 import llnl.util.tty as tty
@@ -144,7 +144,7 @@ def is_installed(spec):
         record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
         return record and record.installed
 
-    all_specs = traverse.traverse_nodes(
+    specs = traverse.traverse_nodes(
         specs,
         root=False,
         order="breadth",
@@ -155,7 +155,7 @@ def is_installed(spec):
     )
 
     with spack.store.STORE.db.read_transaction():
-        return [spec for spec in all_specs if is_installed(spec)]
+        return [spec for spec in specs if is_installed(spec)]
 
 
 def dependent_environments(
@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os
+import os.path
 import re
 import sys
 
|
|||||||
class CompilerCacheEntry:
|
class CompilerCacheEntry:
|
||||||
"""Deserialized cache entry for a compiler"""
|
"""Deserialized cache entry for a compiler"""
|
||||||
|
|
||||||
__slots__ = ("c_compiler_output", "real_version")
|
__slots__ = ["c_compiler_output", "real_version"]
|
||||||
|
|
||||||
def __init__(self, c_compiler_output: Optional[str], real_version: str):
|
def __init__(self, c_compiler_output: Optional[str], real_version: str):
|
||||||
self.c_compiler_output = c_compiler_output
|
self.c_compiler_output = c_compiler_output
|
||||||
self.real_version = real_version
|
self.real_version = real_version
|
||||||
|
|
||||||
@property
|
|
||||||
def empty(self) -> bool:
|
|
||||||
"""Sometimes the compiler is temporarily broken, preventing us from getting output. The
|
|
||||||
call site determines if that is a problem."""
|
|
||||||
return self.c_compiler_output is None
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_dict(cls, data: Dict[str, Optional[str]]):
|
def from_dict(cls, data: Dict[str, Optional[str]]):
|
||||||
if not isinstance(data, dict):
|
if not isinstance(data, dict):
|
||||||
@@ -798,10 +792,9 @@ def __init__(self, cache: "FileCache") -> None:
|
|||||||
self.cache.init_entry(self.name)
|
self.cache.init_entry(self.name)
|
||||||
self._data: Dict[str, Dict[str, Optional[str]]] = {}
|
self._data: Dict[str, Dict[str, Optional[str]]] = {}
|
||||||
|
|
||||||
def _get_entry(self, key: str, *, allow_empty: bool) -> Optional[CompilerCacheEntry]:
|
def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
|
||||||
try:
|
try:
|
||||||
entry = CompilerCacheEntry.from_dict(self._data[key])
|
return CompilerCacheEntry.from_dict(self._data[key])
|
||||||
return entry if allow_empty or not entry.empty else None
|
|
||||||
except ValueError:
|
except ValueError:
|
||||||
del self._data[key]
|
del self._data[key]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
@@ -819,7 +812,7 @@ def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
|||||||
self._data = {}
|
self._data = {}
|
||||||
|
|
||||||
key = self._key(compiler)
|
key = self._key(compiler)
|
||||||
value = self._get_entry(key, allow_empty=False)
|
value = self._get_entry(key)
|
||||||
if value is not None:
|
if value is not None:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
@@ -833,7 +826,7 @@ def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
|||||||
self._data = {}
|
self._data = {}
|
||||||
|
|
||||||
# Use cache entry that may have been created by another process in the meantime.
|
# Use cache entry that may have been created by another process in the meantime.
|
||||||
entry = self._get_entry(key, allow_empty=True)
|
entry = self._get_entry(key)
|
||||||
|
|
||||||
# Finally compute the cache entry
|
# Finally compute the cache entry
|
||||||
if entry is None:
|
if entry is None:
|
||||||
|
|||||||
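The `allow_empty` flag on the left side distinguishes a missing cache entry from a cached failure (the compiler produced no output). A reduced, self-contained sketch of that lookup logic:

    from typing import Optional

    class Entry:
        def __init__(self, output: Optional[str]):
            self.c_compiler_output = output

        @property
        def empty(self) -> bool:
            return self.c_compiler_output is None

    def get_entry(data, key, allow_empty):
        # Return None for absent keys, and also for cached failures
        # unless the caller opted in with allow_empty=True.
        if key not in data:
            return None
        entry = Entry(data[key])
        return entry if allow_empty or not entry.empty else None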
@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.default_operating_system()
-        target = host_platform.default_target()
+        operating_system = host_platform.operating_system("default_os")
+        target = host_platform.target("default_target")
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().default_target()
+            target = spack.platforms.host().target("default_target")
 
         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.default_operating_system()
+            operating_system = host_platform.operating_system("default_os")
     return operating_system, target
 
 
@@ -5,7 +5,7 @@
 import sys
 import time
 from contextlib import contextmanager
-from typing import Iterable, List, Optional, Sequence, Tuple, Union
+from typing import Iterable, Optional, Sequence, Tuple, Union
 
 import llnl.util.tty as tty
 
@@ -35,14 +35,14 @@ def enable_compiler_existence_check():
     CHECK_COMPILER_EXISTENCE = saved
 
 
-SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
+SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]
 
 
-def _concretize_specs_together(
-    abstract_specs: Sequence[Spec], tests: TestsType = False
-) -> List[Spec]:
+def concretize_specs_together(
+    abstract_specs: Sequence[SpecLike], tests: TestsType = False
+) -> Sequence[Spec]:
     """Given a number of specs as input, tries to concretize them together.
 
     Args:
@@ -50,16 +50,17 @@ def _concretize_specs_together(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    from spack.solver.asp import Solver
+    import spack.solver.asp
 
     allow_deprecated = spack.config.get("config:deprecated", False)
-    result = Solver().solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
+    solver = spack.solver.asp.Solver()
+    result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
     return [s.copy() for s in result.specs]
 
 
 def concretize_together(
-    spec_list: Sequence[SpecPairInput], tests: TestsType = False
-) -> List[SpecPair]:
+    spec_list: Sequence[SpecPair], tests: TestsType = False
+) -> Sequence[SpecPair]:
     """Given a number of specs as input, tries to concretize them together.
 
     Args:
@@ -70,13 +71,13 @@ def concretize_together(
     """
     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     abstract_specs = [abstract for abstract, _ in spec_list]
-    concrete_specs = _concretize_specs_together(to_concretize, tests=tests)
+    concrete_specs = concretize_specs_together(to_concretize, tests=tests)
     return list(zip(abstract_specs, concrete_specs))
 
 
 def concretize_together_when_possible(
-    spec_list: Sequence[SpecPairInput], tests: TestsType = False
-) -> List[SpecPair]:
+    spec_list: Sequence[SpecPair], tests: TestsType = False
+) -> Sequence[SpecPair]:
     """Given a number of specs as input, tries to concretize them together to the extent possible.
 
     See documentation for ``unify: when_possible`` concretization for the precise definition of
@@ -88,7 +89,7 @@ def concretize_together_when_possible(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    from spack.solver.asp import Solver
+    import spack.solver.asp
 
     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     old_concrete_to_abstract = {
@@ -96,8 +97,9 @@ def concretize_together_when_possible(
     }
 
     result_by_user_spec = {}
+    solver = spack.solver.asp.Solver()
     allow_deprecated = spack.config.get("config:deprecated", False)
-    for result in Solver().solve_in_rounds(
+    for result in solver.solve_in_rounds(
         to_concretize, tests=tests, allow_deprecated=allow_deprecated
     ):
         result_by_user_spec.update(result.specs_by_input)
@@ -111,8 +113,8 @@ def concretize_together_when_possible(
 
 
 def concretize_separately(
-    spec_list: Sequence[SpecPairInput], tests: TestsType = False
-) -> List[SpecPair]:
+    spec_list: Sequence[SpecPair], tests: TestsType = False
+) -> Sequence[SpecPair]:
     """Concretizes the input specs separately from each other.
 
     Args:
@@ -121,7 +123,7 @@ def concretize_separately(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    from spack.bootstrap import ensure_bootstrap_configuration, ensure_clingo_importable_or_raise
+    import spack.bootstrap
 
     to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
     args = [
@@ -131,8 +133,8 @@ def concretize_separately(
     ]
     ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
     # Ensure we don't try to bootstrap clingo in parallel
-    with ensure_bootstrap_configuration():
-        ensure_clingo_importable_or_raise()
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_clingo_importable_or_raise()
 
     # Ensure all the indexes have been built or updated, since
     # otherwise the processes in the pool may timeout on waiting
@@ -187,52 +189,10 @@ def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int,
     index, spec_str, tests = packed_arguments
     with tty.SuppressOutput(msg_enabled=False):
         start = time.time()
-        spec = concretize_one(Spec(spec_str), tests=tests)
+        spec = Spec(spec_str).concretized(tests=tests)
         return index, spec, time.time() - start
 
 
-def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
-    """Return a concretized copy of the given spec.
-
-    Args:
-        tests: if False disregard 'test' dependencies, if a list of names activate them for
-            the packages in the list, if True activate 'test' dependencies for all packages.
-    """
-    from spack.solver.asp import Solver, SpecBuilder
-
-    if isinstance(spec, str):
-        spec = Spec(spec)
-    spec = spec.lookup_hash()
-
-    if spec.concrete:
-        return spec.copy()
-
-    for node in spec.traverse():
-        if not node.name:
-            raise spack.error.SpecError(
-                f"Spec {node} has no name; cannot concretize an anonymous spec"
-            )
-
-    allow_deprecated = spack.config.get("config:deprecated", False)
-    result = Solver().solve([spec], tests=tests, allow_deprecated=allow_deprecated)
-
-    # take the best answer
-    opt, i, answer = min(result.answers)
-    name = spec.name
-    # TODO: Consolidate this code with similar code in solve.py
-    if spec.virtual:
-        providers = [s.name for s in answer.values() if s.package.provides(name)]
-        name = providers[0]
-
-    node = SpecBuilder.make_node(pkg=name)
-    assert (
-        node in answer
-    ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
-
-    concretized = answer[node]
-    return concretized
-
-
 class UnavailableCompilerVersionError(spack.error.SpackError):
     """Raised when there is no available compiler that satisfies a
     compiler spec."""
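Note: this file deletes the module-level `concretize_one` helper and routes `_concretize_task` back through `Spec.concretized()`. A hedged sketch of the difference in calling convention, assuming a Spack checkout where the respective API exists:

```python
from spack.spec import Spec

# Newer, function-style API (removed by this diff): the spec is treated
# as immutable input and a concretized copy is returned.
# import spack.concretize
# concrete = spack.concretize.concretize_one(Spec("zlib"), tests=False)

# Older, method-style API (restored by this diff): concretization hangs
# off the Spec object itself.
concrete = Spec("zlib").concretized(tests=False)
print(concrete.dag_hash())
```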
@@ -36,8 +36,6 @@
 import sys
 from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
 
-import jsonschema
-
 from llnl.util import filesystem, lang, tty
 
 import spack.error
@@ -53,7 +51,6 @@
 import spack.schema.definitions
 import spack.schema.develop
 import spack.schema.env
-import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
@@ -71,7 +68,6 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
-    "env_vars": spack.schema.env_vars.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -955,6 +951,12 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
     return CONFIG.set(path, value, scope)
 
 
+def add_default_platform_scope(platform: str) -> None:
+    plat_name = os.path.join("defaults", platform)
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))
+
+
 def scopes() -> Dict[str, ConfigScope]:
     """Convenience function to get list of configuration scopes."""
     return CONFIG.scopes
@@ -1052,6 +1054,8 @@ def validate(
     This leverages the line information (start_mark, end_mark) stored
     on Spack YAML structures.
     """
+    import jsonschema
+
     try:
         spack.schema.Validator(schema).validate(data)
     except jsonschema.ValidationError as e:
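Note: this file and the two below move `import jsonschema` between module scope and the function that uses it. The trade-off is CLI startup time versus import locality; a generic sketch of the deferred-import pattern (toy function, not Spack's):

```python
def validate(data, schema):
    # Deferring a heavy import to first use keeps importing this module
    # cheap for commands that never validate anything.
    import jsonschema

    try:
        jsonschema.validate(data, schema)
    except jsonschema.ValidationError as e:
        raise ValueError(f"invalid configuration: {e.message}") from e


validate({"verbose": True}, {"type": "object"})
```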
@@ -6,8 +6,6 @@
 """
 import warnings
 
-import jsonschema
-
 import spack.environment as ev
 import spack.schema.env as env
 import spack.util.spack_yaml as syaml
@@ -32,6 +30,8 @@ def validate(configuration_file):
     Returns:
         A sanitized copy of the configuration stored in the input file
     """
+    import jsonschema
+
     with open(configuration_file, encoding="utf-8") as f:
         config = syaml.load(f)
 
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Manages the details on the images used in the various stages."""
 import json
-import os
+import os.path
 import shlex
 import sys
 
@@ -9,8 +9,6 @@
 from collections import namedtuple
 from typing import Optional
 
-import jsonschema
-
 import spack.environment as ev
 import spack.error
 import spack.schema.env
@@ -190,6 +188,8 @@ def paths(self):
     @tengine.context_property
     def manifest(self):
         """The spack.yaml file that should be used in the image"""
+        import jsonschema
+
         # Copy in the part of spack.yaml prescribed in the configuration file
         manifest = copy.deepcopy(self.config)
         manifest.pop("container")
@@ -123,15 +123,6 @@
     "deprecated_for",
 )
 
-#: File where the database is written
-INDEX_JSON_FILE = "index.json"
-
-# Verifier file to check last modification of the DB
-_INDEX_VERIFIER_FILE = "index_verifier"
-
-# Lockfile for the database
-_LOCK_FILE = "lock"
-
 
 @llnl.util.lang.memoized
 def _getfqdn():
@@ -269,7 +260,7 @@ class ForbiddenLockError(SpackError):
 
 class ForbiddenLock:
     def __getattr__(self, name):
-        raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")
+        raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
 
     def __reduce__(self):
         return ForbiddenLock, tuple()
@@ -428,25 +419,14 @@ class FailureTracker:
     the likelihood of collision very low with no cleanup required.
     """
 
-    #: root directory of the failure tracker
-    dir: pathlib.Path
-
-    #: File for locking particular concrete spec hashes
-    locker: SpecLocker
-
     def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
         #: Ensure a persistent location for dealing with parallel installation
         #: failures (e.g., across near-concurrent processes).
         self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
-        self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
-
-    def _ensure_parent_directories(self) -> None:
-        """Ensure that parent directories of the FailureTracker exist.
-
-        Accesses the filesystem only once, the first time it's called on a given FailureTracker.
-        """
         self.dir.mkdir(parents=True, exist_ok=True)
 
+        self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
+
     def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
         """Removes any persistent and cached failure tracking for the spec.
 
@@ -489,18 +469,13 @@ def clear_all(self) -> None:
 
         tty.debug("Removing prefix failure tracking files")
         try:
-            marks = os.listdir(str(self.dir))
-        except FileNotFoundError:
-            return  # directory doesn't exist yet
+            for fail_mark in os.listdir(str(self.dir)):
+                try:
+                    (self.dir / fail_mark).unlink()
+                except OSError as exc:
+                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
         except OSError as exc:
             tty.warn(f"Unable to remove failure marking files: {str(exc)}")
-            return
-
-        for fail_mark in marks:
-            try:
-                (self.dir / fail_mark).unlink()
-            except OSError as exc:
-                tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
 
     def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         """Marks a spec as failing to install.
@@ -508,8 +483,6 @@ def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         Args:
             spec: spec that failed to install
         """
-        self._ensure_parent_directories()
-
         # Dump the spec to the failure file for (manual) debugging purposes
         path = self._path(spec)
         path.write_text(spec.to_json())
@@ -594,13 +567,17 @@ def __init__(
                 Relevant only if the repository is not an upstream.
         """
         self.root = root
-        self.database_directory = pathlib.Path(self.root) / _DB_DIRNAME
+        self.database_directory = os.path.join(self.root, _DB_DIRNAME)
         self.layout = layout
 
         # Set up layout of database files within the db dir
-        self._index_path = self.database_directory / INDEX_JSON_FILE
-        self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE
-        self._lock_path = self.database_directory / _LOCK_FILE
+        self._index_path = os.path.join(self.database_directory, "index.json")
+        self._verifier_path = os.path.join(self.database_directory, "index_verifier")
+        self._lock_path = os.path.join(self.database_directory, "lock")
+
+        # Create needed directories and files
+        if not is_upstream and not os.path.exists(self.database_directory):
+            fs.mkdirp(self.database_directory)
 
         self.is_upstream = is_upstream
         self.last_seen_verifier = ""
@@ -615,14 +592,14 @@ def __init__(
 
         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")
+        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
 
         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:
             self.lock = ForbiddenLock()
         else:
             self.lock = lk.Lock(
-                str(self._lock_path),
+                self._lock_path,
                 default_timeout=self.db_lock_timeout,
                 desc="database",
                 enable=lock_cfg.enable,
@@ -639,11 +616,6 @@ def __init__(
         self._write_transaction_impl = lk.WriteTransaction
         self._read_transaction_impl = lk.ReadTransaction
 
-    def _ensure_parent_directories(self):
-        """Create the parent directory for the DB, if necessary."""
-        if not self.is_upstream:
-            self.database_directory.mkdir(parents=True, exist_ok=True)
-
     def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return self._write_transaction_impl(self.lock, acquire=self._read, release=self._write)
@@ -658,8 +630,6 @@ def _write_to_file(self, stream):
 
         This function does not do any locking or transactions.
         """
-        self._ensure_parent_directories()
-
         # map from per-spec hash code to installation record.
         installs = dict(
             (k, v.to_dict(include_fields=self.record_fields)) for k, v in self._data.items()
@@ -789,7 +759,7 @@ def _read_from_file(self, filename):
         Does not do any locking.
         """
         try:
-            with open(str(filename), "r", encoding="utf-8") as f:
+            with open(filename, "r", encoding="utf-8") as f:
                 # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
                 fdata, _ = JSONDecoder().raw_decode(f.read())
         except Exception as e:
@@ -890,13 +860,11 @@ def reindex(self):
         if self.is_upstream:
             raise UpstreamDatabaseLockingError("Cannot reindex an upstream database")
 
-        self._ensure_parent_directories()
-
         # Special transaction to avoid recursive reindex calls and to
        # ignore errors if we need to rebuild a corrupt database.
         def _read_suppress_error():
             try:
-                if self._index_path.is_file():
+                if os.path.isfile(self._index_path):
                     self._read_from_file(self._index_path)
             except CorruptDatabaseError as e:
                 tty.warn(f"Reindexing corrupt database, error was: {e}")
@@ -1039,7 +1007,7 @@ def _check_ref_counts(self):
                     % (key, found, expected, self._index_path)
                 )
 
-    def _write(self, type=None, value=None, traceback=None):
+    def _write(self, type, value, traceback):
         """Write the in-memory database index to its file path.
 
         This is a helper function called by the WriteTransaction context
@@ -1050,8 +1018,6 @@ def _write(self, type=None, value=None, traceback=None):
 
         This routine does no locking.
        """
-        self._ensure_parent_directories()
-
         # Do not write if exceptions were raised
         if type is not None:
             # A failure interrupted a transaction, so we should record that
@@ -1060,16 +1026,16 @@ def _write(self, type=None, value=None, traceback=None):
             self._state_is_inconsistent = True
             return
 
-        temp_file = str(self._index_path) + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
+        temp_file = self._index_path + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
 
         # Write a temporary database file them move it into place
         try:
             with open(temp_file, "w", encoding="utf-8") as f:
                 self._write_to_file(f)
-            fs.rename(temp_file, str(self._index_path))
+            fs.rename(temp_file, self._index_path)
 
             if _use_uuid:
-                with self._verifier_path.open("w", encoding="utf-8") as f:
+                with open(self._verifier_path, "w", encoding="utf-8") as f:
                     new_verifier = str(uuid.uuid4())
                     f.write(new_verifier)
                 self.last_seen_verifier = new_verifier
@@ -1082,11 +1048,11 @@ def _write(self, type=None, value=None, traceback=None):
 
     def _read(self):
         """Re-read Database from the data in the set location. This does no locking."""
-        if self._index_path.is_file():
+        if os.path.isfile(self._index_path):
             current_verifier = ""
             if _use_uuid:
                 try:
-                    with self._verifier_path.open("r", encoding="utf-8") as f:
+                    with open(self._verifier_path, "r", encoding="utf-8") as f:
                         current_verifier = f.read()
                 except BaseException:
                     pass
@@ -1099,7 +1065,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            tty.warn(f"upstream not found: {self._index_path}")
+            tty.warn("upstream not found: {0}".format(self._index_path))
 
     def _add(
         self,
@@ -1364,7 +1330,7 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
     def installed_relatives(
         self,
         spec: "spack.spec.Spec",
-        direction: tr.DirectionType = "children",
+        direction: str = "children",
         transitive: bool = True,
         deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
     ) -> Set["spack.spec.Spec"]:
@@ -1715,7 +1681,7 @@ def query(
         )
 
         results = list(local_results) + list(x for x in upstream_results if x not in local_results)
-        results.sort()  # type: ignore[call-overload]
+        results.sort()
         return results
 
     def query_one(
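Note: the `_write` hunks above keep the same atomic-update scheme on both sides of the diff: serialize to a uniquely named temp file, rename it over `index.json`, then refresh a small UUID "verifier" file that lets other processes detect that the index changed without re-reading it. A self-contained sketch of that scheme using plain `os` calls (not Spack's wrappers; paths hypothetical):

```python
import json
import os
import socket
import uuid


def write_index(index_path: str, data: dict) -> str:
    """Atomically replace index_path and return the new verifier token."""
    temp_file = index_path + ".%s.%s.temp" % (socket.getfqdn(), os.getpid())
    with open(temp_file, "w", encoding="utf-8") as f:
        json.dump(data, f)
    # os.replace() is atomic on POSIX, so readers see either the old or
    # the new index, never a partially written file.
    os.replace(temp_file, index_path)

    verifier = str(uuid.uuid4())
    with open(index_path + ".verifier", "w", encoding="utf-8") as f:
        f.write(verifier)
    return verifier


token = write_index("/tmp/index.json", {"installs": {}})
print(token)
```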
@@ -15,6 +15,7 @@
 import glob
 import itertools
 import os
+import os.path
 import pathlib
 import re
 import sys
@@ -7,6 +7,7 @@
 import collections
 import concurrent.futures
 import os
+import os.path
 import re
 import sys
 import traceback
@@ -32,7 +32,7 @@ class OpenMpi(Package):
 """
 import collections
 import collections.abc
-import os
+import os.path
 import re
 from typing import Any, Callable, List, Optional, Tuple, Type, Union
 
@@ -8,7 +8,7 @@
 import shutil
 import sys
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple
+from typing import List, Optional, Tuple
 
 import llnl.util.filesystem as fs
 from llnl.util.symlink import readlink
@@ -17,6 +17,7 @@
 import spack.hash_types as ht
 import spack.projections
 import spack.spec
+import spack.store
 import spack.util.spack_json as sjson
 from spack.error import SpackError
 
@@ -68,9 +69,10 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
 
 
 class DirectoryLayout:
-    """A directory layout is used to associate unique paths with specs. Different installations are
-    going to want different layouts for their install, and they can use this to customize the
-    nesting structure of spack installs. The default layout is:
+    """A directory layout is used to associate unique paths with specs.
+    Different installations are going to want different layouts for their
+    install, and they can use this to customize the nesting structure of
+    spack installs. The default layout is:
 
     * <install root>/
 
@@ -80,30 +82,35 @@ class DirectoryLayout:
 
     * <name>-<version>-<hash>
 
-    The installation directory projections can be modified with the projections argument."""
+    The hash here is a SHA-1 hash for the full DAG plus the build
+    spec.
 
-    def __init__(
-        self,
-        root,
-        *,
-        projections: Optional[Dict[str, str]] = None,
-        hash_length: Optional[int] = None,
-    ) -> None:
+    The installation directory projections can be modified with the
+    projections argument.
+    """
+
+    def __init__(self, root, **kwargs):
         self.root = root
-        projections = projections or default_projections
-        self.projections = {key: projection.lower() for key, projection in projections.items()}
+        self.check_upstream = True
+        projections = kwargs.get("projections") or default_projections
+        self.projections = dict(
+            (key, projection.lower()) for key, projection in projections.items()
+        )
 
         # apply hash length as appropriate
-        self.hash_length = hash_length
+        self.hash_length = kwargs.get("hash_length", None)
         if self.hash_length is not None:
             for when_spec, projection in self.projections.items():
                 if "{hash}" not in projection:
-                    raise InvalidDirectoryLayoutParametersError(
-                        "Conflicting options for installation layout hash length"
-                        if "{hash" in projection
-                        else "Cannot specify hash length when the hash is not part of all "
-                        "install_tree projections"
-                    )
+                    if "{hash" in projection:
+                        raise InvalidDirectoryLayoutParametersError(
+                            "Conflicting options for installation layout hash" " length"
+                        )
+                    else:
+                        raise InvalidDirectoryLayoutParametersError(
+                            "Cannot specify hash length when the hash is not"
+                            " part of all install_tree projections"
+                        )
                 self.projections[when_spec] = projection.replace(
                     "{hash}", "{hash:%d}" % self.hash_length
                 )
@@ -272,6 +279,13 @@ def path_for_spec(self, spec):
 
         if spec.external:
             return spec.external_path
+        if self.check_upstream:
+            upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
+            if upstream:
+                raise SpackError(
+                    "Internal error: attempted to call path_for_spec on"
+                    " upstream-installed package."
+                )
 
         path = self.relative_path_for_spec(spec)
         assert not path.startswith(self.root)
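Note: the `DirectoryLayout.__init__` change above is the classic `**kwargs` → keyword-only-parameters migration (read left to right, it is being undone). A minimal sketch of why the keyword-only form is preferable (hypothetical class, not Spack's):

```python
from typing import Dict, Optional


class Layout:
    # Keyword-only parameters: a typo like `hash_len=7` fails immediately
    # instead of being silently ignored by kwargs.get(), and type checkers
    # see the real signature.
    def __init__(
        self,
        root: str,
        *,
        projections: Optional[Dict[str, str]] = None,
        hash_length: Optional[int] = None,
    ) -> None:
        self.root = root
        self.projections = projections or {}
        self.hash_length = hash_length


layout = Layout("/opt/software", hash_length=7)
print(layout.root, layout.hash_length)
```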
@@ -10,7 +10,6 @@
 
 import spack.environment as ev
 import spack.repo
-import spack.schema.environment
 import spack.store
 from spack.util.environment import EnvironmentModifications
 
@@ -157,11 +156,6 @@ def activate(
     # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
     # become PATH variables.
     #
-    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
-    if env_vars_yaml:
-        env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
-
     try:
         if view and env.has_view(view):
             with spack.store.STORE.db.read_transaction():
@@ -195,10 +189,6 @@ def deactivate() -> EnvironmentModifications:
     if active is None:
         return env_mods
 
-    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
-    if env_vars_yaml:
-        env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
-
     active_view = os.getenv(ev.spack_env_view_var)
 
     if active_view and active.has_view(active_view):
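Note: the deleted `activate`/`deactivate` lines are symmetric: whatever `env_vars` modifications are applied on activation are replayed in reverse on deactivation. A sketch of that apply/undo pairing using a toy object (Spack's real class is `spack.util.environment.EnvironmentModifications`; the toy below only mimics the idea):

```python
import os
from typing import List, Tuple


class Mods:
    """Toy stand-in for an EnvironmentModifications-style object."""

    def __init__(self) -> None:
        self.ops: List[Tuple[str, str, str]] = []  # (op, name, value)

    def set(self, name: str, value: str) -> None:
        self.ops.append(("set", name, value))

    def reversed(self) -> "Mods":
        # Undo in reverse order, mapping each op to its inverse.
        undo = Mods()
        for op, name, value in reversed(self.ops):
            if op == "set":
                undo.ops.append(("unset", name, value))
        return undo

    def apply(self) -> None:
        for op, name, value in self.ops:
            if op == "set":
                os.environ[name] = value
            else:
                os.environ.pop(name, None)


mods = Mods()
mods.set("MY_PROJECT_ROOT", "/tmp/demo")
mods.apply()             # activation path
mods.reversed().apply()  # deactivation path undoes it
assert "MY_PROJECT_ROOT" not in os.environ
```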
@@ -15,10 +15,6 @@
 SHOW_BACKTRACE = False
 
 
-class SpackAPIWarning(UserWarning):
-    """Warning that formats with file and line number."""
-
-
 class SpackError(Exception):
     """This is the superclass for all Spack errors.
     Subclasses can be found in the modules they have to do with.
@@ -25,6 +25,7 @@
 import functools
 import http.client
 import os
+import os.path
 import re
 import shutil
 import urllib.error
@@ -320,15 +321,9 @@ def _fetch_urllib(self, url):
 
         request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
 
-        if os.path.lexists(save_file):
-            os.remove(save_file)
-
         try:
             response = web_util.urlopen(request)
-            tty.msg(f"Fetching {url}")
-            with open(save_file, "wb") as f:
-                shutil.copyfileobj(response, f)
-        except OSError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@@ -336,6 +331,14 @@ def _fetch_urllib(self, url):
                 os.remove(save_file)
             raise FailedDownloadError(e) from e
 
+        tty.msg(f"Fetching {url}")
+
+        if os.path.lexists(save_file):
+            os.remove(save_file)
+
+        with open(save_file, "wb") as f:
+            shutil.copyfileobj(response, f)
+
         # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
         # mirror that is broken, leading to spurious download failures. In that case it's helpful
         # for users to know which URL was actually fetched.
@@ -532,16 +535,11 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
     @_needs_stage
     def fetch(self):
         file = self.stage.save_filename
-
-        if os.path.lexists(file):
-            os.remove(file)
+        tty.msg(f"Fetching {self.url}")
 
         try:
             response = self._urlopen(self.url)
-            tty.msg(f"Fetching {self.url}")
-            with open(file, "wb") as f:
-                shutil.copyfileobj(response, f)
-        except OSError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@@ -549,6 +547,12 @@ def fetch(self):
                 os.remove(file)
             raise FailedDownloadError(e) from e
 
+        if os.path.lexists(file):
+            os.remove(file)
+
+        with open(file, "wb") as f:
+            shutil.copyfileobj(response, f)
+
 
 class VCSFetchStrategy(FetchStrategy):
     """Superclass for version control system fetch strategies.
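Note: both fetch hunks above move the same three statements. On the removed side the destination file is only truncated *before* `urlopen` runs; on the restored side the stale copy is removed only after the connection succeeds, so a network failure cannot destroy a previously downloaded archive. A generic sketch of the safer ordering (plain stdlib, not Spack's wrappers):

```python
import os
import shutil
import urllib.request


def fetch(url: str, save_file: str) -> None:
    # Open the connection first; if this raises, save_file is untouched.
    response = urllib.request.urlopen(url, timeout=10)

    # Only now remove any stale copy and stream the new one into place.
    if os.path.lexists(save_file):
        os.remove(save_file)
    with open(save_file, "wb") as f:
        shutil.copyfileobj(response, f)
```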
@@ -35,6 +35,7 @@
 
 import spack.config
 import spack.directory_layout
+import spack.paths
 import spack.projections
 import spack.relocate
 import spack.schema.projections
@@ -43,6 +44,7 @@
 import spack.util.spack_json as s_json
 import spack.util.spack_yaml as s_yaml
 from spack.error import SpackError
+from spack.hooks import sbang
 
 __all__ = ["FilesystemView", "YamlFilesystemView"]
 
@@ -89,10 +91,16 @@ def view_copy(
     if stat.S_ISLNK(src_stat.st_mode):
         spack.relocate.relocate_links(links=[dst], prefix_to_prefix=prefix_to_projection)
     elif spack.relocate.is_binary(dst):
-        spack.relocate.relocate_text_bin(binaries=[dst], prefix_to_prefix=prefix_to_projection)
+        spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
     else:
         prefix_to_projection[spack.store.STORE.layout.root] = view._root
-        spack.relocate.relocate_text(files=[dst], prefix_to_prefix=prefix_to_projection)
+
+        # This is vestigial code for the *old* location of sbang.
+        prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
+            sbang.sbang_shebang_line()
+        )
+
+        spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
 
     # The os module on Windows does not have a chown function.
     if sys.platform != "win32":
@@ -275,7 +275,7 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
     fs.mkdirp(pkg.prefix.bin)
     fs.touch(os.path.join(pkg.prefix.bin, command))
     if sys.platform != "win32":
-        chmod = which("chmod", required=True)
+        chmod = which("chmod")
         chmod("+x", os.path.join(pkg.prefix.bin, command))
 
     # Install fake header file
@@ -539,7 +539,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
     # Note that we copy them in as they are in the *install* directory
     # NOT as they are in the repository, because we want a snapshot of
     # how *this* particular build was done.
-    for node in spec.traverse(deptype="all"):
+    for node in spec.traverse(deptype=all):
         if node is not spec:
             # Locate the dependency package in the install tree and find
             # its provenance information.
@@ -14,6 +14,7 @@
 import io
 import operator
 import os
+import os.path
 import pstats
 import re
 import shlex
@@ -502,16 +503,16 @@ def make_argument_parser(**kwargs):
     return parser
 
 
-def showwarning(message, category, filename, lineno, file=None, line=None):
+def send_warning_to_tty(message, *args):
     """Redirects messages to tty.warn."""
-    if category is spack.error.SpackAPIWarning:
-        tty.warn(f"{filename}:{lineno}: {message}")
-    else:
-        tty.warn(message)
+    tty.warn(message)
 
 
 def setup_main_options(args):
     """Configure spack globals based on the basic options."""
+    # Assign a custom function to show warnings
+    warnings.showwarning = send_warning_to_tty
+
     # Set up environment based on args.
     tty.set_verbose(args.verbose)
     tty.set_debug(args.debug)
@@ -728,7 +729,7 @@ def _compatible_sys_types():
     with the current host.
     """
     host_platform = spack.platforms.host()
-    host_os = str(host_platform.default_operating_system())
+    host_os = str(host_platform.operating_system("default_os"))
     host_target = archspec.cpu.host()
     compatible_targets = [host_target] + host_target.ancestors
 
@@ -902,10 +903,9 @@ def _main(argv=None):
     # main() is tricky to get right, so be careful where you put things.
     #
     # Things in this first part of `main()` should *not* require any
-    # configuration. This doesn't include much -- setting up the parser,
+    # configuration. This doesn't include much -- setting up th parser,
     # restoring some key environment variables, very simple CLI options, etc.
     # ------------------------------------------------------------------------
-    warnings.showwarning = showwarning
 
     # Create a parser with a simple positional argument first. We'll
     # lazily load the subcommand(s) we need later. This allows us to
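Note: `warnings.showwarning` is a documented stdlib hook: assigning a callable with the signature `(message, category, filename, lineno, file=None, line=None)` replaces Python's default warning printer. The removed `showwarning` above uses the full signature to special-case one warning category; the restored `send_warning_to_tty` swallows the extra arguments. A standalone sketch of the hook (the category check is illustrative, using a stdlib class):

```python
import warnings


def showwarning(message, category, filename, lineno, file=None, line=None):
    # Route warnings through our own printer instead of stderr; include
    # location information only for categories we care about.
    if category is DeprecationWarning:
        print(f"WARN {filename}:{lineno}: {message}")
    else:
        print(f"WARN {message}")


warnings.showwarning = showwarning
warnings.warn("old API", DeprecationWarning)
```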
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path
 from typing import Optional
 
 import llnl.url
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path
 import traceback
 
 import llnl.util.tty as tty
@@ -31,7 +31,7 @@
 import copy
 import datetime
 import inspect
-import os
+import os.path
 import re
 import string
 from typing import List, Optional
@@ -4,7 +4,7 @@
 
 import collections
 import itertools
-import os
+import os.path
 from typing import Dict, List, Optional, Tuple
 
 import llnl.util.filesystem as fs
|
|||||||
"""This module implements the classes necessary to generate Tcl
|
"""This module implements the classes necessary to generate Tcl
|
||||||
non-hierarchical modules.
|
non-hierarchical modules.
|
||||||
"""
|
"""
|
||||||
import os
|
import os.path
|
||||||
from typing import Dict, Optional, Tuple
|
from typing import Dict, Optional, Tuple
|
||||||
|
|
||||||
import spack.config
|
import spack.config
|
||||||
|
|||||||
@@ -7,7 +7,6 @@
 import base64
 import json
 import re
-import socket
 import time
 import urllib.error
 import urllib.parse
@@ -411,7 +410,7 @@ def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except OSError as e:
+            except (urllib.error.URLError, TimeoutError) as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it
@@ -421,10 +420,9 @@ def wrapper(*args, **kwargs):
                         and (500 <= e.code < 600 or e.code == 429)
                     )
                     or (
-                        isinstance(e, urllib.error.URLError)
-                        and isinstance(e.reason, socket.timeout)
+                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
                     )
-                    or isinstance(e, socket.timeout)
+                    or isinstance(e, TimeoutError)
                 ):
                     # Exponential backoff
                     sleep(2**i)
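Note: the retry wrapper above is an exponential-backoff loop: attempt `i` sleeps `2**i` seconds before the next try, and only transient failures (HTTP 5xx, 429, timeouts) are retried. A generic sketch of the same shape:

```python
import functools
import time
import urllib.error


def default_retry(f, retries: int = 5, sleep=time.sleep):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except (urllib.error.URLError, TimeoutError) as e:
                # HTTPError subclasses URLError, so e.code is only checked
                # after narrowing the type.
                transient = isinstance(e, TimeoutError) or (
                    isinstance(e, urllib.error.HTTPError)
                    and (500 <= e.code < 600 or e.code == 429)
                )
                if not transient or i + 1 == retries:
                    raise
                sleep(2**i)  # 1s, 2s, 4s, ... between attempts

    return wrapper
```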
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import llnl.util.lang
 
+import spack.util.spack_yaml as syaml
+
 
 @llnl.util.lang.lazy_lexicographic_ordering
 class OperatingSystem:
@@ -40,4 +42,4 @@ def _cmp_iter(self):
         yield self.version
 
     def to_dict(self):
-        return {"name": self.name, "version": self.version}
+        return syaml.syaml_dict([("name", self.name), ("version", self.version)])
@@ -3,52 +3,29 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
# flake8: noqa: F401
|
# flake8: noqa: F401
|
||||||
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
|
"""spack.util.package is a set of useful build tools and directives for packages.
|
||||||
other modules. Packages should import this module, instead of importing from spack.* directly
|
|
||||||
to ensure forward compatibility with future versions of Spack."""
|
|
||||||
|
|
||||||
|
Everything in this module is automatically imported into Spack package files.
|
||||||
|
"""
|
||||||
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
|
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
|
||||||
from shutil import move, rmtree
|
from shutil import move, rmtree
|
||||||
|
|
||||||
|
from spack.error import InstallError, NoHeadersError, NoLibrariesError
|
||||||
|
|
||||||
|
# Emulate some shell commands for convenience
|
||||||
|
env = environ
|
||||||
|
cd = chdir
|
||||||
|
pwd = getcwd
|
||||||
|
|
||||||
# import most common types used in packages
|
# import most common types used in packages
|
||||||
from typing import Dict, List, Optional
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
from llnl.util.filesystem import (
|
import llnl.util.filesystem
|
||||||
FileFilter,
|
from llnl.util.filesystem import *
|
||||||
FileList,
|
|
||||||
HeaderList,
|
|
||||||
LibraryList,
|
|
||||||
ancestor,
|
|
||||||
can_access,
|
|
||||||
change_sed_delimiter,
|
|
||||||
copy,
|
|
||||||
copy_tree,
|
|
||||||
filter_file,
|
|
||||||
find,
|
|
||||||
find_all_headers,
|
|
||||||
find_first,
|
|
||||||
find_headers,
|
|
||||||
find_libraries,
|
|
||||||
find_system_libraries,
|
|
||||||
force_remove,
|
|
||||||
force_symlink,
|
|
||||||
install,
|
|
||||||
install_tree,
|
|
||||||
is_exe,
|
|
||||||
join_path,
|
|
||||||
keep_modification_time,
|
|
||||||
library_extensions,
|
|
||||||
mkdirp,
|
|
||||||
remove_directory_contents,
|
|
||||||
remove_linked_tree,
|
|
||||||
rename,
|
|
||||||
set_executable,
|
|
||||||
set_install_permissions,
|
|
||||||
touch,
|
|
||||||
working_dir,
|
|
||||||
)
|
|
||||||
from llnl.util.symlink import symlink
|
from llnl.util.symlink import symlink
|
||||||
|
|
||||||
|
import spack.util.executable
|
||||||
|
|
||||||
# These props will be overridden when the build env is set up.
|
# These props will be overridden when the build env is set up.
|
||||||
from spack.build_environment import MakeExecutable
|
from spack.build_environment import MakeExecutable
|
||||||
from spack.build_systems.aspell_dict import AspellDictPackage
|
from spack.build_systems.aspell_dict import AspellDictPackage
|
||||||
@@ -99,24 +76,7 @@
|
|||||||
from spack.builder import BaseBuilder
|
from spack.builder import BaseBuilder
|
||||||
from spack.config import determine_number_of_jobs
|
from spack.config import determine_number_of_jobs
|
||||||
from spack.deptypes import ALL_TYPES as all_deptypes
|
from spack.deptypes import ALL_TYPES as all_deptypes
|
||||||
from spack.directives import (
|
from spack.directives import *
|
||||||
build_system,
|
|
||||||
can_splice,
|
|
||||||
conditional,
|
|
||||||
conflicts,
|
|
||||||
depends_on,
|
|
||||||
extends,
|
|
||||||
license,
|
|
||||||
maintainers,
|
|
||||||
patch,
|
|
||||||
provides,
|
|
||||||
redistribute,
|
|
||||||
requires,
|
|
||||||
resource,
|
|
||||||
variant,
|
|
||||||
version,
|
|
||||||
)
|
|
||||||
from spack.error import InstallError, NoHeadersError, NoLibrariesError
|
|
||||||
from spack.install_test import (
|
from spack.install_test import (
|
||||||
SkipTest,
|
SkipTest,
|
||||||
cache_extra_test_sources,
|
cache_extra_test_sources,
|
||||||
@@ -126,36 +86,28 @@
     install_test_root,
     test_part,
 )
+from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
 from spack.mixins import filter_compiler_wrappers
 from spack.multimethod import default_args, when
-from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
-from spack.package_completions import (
-    bash_completion_path,
-    fish_completion_path,
-    zsh_completion_path,
-)
+from spack.package_base import (
+    DependencyConflictError,
+    build_system_flags,
+    env_flags,
+    flatten_dependencies,
+    inject_flags,
+    install_dependency_symlinks,
+    on_package_attributes,
+)
+from spack.package_completions import *
 from spack.phase_callbacks import run_after, run_before
-from spack.spec import Spec
-from spack.util.executable import Executable, ProcessError, which, which_string
+from spack.spec import InvalidSpecDetected, Spec
+from spack.util.executable import *
 from spack.util.filesystem import fix_darwin_install_name
 from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
 from spack.version import Version, ver
 
-# Emulate some shell commands for convenience
-env = environ
-cd = chdir
-pwd = getcwd
-
-# These are just here for editor support; they may be set when the build env is set up.
-configure: Executable
-make_jobs: int
-make: MakeExecutable
-ninja: MakeExecutable
-python_include: str
-python_platlib: str
-python_purelib: str
-python: Executable
-spack_cc: str
-spack_cxx: str
-spack_f77: str
-spack_fc: str
+# These are just here for editor support; they will be replaced when the build env
+# is set up.
+make = MakeExecutable("make", jobs=1)
+ninja = MakeExecutable("ninja", jobs=1)
+configure = Executable(join_path(".", "configure"))
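Context for the import hunks above: spack.package is the flat namespace that package recipes star-import, which is why the head version re-exports whole modules rather than named symbols. A minimal, hypothetical recipe built on that surface might look like the sketch below (the package name, URL, and checksum are placeholders, not from this diff):

    # Hypothetical package.py; "mylib" and its URL are illustrative only.
    from spack.package import *


    class Mylib(MakefilePackage):
        """Example recipe using the flat spack.package namespace."""

        homepage = "https://example.com/mylib"
        url = "https://example.com/mylib-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum
        variant("shared", default=True, description="Build shared libraries")
        depends_on("zlib")

        def edit(self, spec, prefix):
            # filter_file arrives via the llnl.util.filesystem star import
            filter_file(r"^PREFIX\s*=.*", f"PREFIX = {prefix}", "Makefile")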
@@ -30,6 +30,7 @@
 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
 from llnl.util.lang import classproperty, memoized
+from llnl.util.link_tree import LinkTree
 
 import spack.compilers
 import spack.config
@@ -766,9 +767,6 @@ def __init__(self, spec):
         self.win_rpath = fsys.WindowsSimulatedRPath(self)
         super().__init__()
 
-    def __getitem__(self, key: str) -> "PackageBase":
-        return self.spec[key].package
-
     @classmethod
     def dependency_names(cls):
         return _subkeys(cls.dependencies)
@@ -1098,14 +1096,14 @@ def update_external_dependencies(self, extendee_spec=None):
         """
         pass
 
-    def detect_dev_src_change(self) -> bool:
+    def detect_dev_src_change(self):
         """
         Method for checking for source code changes to trigger rebuild/reinstall
         """
         dev_path_var = self.spec.variants.get("dev_path", None)
         _, record = spack.store.STORE.db.query_by_spec_hash(self.spec.dag_hash())
-        assert dev_path_var and record, "dev_path variant and record must be present"
-        return fsys.recursive_mtime_greater_than(dev_path_var.value, record.installation_time)
+        mtime = fsys.last_modification_time_recursive(dev_path_var.value)
+        return mtime > record.installation_time
 
     def all_urls_for_version(self, version: StandardVersion) -> List[str]:
         """Return all URLs derived from version_urls(), url, urls, and
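The detect_dev_src_change() hunk above reverts from the recursive_mtime_greater_than helper to an explicit last-modification-time comparison against the database record's installation time. A self-contained sketch of the underlying check (an approximation, not Spack's actual helper, which may differ in symlink and error handling):

    import os


    def recursive_mtime_greater_than(path: str, cutoff: float) -> bool:
        """Return True as soon as any file under `path` is newer than `cutoff`."""
        for root, _dirs, files in os.walk(path):
            for fname in files:
                try:
                    if os.lstat(os.path.join(root, fname)).st_mtime > cutoff:
                        return True
                except OSError:
                    continue  # file vanished mid-scan; ignore it
        return False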
@@ -1818,6 +1816,12 @@ def _has_make_target(self, target):
         Returns:
             bool: True if 'target' is found, else False
         """
+        # Prevent altering LC_ALL for 'make' outside this function
+        make = copy.deepcopy(self.module.make)
+
+        # Use English locale for missing target message comparison
+        make.add_default_env("LC_ALL", "C")
+
         # Check if we have a Makefile
         for makefile in ["GNUmakefile", "Makefile", "makefile"]:
             if os.path.exists(makefile):
@@ -1826,12 +1830,6 @@ def _has_make_target(self, target):
             tty.debug("No Makefile found in the build directory")
             return False
 
-        # Prevent altering LC_ALL for 'make' outside this function
-        make = copy.deepcopy(self.module.make)
-
-        # Use English locale for missing target message comparison
-        make.add_default_env("LC_ALL", "C")
-
         # Check if 'target' is a valid target.
         #
         # `make -n target` performs a "dry run". It prints the commands that
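The two hunks above only move the locale setup earlier in _has_make_target; the detection trick itself is unchanged: run make -n <target> (a dry run) with LC_ALL=C and match GNU make's English missing-target message. A plain-subprocess sketch of the same idea, assuming GNU make on PATH (Spack itself goes through its Executable wrapper instead):

    import os
    import subprocess


    def has_make_target(target: str, cwd: str = ".") -> bool:
        """Dry-run `make -n <target>` and look for the missing-target error."""
        env = dict(os.environ, LC_ALL="C")  # English messages for a stable match
        proc = subprocess.run(
            ["make", "-n", target], cwd=cwd, env=env, capture_output=True, text=True
        )
        if proc.returncode == 0:
            return True
        # GNU make reports a missing target as: "No rule to make target `foo'"
        return "No rule to make target" not in proc.stderr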
@@ -2291,6 +2289,19 @@ def rpath_args(self):
 build_system_flags = PackageBase.build_system_flags
 
 
+def install_dependency_symlinks(pkg, spec, prefix):
+    """
+    Execute a dummy install and flatten dependencies.
+
+    This routine can be used in a ``package.py`` definition by setting
+    ``install = install_dependency_symlinks``.
+
+    This feature comes in handy for creating a common location for the
+    the installation of third-party libraries.
+    """
+    flatten_dependencies(spec, prefix)
+
+
 def use_cray_compiler_names():
     """Compiler names for builds that rely on cray compiler names."""
     os.environ["CC"] = "cc"
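The restored docstring above already names the intended use. Concretely, a hypothetical recipe opts in by assigning the function as its install phase; everything below is illustrative, including the class name and version:

    from spack.package import *


    class ThirdPartyBundle(Package):  # hypothetical recipe
        """Gather every dependency under this package's prefix via symlinks."""

        homepage = "https://example.com"  # placeholder
        version("1.0")

        # Swap the normal install phase for the dependency flattener
        # defined in the hunk above.
        install = install_dependency_symlinks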
@@ -2299,6 +2310,23 @@ def use_cray_compiler_names():
     os.environ["F77"] = "ftn"
 
 
+def flatten_dependencies(spec, flat_dir):
+    """Make each dependency of spec present in dir via symlink."""
+    for dep in spec.traverse(root=False):
+        name = dep.name
+
+        dep_path = spack.store.STORE.layout.path_for_spec(dep)
+        dep_files = LinkTree(dep_path)
+
+        os.mkdir(flat_dir + "/" + name)
+
+        conflict = dep_files.find_conflict(flat_dir + "/" + name)
+        if conflict:
+            raise DependencyConflictError(conflict)
+
+        dep_files.merge(flat_dir + "/" + name)
+
+
 def possible_dependencies(
     *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
     transitive: bool = True,
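flatten_dependencies above leans on LinkTree.merge to mirror each dependency prefix into the flat directory. As a rough, self-contained approximation of what that merge does (the real LinkTree also detects conflicts before touching anything):

    import os


    def merge_tree(src: str, dest: str) -> None:
        """Mirror `src` into `dest`: directories created, files symlinked."""
        for root, _dirs, files in os.walk(src):
            rel = os.path.relpath(root, src)
            target_dir = dest if rel == "." else os.path.join(dest, rel)
            os.makedirs(target_dir, exist_ok=True)
            for fname in files:
                link = os.path.join(target_dir, fname)
                if os.path.lexists(link):
                    raise FileExistsError(f"conflict: {link}")
                os.symlink(os.path.join(root, fname), link)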
@@ -4,9 +4,10 @@
 
 import hashlib
 import os
+import os.path
 import pathlib
 import sys
-from typing import Any, Dict, Optional, Set, Tuple, Type, Union
+from typing import Any, Dict, Optional, Tuple, Type, Union
 
 import llnl.util.filesystem
 from llnl.url import allowed_archive
@@ -503,38 +504,36 @@ def patch_for_package(self, sha256: str, pkg: "spack.package_base.PackageBase")
         patch_dict["sha256"] = sha256
         return from_dict(patch_dict, repository=self.repository)
 
-    def update_packages(self, pkgs_fullname: Set[str]) -> None:
+    def update_package(self, pkg_fullname: str) -> None:
         """Update the patch cache.
 
         Args:
             pkg_fullname: package to update.
         """
         # remove this package from any patch entries that reference it.
-        if self.index:
-            empty = []
-            for sha256, package_to_patch in self.index.items():
-                remove = []
-                for fullname, patch_dict in package_to_patch.items():
-                    if patch_dict["owner"] in pkgs_fullname:
-                        remove.append(fullname)
+        empty = []
+        for sha256, package_to_patch in self.index.items():
+            remove = []
+            for fullname, patch_dict in package_to_patch.items():
+                if patch_dict["owner"] == pkg_fullname:
+                    remove.append(fullname)
 
-                for fullname in remove:
-                    package_to_patch.pop(fullname)
+            for fullname in remove:
+                package_to_patch.pop(fullname)
 
-                if not package_to_patch:
-                    empty.append(sha256)
+            if not package_to_patch:
+                empty.append(sha256)
 
-            # remove any entries that are now empty
-            for sha256 in empty:
-                del self.index[sha256]
+        # remove any entries that are now empty
+        for sha256 in empty:
+            del self.index[sha256]
 
         # update the index with per-package patch indexes
-        for pkg_fullname in pkgs_fullname:
-            pkg_cls = self.repository.get_pkg_class(pkg_fullname)
-            partial_index = self._index_patches(pkg_cls, self.repository)
-            for sha256, package_to_patch in partial_index.items():
-                p2p = self.index.setdefault(sha256, {})
-                p2p.update(package_to_patch)
+        pkg_cls = self.repository.get_pkg_class(pkg_fullname)
+        partial_index = self._index_patches(pkg_cls, self.repository)
+        for sha256, package_to_patch in partial_index.items():
+            p2p = self.index.setdefault(sha256, {})
+            p2p.update(package_to_patch)
 
     def update(self, other: "PatchCache") -> None:
         """Update this cache with the contents of another.
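Both sides of the hunk above keep the same two-level index shape, sha256 -> {package fullname -> patch dict}; the revert only changes whether one package or a set of packages is pruned and re-indexed per call. A toy model of the pruning step, with made-up data:

    # Toy data, not Spack's real index.
    index = {
        "abc123": {"builtin.foo": {"owner": "builtin.foo"}},
        "def456": {"builtin.bar": {"owner": "builtin.bar"}},
    }


    def prune_owner(index: dict, owner: str) -> None:
        """Drop entries owned by `owner`, then drop empty sha256 buckets."""
        for sha256 in list(index):
            pkgs = index[sha256]
            for name in [n for n, p in pkgs.items() if p["owner"] == owner]:
                pkgs.pop(name)
            if not pkgs:
                del index[sha256]


    prune_owner(index, "builtin.foo")
    assert "abc123" not in index and "def456" in index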
@@ -52,7 +52,8 @@ def use_platform(new_platform):
 
     import spack.config
 
-    assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform'
+    msg = '"{0}" must be an instance of Platform'
+    assert isinstance(new_platform, Platform), msg.format(new_platform)
 
     original_host_fn = host
@@ -1,22 +1,42 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import warnings
 from typing import Optional
 
 import archspec.cpu
 
 import llnl.util.lang
 
+import spack.error
+
+
+class NoPlatformError(spack.error.SpackError):
+    def __init__(self):
+        msg = "Could not determine a platform for this machine"
+        super().__init__(msg)
+
+
 @llnl.util.lang.lazy_lexicographic_ordering
 class Platform:
     """Platform is an abstract class extended by subclasses.
 
+    To add a new type of platform (such as cray_xe), create a subclass and set all the
+    class attributes such as priority, front_target, back_target, front_os, back_os.
+
     Platform also contain a priority class attribute. A lower number signifies higher
     priority. These numbers are arbitrarily set and can be changed though often there
     isn't much need unless a new platform is added and the user wants that to be
     detected first.
 
+    Targets are created inside the platform subclasses. Most architecture (like linux,
+    and darwin) will have only one target family (x86_64) but in the case of Cray
+    machines, there is both a frontend and backend processor. The user can specify
+    which targets are present on front-end and back-end architecture.
+
+    Depending on the platform, operating systems are either autodetected or are
+    set. The user can set the frontend and backend operating setting by the class
+    attributes front_os and back_os. The operating system will be responsible for
+    compiler detection.
     """
 
     # Subclass sets number. Controls detection order
@@ -25,72 +45,82 @@ class Platform:
     #: binary formats used on this platform; used by relocation logic
     binary_formats = ["elf"]
 
-    default: str
-    default_os: str
+    front_end: Optional[str] = None
+    back_end: Optional[str] = None
+    default: Optional[str] = None  # The default back end target.
+
+    front_os: Optional[str] = None
+    back_os: Optional[str] = None
+    default_os: Optional[str] = None
 
     reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
     reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
-    deprecated_names = ["frontend", "fe", "backend", "be"]
 
     def __init__(self, name):
         self.targets = {}
         self.operating_sys = {}
         self.name = name
-        self._init_targets()
 
     def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
+        """Used by the platform specific subclass to list available targets.
+        Raises an error if the platform specifies a name
+        that is reserved by spack as an alias.
+        """
         if name in Platform.reserved_targets:
-            msg = f"{name} is a spack reserved alias and cannot be the name of a target"
-            raise ValueError(msg)
+            msg = "{0} is a spack reserved alias and cannot be the name of a target"
+            raise ValueError(msg.format(name))
         self.targets[name] = target
 
-    def _init_targets(self):
-        self.default = archspec.cpu.host().name
+    def _add_archspec_targets(self):
         for name, microarchitecture in archspec.cpu.TARGETS.items():
             self.add_target(name, microarchitecture)
 
     def target(self, name):
+        """This is a getter method for the target dictionary
+        that handles defaulting based on the values provided by default,
+        front-end, and back-end. This can be overwritten
+        by a subclass for which we want to provide further aliasing options.
+        """
+        # TODO: Check if we can avoid using strings here
         name = str(name)
-        if name in Platform.deprecated_names:
-            warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
-
-        if name in Platform.reserved_targets:
+        if name == "default_target":
             name = self.default
+        elif name == "frontend" or name == "fe":
+            name = self.front_end
+        elif name == "backend" or name == "be":
+            name = self.back_end
 
         return self.targets.get(name, None)
 
     def add_operating_system(self, name, os_class):
-        if name in Platform.reserved_oss + Platform.deprecated_names:
-            msg = f"{name} is a spack reserved alias and cannot be the name of an OS"
-            raise ValueError(msg)
+        """Add the operating_system class object into the
+        platform.operating_sys dictionary.
+        """
+        if name in Platform.reserved_oss:
+            msg = "{0} is a spack reserved alias and cannot be the name of an OS"
+            raise ValueError(msg.format(name))
         self.operating_sys[name] = os_class
 
-    def default_target(self):
-        return self.target(self.default)
-
-    def default_operating_system(self):
-        return self.operating_system(self.default_os)
-
     def operating_system(self, name):
-        if name in Platform.deprecated_names:
-            warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
-
-        if name in Platform.reserved_oss:
+        if name == "default_os":
             name = self.default_os
+        if name == "frontend" or name == "fe":
+            name = self.front_os
+        if name == "backend" or name == "be":
+            name = self.back_os
 
         return self.operating_sys.get(name, None)
 
     def setup_platform_environment(self, pkg, env):
-        """Platform-specific build environment modifications.
-        This method is meant toi be overridden by subclasses, when needed.
+        """Subclass can override this method if it requires any
+        platform-specific build environment modifications.
         """
         pass
 
     @classmethod
     def detect(cls):
-        """Returns True if the host platform is detected to be the current Platform class,
-        False otherwise.
+        """Return True if the the host platform is detected to be the current
+        Platform class, False otherwise.
 
         Derived classes are responsible for implementing this method.
         """
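The head version of Platform resolves the aliases default_target, frontend/fe, and backend/be at lookup time, where the base version funneled everything through reserved_targets plus a deprecation warning. Reduced to a standalone sketch, the head-side lookup is:

    from typing import Dict, Optional


    def resolve_target(
        name: str, targets: Dict[str, object], default: str, front_end: str, back_end: str
    ) -> Optional[object]:
        """Alias resolution as in the head version of Platform.target (sketch)."""
        if name == "default_target":
            name = default
        elif name in ("frontend", "fe"):
            name = front_end
        elif name in ("backend", "be"):
            name = back_end
        return targets.get(name)


    targets = {"x86_64": "generic", "core2": "tuned"}
    assert resolve_target("be", targets, "core2", "x86_64", "core2") == "tuned"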
@@ -105,7 +135,11 @@ def __str__(self):
     def _cmp_iter(self):
         yield self.name
         yield self.default
+        yield self.front_end
+        yield self.back_end
         yield self.default_os
+        yield self.front_os
+        yield self.back_os
 
         def targets():
             for t in sorted(self.targets.values()):
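The _cmp_iter hunk above matters because lazy_lexicographic_ordering derives all comparisons from that generator: objects compare field by field in yield order, so yielding front_end/back_end and front_os/back_os changes how platforms sort and hash. A minimal model of the mechanism, assuming nothing about the real decorator's internals:

    from itertools import zip_longest


    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def _cmp_iter(self):
            # Fields compare lexicographically in the order they are yielded.
            yield self.x
            yield self.y

        def __lt__(self, other):
            for a, b in zip_longest(self._cmp_iter(), other._cmp_iter()):
                if a != b:
                    # A shorter stream sorts first; otherwise compare the fields.
                    return (a is None) or (b is not None and a < b)
            return False


    assert Point(1, 2) < Point(1, 3)
    assert not (Point(2, 0) < Point(1, 9))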
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
+import os.path
 
 
 def slingshot_network():
@@ -4,6 +4,8 @@
 
 import platform as py_platform
 
+import archspec.cpu
+
 from spack.operating_systems.mac_os import MacOs
 from spack.version import Version
 
@@ -17,8 +19,18 @@ class Darwin(Platform):
 
     def __init__(self):
         super().__init__("darwin")
+        self._add_archspec_targets()
+
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
 
         mac_os = MacOs()
 
         self.default_os = str(mac_os)
+        self.front_os = str(mac_os)
+        self.back_os = str(mac_os)
+
         self.add_operating_system(str(mac_os), mac_os)
 
     @classmethod
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform
 
+import archspec.cpu
+
 from spack.operating_systems.freebsd import FreeBSDOs
 
 from ._platform import Platform
@@ -13,8 +15,18 @@ class FreeBSD(Platform):
 
     def __init__(self):
         super().__init__("freebsd")
+
+        self._add_archspec_targets()
+
+        # Get specific default
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
+
         os = FreeBSDOs()
         self.default_os = str(os)
+        self.front_os = self.default_os
+        self.back_os = self.default_os
         self.add_operating_system(str(os), os)
 
     @classmethod
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform
 
+import archspec.cpu
+
 from spack.operating_systems.linux_distro import LinuxDistro
 
 from ._platform import Platform
@@ -13,8 +15,18 @@ class Linux(Platform):
 
     def __init__(self):
         super().__init__("linux")
+
+        self._add_archspec_targets()
+
+        # Get specific default
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
+
         linux_dist = LinuxDistro()
         self.default_os = str(linux_dist)
+        self.front_os = self.default_os
+        self.back_os = self.default_os
         self.add_operating_system(str(linux_dist), linux_dist)
 
     @classmethod
@@ -16,19 +16,31 @@ class Test(Platform):
     if platform.system().lower() == "darwin":
         binary_formats = ["macho"]
 
+    if platform.machine() == "arm64":
+        front_end = "aarch64"
+        back_end = "m1"
+        default = "m1"
+    else:
+        front_end = "x86_64"
+        back_end = "core2"
+        default = "core2"
+
+    front_os = "redhat6"
+    back_os = "debian6"
     default_os = "debian6"
-    default = "m1" if platform.machine() == "arm64" else "core2"
 
     def __init__(self, name=None):
        name = name or "test"
        super().__init__(name)
-        self.add_operating_system("debian6", spack.operating_systems.OperatingSystem("debian", 6))
-        self.add_operating_system("redhat6", spack.operating_systems.OperatingSystem("redhat", 6))
-
-    def _init_targets(self):
-        targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2")
-        for t in targets:
-            self.add_target(t, archspec.cpu.TARGETS[t])
+        self.add_target(self.default, archspec.cpu.TARGETS[self.default])
+        self.add_target(self.front_end, archspec.cpu.TARGETS[self.front_end])
+
+        self.add_operating_system(
+            self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
+        )
+        self.add_operating_system(
+            self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
+        )
 
     @classmethod
     def detect(cls):
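Each Platform subclass in the hunks above pairs a priority class attribute ("Controls detection order") with a detect() classmethod. Host platform selection is then conceptually a min-by-priority scan over the subclasses that detect themselves; a sketch under that assumption, not Spack's literal lookup code:

    def host_platform(candidates):
        """Pick the detected platform class with the lowest (best) priority.

        `candidates` is any iterable of Platform-like classes exposing
        `priority` and `detect()`.
        """
        detected = [cls for cls in candidates if cls.detect()]
        if not detected:
            # Mirrors the NoPlatformError message restored in the diff above.
            raise RuntimeError("Could not determine a platform for this machine")
        return min(detected, key=lambda cls: cls.priority)()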
@@ -4,6 +4,8 @@
 
 import platform
 
+import archspec.cpu
+
 from spack.operating_systems.windows_os import WindowsOs
 
 from ._platform import Platform
@@ -14,8 +16,18 @@ class Windows(Platform):
 
     def __init__(self):
         super().__init__("windows")
+        self._add_archspec_targets()
+
+        self.default = archspec.cpu.host().name
+        self.front_end = self.default
+        self.back_end = self.default
 
         windows_os = WindowsOs()
 
         self.default_os = str(windows_os)
+        self.front_os = str(windows_os)
+        self.back_os = str(windows_os)
 
         self.add_operating_system(str(windows_os), windows_os)
 
     @classmethod
Some files were not shown because too many files have changed in this diff.