Compare commits: psakiev/ct ... develop-20
53 Commits
Commit SHAs:
35ae2743d9, cd3bd453d3, fc6cd7c51f, cfee88a5bb, 7711730f2c, 14e8902854, 97961555dc, 51df7e088a,
b28583bc58, f9f6f094c3, e780a83ac6, 7f57a85514, e4927b35d1, 098ad7ffc0, db7aece186, 0e9f131b44,
1d18f571ae, 586360a8fe, b1db22d406, 7395656663, d0b736607b, fe5d7881f5, 28e3295fb0, 314a3fbe77,
6df831ef00, 9818002219, 1768b923f1, aa6651fe27, 3ded2fc9c5, 623c5a4d24, 673565aefe, 930e711771,
b1905186a6, 176b7f8854, 0cb4db950f, bba66f9dae, 9c222aee67, fcc28d72e8, 8bc897cee1, 2b70f8367c,
a9a23f4565, 0d86ecf122, 654d6f1397, ade9c8da0e, f4f59b7f18, 61d6c5486c, 576251f0da, 55e0ef1e64,
ef0e54726d, 8225b18985, 02320b18f3, b4e32706db, bcde9a3afb
.github/workflows/audit.yaml (vendored, 17 lines changed)

@@ -40,6 +40,8 @@ jobs:
run: |
python -m pip install --upgrade pywin32
- name: Package audits (with coverage)
env:
COVERAGE_FILE: coverage/.coverage-audits-${{ matrix.system.os }}
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh

@@ -47,27 +49,26 @@ jobs:
coverage run $(which spack) audit configs
coverage run $(which spack) -d audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit configs
spack -d audit externals
- name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }}
run: |
. share/spack/setup-env.sh
. share/spack/setup-env.sh
spack -d audit packages
./share/spack/qa/validate_last_exit.ps1
spack -d audit configs
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
if: ${{ inputs.with_coverage == 'true' }}
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
with:
flags: unittests,audits
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
name: coverage-audits-${{ matrix.system.os }}
path: coverage
include-hidden-files: true
.github/workflows/ci.yaml (vendored, 24 lines changed)

@@ -84,8 +84,30 @@ jobs:
needs: [ prechecks, changes ]
uses: ./.github/workflows/unit_tests.yaml
secrets: inherit
upload-coverage:
needs: [ unit-tests, prechecks ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- name: Download coverage files
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
pattern: coverage-*
path: coverage
merge-multiple: true
- run: pip install --upgrade coverage
- run: ls -la coverage
- run: coverage combine -a coverage/.coverage*
- run: coverage xml
- name: "Upload coverage"
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
all:
needs: [ unit-tests, bootstrap ]
needs: [ upload-coverage, bootstrap ]
runs-on: ubuntu-latest
steps:
- name: Success
.github/workflows/unit_tests.yaml (vendored, 47 lines changed)

@@ -76,14 +76,15 @@ jobs:
SPACK_PYTHON: python
SPACK_TEST_PARALLEL: 2
COVERAGE: true
COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
include-hidden-files: true
# Test shell integration
shell:
runs-on: ubuntu-latest

@@ -112,11 +113,11 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
name: coverage-shell
path: coverage
include-hidden-files: true

# Test RHEL8 UBI with platform Python. This job is run
# only on PRs modifying core Spack

@@ -170,13 +171,14 @@ jobs:
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
COVERAGE_FILE: coverage/.coverage-clingo-cffi
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
name: coverage-clingo-cffi
path: coverage
include-hidden-files: true
# Run unit tests on MacOS
macos:
runs-on: ${{ matrix.os }}

@@ -201,6 +203,7 @@ jobs:
- name: Run unit tests
env:
SPACK_TEST_PARALLEL: 4
COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
run: |
git --version
. .github/workflows/bin/setup_git.sh

@@ -209,11 +212,11 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
include-hidden-files: true
# Run unit tests on Windows
windows:
defaults:

@@ -235,13 +238,13 @@ jobs:
run: |
./.github/workflows/bin/setup_git.ps1
- name: Unit Test
env:
COVERAGE_FILE: coverage/.coverage-windows
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
name: coverage-windows
path: coverage
include-hidden-files: true
.github/workflows/valid-style.yml (vendored, 59 lines changed)

@@ -87,3 +87,62 @@ jobs:
spack -d bootstrap now --dev
spack style -t black
spack unit-test -V
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2

# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^

- name: Checkout new commit
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
path: new
- name: Install circular import checker
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
repository: haampie/circular-import-fighter
ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Import cycles before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old && cp solution solution.old
- name: Import cycles after
working-directory: circular-import-fighter
run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
@@ -1175,6 +1175,17 @@ unspecified version, but packages can depend on other packages with
could depend on ``mpich@1.2:`` if it can only build with version
``1.2`` or higher of ``mpich``.

.. note:: Windows Spec Syntax Caveats
Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
Spack's spec dependency syntax uses the caret (``^``) character; however, this is an escape character in CMD,
so it must be escaped with an additional caret (i.e. ``^^``).
CMD will also attempt to interpret strings with ``=`` characters in them. Any spec including this symbol
must be double quoted.

Note: all of these issues are unique to CMD; they can be avoided by using PowerShell.

For more context on these caveats see the related issues: `caret <https://github.com/spack/spack/issues/42833>`_ and `equals <https://github.com/spack/spack/issues/43348>`_
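The following is an illustration only and is not part of this changeset; the package names are placeholders chosen to show the quoting rules described above.

```console
rem CMD: double the caret and double quote any spec containing '='
spack install hdf5 ^^zlib
spack install "hdf5 cflags=-O2"

# PowerShell: the same specs need no extra escaping
spack install hdf5 ^zlib
spack install hdf5 cflags=-O2
```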
Below are more details about the specifiers that you can add to specs.

.. _version-specifier:
@@ -5,9 +5,9 @@

.. chain:

============================
Chaining Spack Installations
============================
=============================================
Chaining Spack Installations (upstreams.yaml)
=============================================

You can point your Spack installation to another installation to use any
packages that are installed there. To register the other Spack instance,
@@ -893,9 +893,8 @@ The valid variables for a ``when`` clause are:

#. ``env``. The user environment (usually ``os.environ`` in Python).

#. ``hostname``. The hostname of the system.

#. ``full_hostname``. The fully qualified hostname of the system.
#. ``hostname``. The hostname of the system (if ``hostname`` is an
executable in the user's PATH).

^^^^^^^^^^^^^^^^^^^^^^^^
SpecLists as Constraints
@@ -1475,16 +1475,14 @@ in a Windows CMD prompt.
Step 3: Run and configure Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
directory. This will provide a Windows command prompt with an environment properly set up with Spack
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
On Windows, Spack supports both primary native shells: PowerShell and the traditional command prompt.
To use Spack, pick your favorite shell, and run ``bin\spack_cmd.bat`` or ``share/spack/setup-env.ps1``
(you may need to Run as Administrator) from the top-level spack
directory. This will provide a Spack-enabled shell. If you receive a warning message that Python is not in your ``PATH``
(which may happen if you installed Python from the website and not the Windows Store), add the location
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
by using the ``Edit the system environment variables`` utility in Windows Control Panel.
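As an illustrative sketch only (the checkout path and prompts are assumptions, not part of this changeset), starting Spack from each shell might look like:

```console
rem CMD: launch the pre-configured command prompt
C:\spack> bin\spack_cmd.bat

# PowerShell: source the setup script in an existing shell
PS C:\spack> . .\share\spack\setup-env.ps1
PS C:\spack> spack --version
```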
.. note::
Alternatively, Powershell can be used in place of CMD

To configure Spack, first run the following command inside the Spack console:

.. code-block:: console

@@ -1549,7 +1547,7 @@ and not tabs, so ensure that this is the case when editing one directly.

.. note:: Cygwin
The use of Cygwin is not officially supported by Spack and is not tested.
However Spack will not throw an error, so use if choosing to use Spack
However, Spack will not prevent this; if you choose to use Spack
with Cygwin, know that no functionality is guaranteed.

^^^^^^^^^^^^^^^^^

@@ -1563,21 +1561,12 @@ Spack console via:

spack install cpuinfo

If in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages
If in the previous step, you did not have CMake or Ninja installed, running the command above should install both packages

"""""""""""""""""""""""""""
Windows Compatible Packages
"""""""""""""""""""""""""""
.. note:: Spec Syntax Caveats
Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
See the Spack spec syntax doc for more information.

Not all spack packages currently have Windows support. Some are inherently incompatible with the
platform, and others simply have yet to be ported. To view the current set of packages with Windows
support, the list command should be used via `spack list -t windows`. If there's a package you'd like
to install on Windows but it is not in that list, feel free to reach out to request the port or contribute
the port yourself.

.. note::
This is by no means a comprehensive list; some packages may have ports that were not tagged,
while others may just work out of the box on Windows and have not been tagged as such.

^^^^^^^^^^^^^^
For developers

@@ -1588,5 +1577,3 @@ Python, Git, and Spack, instead of requiring the user to do so manually.
Instructions for creating the installer are at
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md

Alternatively, a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI
at each run of the CI on develop or any PR.
lib/spack/external/__init__.py (vendored, 2 lines changed)

@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.5-dev (commit cbb1fd5eb397a70d466e5160b393b87b0dbcc78f)

astunparse
----------------
@@ -2844,8 +2844,7 @@
"asimdrdm",
"lrcpc",
"dcpop",
"asimddp",
"ssbs"
"asimddp"
],
"compilers" : {
"gcc": [

@@ -2942,7 +2941,6 @@
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"dcpodp",
"svei8mm",
"svebf16",

@@ -3010,7 +3008,7 @@
},
{
"versions": "11:",
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
"flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
},
{
"versions": "12:",

@@ -3066,7 +3064,6 @@
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",

@@ -3179,7 +3176,6 @@
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",
@@ -3,6 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re
from typing import Optional

import spack.paths
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
spack_version = __version__

@@ -19,4 +26,47 @@ def __try_int(v):
spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])


__all__ = ["spack_version_info", "spack_version"]
def get_spack_commit() -> Optional[str]:
    """Get the Spack git commit sha.

    Returns:
        (str or None) the commit sha if available, otherwise None
    """
    git_path = os.path.join(spack.paths.prefix, ".git")
    if not os.path.exists(git_path):
        return None

    git = spack.util.git.git()
    if not git:
        return None

    rev = git(
        "-C",
        spack.paths.prefix,
        "rev-parse",
        "HEAD",
        output=str,
        error=os.devnull,
        fail_on_error=False,
    )
    if git.returncode != 0:
        return None

    match = re.match(r"[a-f\d]{7,}$", rev)
    return match.group(0) if match else None


def get_version() -> str:
    """Get a descriptive version of this instance of Spack.

    Outputs '<PEP440 version> (<git commit sha>)'.

    The commit sha is only added when available.
    """
    commit = get_spack_commit()
    if commit:
        return f"{spack_version} ({commit})"
    return spack_version


__all__ = ["spack_version_info", "spack_version", "get_version", "get_spack_commit"]
@@ -51,9 +51,10 @@ def _search_duplicate_compilers(error_cls):

import llnl.util.lang

import spack.builder
import spack.config
import spack.fetch_strategy
import spack.patch
import spack.paths
import spack.repo
import spack.spec
import spack.util.crypto

@@ -281,7 +282,7 @@ def _avoid_mismatched_variants(error_cls):
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for variant in current_spec.variants.values():
# Variant does not exist at all
if variant.name not in pkg_cls.variants:
if variant.name not in pkg_cls.variant_names():
summary = (
f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'"

@@ -290,9 +291,8 @@ def _avoid_mismatched_variants(error_cls):
continue

# Variant cannot accept this value
s = spack.spec.Spec(pkg_name)
try:
s.update_variant_validate(variant.name, variant.value)
spack.variant.prevalidate_variant_value(pkg_cls, variant, strict=True)
except Exception:
summary = (
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "

@@ -662,9 +662,15 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
buildsystem_variant, _ = pkg_cls.variants["build_system"]
buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]

# values are either Value objects (for conditional values) or the values themselves
build_system_names = set(
v.value if isinstance(v, spack.variant.Value) else v
for _, variant in pkg_cls.variant_definitions("build_system")
for v in variant.values
)
builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]

module = pkg_cls.module
has_builders_in_package_py = any(
getattr(module, name, False) for name in builder_cls_names

@@ -931,20 +937,22 @@ def check_virtual_with_variants(spec, msg):

# check variants
dependency_variants = dep.spec.variants
for name, value in dependency_variants.items():
for name, variant in dependency_variants.items():
try:
v, _ = dependency_pkg_cls.variants[name]
v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
spack.variant.prevalidate_variant_value(
dependency_pkg_cls, variant, dep.spec, strict=True
)
except Exception as e:
summary = (
f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
)

error_msg = str(e)
if isinstance(e, KeyError):
error_msg = (
f"variant {str(e).strip()} does not exist in package {dep_name}"
f" in package '{dep_name}'"
)
error_msg += f" in package '{dep_name}'"

errors.append(
error_cls(summary=summary, details=[error_msg, f"in {filename}"])

@@ -956,39 +964,38 @@ def check_virtual_with_variants(spec, msg):
@package_directives
def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
"""Ensures that variant defaults are present and parsable from cli"""

def check_variant(pkg_cls, variant, vname):
# bool is a subclass of int in python. Permitting a default that is an instance
# of 'int' means both foo=false and foo=0 are accepted. Other falsish values are
# not allowed, since they can't be parsed from CLI ('foo=')
default_is_parsable = isinstance(variant.default, int) or variant.default

if not default_is_parsable:
msg = f"Variant '{vname}' of package '{pkg_cls.name}' has an unparsable default value"
return [error_cls(msg, [])]

try:
vspec = variant.make_default()
except spack.variant.MultipleValuesInExclusiveVariantError:
msg = f"Can't create default value for variant '{vname}' in package '{pkg_cls.name}'"
return [error_cls(msg, [])]

try:
variant.validate_or_raise(vspec, pkg_cls.name)
except spack.variant.InvalidVariantValueError:
msg = "Default value of variant '{vname}' in package '{pkg.name}' is invalid"
question = "Is it among the allowed values?"
return [error_cls(msg, [question])]

return []

errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for variant_name, entry in pkg_cls.variants.items():
variant, _ = entry
default_is_parsable = (
# Permitting a default that is an instance on 'int' permits
# to have foo=false or foo=0. Other falsish values are
# not allowed, since they can't be parsed from cli ('foo=')
isinstance(variant.default, int)
or variant.default
)
if not default_is_parsable:
error_msg = "Variant '{}' of package '{}' has a bad default value"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
continue

try:
vspec = variant.make_default()
except spack.variant.MultipleValuesInExclusiveVariantError:
error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
continue

try:
variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
except spack.variant.InvalidVariantValueError:
error_msg = (
"The default value of the variant '{}' in package '{}' failed validation"
)
question = "Is it among the allowed values?"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))

for vname in pkg_cls.variant_names():
for _, variant_def in pkg_cls.variant_definitions(vname):
errors.extend(check_variant(pkg_cls, variant_def, vname))
return errors


@@ -998,11 +1005,11 @@ def _ensure_variants_have_descriptions(pkgs, error_cls):
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for variant_name, entry in pkg_cls.variants.items():
variant, _ = entry
if not variant.description:
error_msg = "Variant '{}' in package '{}' is missing a description"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
for name in pkg_cls.variant_names():
for when, variant in pkg_cls.variant_definitions(name):
if not variant.description:
msg = f"Variant '{name}' in package '{pkg_name}' is missing a description"
errors.append(error_cls(msg, []))

return errors

@@ -1059,29 +1066,26 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls


def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
variant_exceptions = (
spack.variant.InconsistentValidationError,
spack.variant.MultipleValuesInExclusiveVariantError,
spack.variant.InvalidVariantValueError,
KeyError,
)
errors = []
variant_names = pkg.variant_names()
summary = f"{pkg.name}: wrong variant in '{directive}' directive"
filename = spack.repo.PATH.filename_for_package_name(pkg.name)

for name, v in constraint.variants.items():
if name not in variant_names:
msg = f"variant {name} does not exist in {pkg.name}"
errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
continue

try:
variant, _ = pkg.variants[name]
variant.validate_or_raise(v, pkg_cls=pkg)
except variant_exceptions as e:
summary = pkg.name + ': wrong variant in "{0}" directive'
summary = summary.format(directive)
filename = spack.repo.PATH.filename_for_package_name(pkg.name)

error_msg = str(e).strip()
if isinstance(e, KeyError):
error_msg = "the variant {0} does not exist".format(error_msg)

err = error_cls(summary=summary, details=[error_msg, "in " + filename])

errors.append(err)
spack.variant.prevalidate_variant_value(pkg, v, constraint, strict=True)
except (
spack.variant.InconsistentValidationError,
spack.variant.MultipleValuesInExclusiveVariantError,
spack.variant.InvalidVariantValueError,
) as e:
msg = str(e).strip()
errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))

return errors


@@ -1119,9 +1123,10 @@ def _extracts_errors(triggers, summary):
for dname in dnames
)

for vname, (variant, triggers) in pkg_cls.variants.items():
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
errors.extend(_extracts_errors(triggers, summary))
for when, variants_by_name in pkg_cls.variants.items():
for vname, variant in variants_by_name.items():
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
errors.extend(_extracts_errors([when], summary))

for when, providers, details in _error_items(pkg_cls.provided):
errors.extend(
@@ -33,7 +33,6 @@
from llnl.util.symlink import readlink

import spack.caches
import spack.cmd
import spack.config as config
import spack.database as spack_db
import spack.error

@@ -44,9 +43,9 @@
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
import spack.paths
import spack.platforms
import spack.relocate as relocate
import spack.repo
import spack.spec
import spack.stage
import spack.store

@@ -1447,7 +1446,9 @@ def _oci_push_pkg_blob(
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

# Create an oci.image.layer aka tarball of the package
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
compressed_tarfile_checksum, tarfile_checksum = _do_create_tarball(
filename, spec.prefix, get_buildinfo_dict(spec)
)

blob = spack.oci.oci.Blob(
Digest.from_sha256(compressed_tarfile_checksum),

@@ -2697,6 +2698,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

for mirror in mirror_collection.values():
fetch_url = mirror.fetch_url
# TODO: oci:// does not support signing.
if fetch_url.startswith("oci://"):
continue
keys_url = url_util.join(
fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
)

@@ -14,6 +14,7 @@
import spack.compilers
import spack.config
import spack.environment
import spack.modules
import spack.paths
import spack.platforms
import spack.repo

@@ -37,21 +37,16 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.environment
import spack.modules
import spack.paths
import spack.platforms
import spack.platforms.linux
import spack.repo
import spack.spec
import spack.store
import spack.user_environment
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller

from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer

@@ -283,7 +278,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

# Install the spec that should make the module importable
with spack.config.override(self.mirror_scope):
concrete_spec.package.do_install(fail_fast=True)
PackageInstaller([concrete_spec.package], fail_fast=True).install()

if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info

@@ -306,7 +301,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
tty.debug(msg.format(abstract_spec_str))
with spack.config.override(self.mirror_scope):
concrete_spec.package.do_install()
PackageInstaller([concrete_spec.package], fail_fast=True).install()
if _executables_in_store(executables, concrete_spec, query_info=info):
self.last_search = info
return True

@@ -14,9 +14,9 @@
from llnl.util import tty

import spack.environment
import spack.spec
import spack.tengine
import spack.util.cpus
import spack.util.executable
import spack.util.path

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path

@@ -53,6 +53,7 @@
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.build_systems._checks
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python

@@ -61,26 +62,20 @@
import spack.config
import spack.deptypes as dt
import spack.error
import spack.main
import spack.multimethod
import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
import spack.schema.environment
import spack.spec
import spack.stage
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
from spack.context import Context
from spack.error import NoHeadersError, NoLibrariesError
from spack.error import InstallError, NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications,

@@ -455,7 +450,7 @@ def set_wrapper_variables(pkg, env):
env.set(SPACK_DEBUG, "TRUE")
env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
env.set(SPACK_DEBUG_LOG_DIR, spack.paths.spack_working_dir)

if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper

@@ -562,7 +557,7 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

jobs = determine_number_of_jobs(parallel=pkg.parallel)
jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
module.make_jobs = jobs

# TODO: make these build deps that can be installed if not found.

@@ -1139,7 +1134,7 @@ def _setup_pkg_and_run(
return_value = function(pkg, kwargs)
write_pipe.send(return_value)

except StopPhase as e:
except spack.error.StopPhase as e:
# Do not create a full ChildError from this, it's not an error
# it's a control statement.
write_pipe.send(e)

@@ -1300,7 +1295,7 @@ def exitcode_msg(p):
p.join()

# If returns a StopPhase, raise it
if isinstance(child_result, StopPhase):
if isinstance(child_result, spack.error.StopPhase):
# do not print
raise child_result

@@ -1509,17 +1504,6 @@ def _make_child_error(msg, module, name, traceback, log, log_type, context):
return ChildError(msg, module, name, traceback, log, log_type, context)


class StopPhase(spack.error.SpackError):
"""Pickle-able exception to control stopped builds."""

def __reduce__(self):
return _make_stop_phase, (self.message, self.long_message)


def _make_stop_phase(msg, long_msg):
return StopPhase(msg, long_msg)


def write_log_summary(out, log_type, log, last=None):
errors, warnings = parse_log_events(log)
nerr = len(errors)

@@ -8,7 +8,7 @@
import llnl.util.lang

import spack.builder
import spack.installer
import spack.error
import spack.relocate
import spack.spec
import spack.store

@@ -34,7 +34,7 @@ def check_paths(path_list, filetype, predicate):
if not predicate(abs_path):
msg = "Install failed for {0}. No such {1} in prefix: {2}"
msg = msg.format(pkg.name, filetype, path)
raise spack.installer.InstallError(msg)
raise spack.error.InstallError(msg)

check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)

@@ -42,7 +42,7 @@ def check_paths(path_list, filetype, predicate):
ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
if all(map(ignore_file, os.listdir(pkg.prefix))):
msg = "Install failed for {0}. Nothing was installed!"
raise spack.installer.InstallError(msg.format(pkg.name))
raise spack.error.InstallError(msg.format(pkg.name))


def apply_macos_rpath_fixups(builder: spack.builder.Builder):

@@ -13,6 +13,7 @@

import spack.build_environment
import spack.builder
import spack.error
import spack.package_base
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when

@@ -248,7 +249,7 @@ def runs_ok(script_abs_path):

# An external gnuconfig may not not have a prefix.
if gnuconfig_dir is None:
raise spack.build_environment.InstallError(
raise spack.error.InstallError(
"Spack could not find substitutes for GNU config files because no "
"prefix is available for the `gnuconfig` package. Make sure you set a "
"prefix path instead of modules for external `gnuconfig`."

@@ -268,7 +269,7 @@ def runs_ok(script_abs_path):
msg += (
" or the `gnuconfig` package prefix is misconfigured as" " an external package"
)
raise spack.build_environment.InstallError(msg)
raise spack.error.InstallError(msg)

# Filter working substitutes
candidates = [f for f in candidates if runs_ok(f)]

@@ -293,9 +294,7 @@ def runs_ok(script_abs_path):
and set the prefix to the directory containing the `config.guess` and
`config.sub` files.
"""
raise spack.build_environment.InstallError(
msg.format(", ".join(to_be_found), self.name)
)
raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))

# Copy the good files over the bad ones
for abs_path in to_be_patched:

@@ -688,9 +687,8 @@ def _activate_or_not(

variant = variant or name

# Defensively look that the name passed as argument is among
# variants
if variant not in self.pkg.variants:
# Defensively look that the name passed as argument is among variants
if not self.pkg.has_variant(variant):
msg = '"{0}" is not a variant of "{1}"'
raise KeyError(msg.format(variant, self.pkg.name))

@@ -699,27 +697,19 @@ def _activate_or_not(

# Create a list of pairs. Each pair includes a configuration
# option and whether or not that option is activated
variant_desc, _ = self.pkg.variants[variant]
if set(variant_desc.values) == set((True, False)):
vdef = self.pkg.get_variant(variant)
if set(vdef.values) == set((True, False)):
# BoolValuedVariant carry information about a single option.
# Nonetheless, for uniformity of treatment we'll package them
# in an iterable of one element.
condition = "+{name}".format(name=variant)
options = [(name, condition in spec)]
options = [(name, f"+{variant}" in spec)]
else:
condition = "{variant}={value}"
# "feature_values" is used to track values which correspond to
# features which can be enabled or disabled as understood by the
# package's build system. It excludes values which have special
# meanings and do not correspond to features (e.g. "none")
feature_values = (
getattr(variant_desc.values, "feature_values", None) or variant_desc.values
)

options = [
(value, condition.format(variant=variant, value=value) in spec)
for value in feature_values
]
feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
options = [(value, f"{variant}={value}" in spec) for value in feature_values]

# For each allowed value in the list of values
for option_value, activated in options:

@@ -89,7 +89,7 @@ def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
if variant is None:
variant = cmake_var.lower()

if variant not in self.pkg.variants:
if not self.pkg.has_variant(variant):
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

if variant not in self.pkg.spec.variants:

@@ -15,6 +15,7 @@
import spack.build_environment
import spack.builder
import spack.deptypes as dt
import spack.error
import spack.package_base
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

@@ -145,6 +146,7 @@ def _values(x):
default=default,
values=_values,
description="the build system generator to use",
when="build_system=cmake",
)
for x in not_used:
conflicts(f"generator={x}")

@@ -344,7 +346,7 @@ def std_args(pkg, generator=None):
msg = "Invalid CMake generator: '{0}'\n".format(generator)
msg += "CMakePackage currently supports the following "
msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
raise spack.package_base.InstallError(msg)
raise spack.error.InstallError(msg)

try:
build_type = pkg.spec.variants["build_type"].value

@@ -504,7 +506,7 @@ def define_from_variant(self, cmake_var, variant=None):
if variant is None:
variant = cmake_var.lower()

if variant not in self.pkg.variants:
if not self.pkg.has_variant(variant):
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

if variant not in self.pkg.spec.variants:

@@ -14,6 +14,7 @@

import spack.compiler
import spack.package_base
import spack.util.executable

# Local "type" for type hints
Path = Union[str, pathlib.Path]
@@ -1,192 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob
import importlib
import inspect
import os
import shutil
import time

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_systems.cmake
import spack.builder
import spack.util.log_parse
from spack.builder import run_after
from spack.directives import depends_on, requires, variant
from spack.package import CMakePackage


class CTestBuilder(spack.build_systems.cmake.CMakeBuilder):
    """
    This builder mirrors the behavior of a CMakeBuilder, but all commands are run through
    CTest. This ensures that xml files are created through CTest. This provides a unified
    buildstamp and improved xml over the spack generated ones.

    An additional phase is added for running tests post installation. This allows for things
    like regression tests that can be used to monitor differences in behavior/performance
    without failing the install.
    """

    phases = ("cmake", "build", "install", "analysis")

    @property
    def std_cmake_args(self):
        """
        Args to always supply to CMake. CDash args don't do anything if you don't submit.

        TODO: work out how to get the track, build, and site mapped correctly.
        Currently this ignores the spack flags and ingests the CTestConfig.cmake files.

        In ExaWind it is hooked into additional infrastructure.
        The spack flags are not ingestible to the package as far as I can currently tell.
        """
        args = super().std_cmake_args
        if self.spec.variants["cdash_submit"].value:
            args.extend(
                [
                    "-D",
                    f"BUILDNAME={self.pkg.spec.name}",
                    "-D",
                    f"CTEST_BUILD_OPTIONS={self.pkg.spec.short_spec}",
                    "-D",
                    "SITE=TODO",
                ]
            )
        return args

    def ctest_args(self):
        args = ["-T", "Test"]
        args.append("--stop-time")
        overall_test_timeout = 60 * 60 * 4  # 4 hours TODO should probably be a variant
        args.append(time.strftime("%H:%M:%S", time.localtime(time.time() + overall_test_timeout)))
        args.append("-VV")  # make sure lots of output can go to the log
        # a way to pass additional information to ctest execution.
        # for example in exawind, we default to running unit-tests, but for nightly tests
        # we expand to our regression test suite through this variant
        extra_args = self.pkg.spec.variants["ctest_args"].value
        if extra_args:
            args.extend(extra_args.split())
        return args

    @property
    def build_args(self):
        """
        CTest arguments that translate to running up to the end of the build phase through CTest
        """
        args = [
            "--group",
            self.pkg.spec.name,
            "-T",
            "Start",
            "-T",
            "Configure",
            "-T",
            "Build",
            "-VV",
        ]
        return args

    @property
    def submit_args(self):
        """
        CTest arguments just for submission. Allows us to split phases, where default CTest
        behavior is to configure, build, test and submit from a single command.
        """
        args = ["-T", "Submit", "-V"]
        return args

    def submit_cdash(self, pkg, spec, prefix):
        ctest = Executable(self.spec["cmake"].prefix.bin.ctest)
        ctest.add_default_env("CTEST_PARALLEL_LEVEL", str(make_jobs))
        build_env = os.environ.copy()
        ctest(*self.submit_args, env=build_env)

    def build(self, pkg, spec, prefix):
        """
        The only reason to run through the CTest interface is if we want to submit to CDash with
        unified CTest xml's.
        If we aren't going to submit then we can just run as the CMakeBuilder
        """
        if self.pkg.spec.variants["cdash_submit"].value:
            ctest = Executable(self.spec["cmake"].prefix.bin.ctest)
            ctest.add_default_env("CMAKE_BUILD_PARALLEL_LEVEL", str(make_jobs))
            with fs.working_dir(self.build_directory):
                build_env = os.environ.copy()
                # have ctest run, but we still want to submit if there are build failures
                # where spack would stop.
                # check for errors and submit to cdash if there are failures
                output = ctest(
                    *self.build_args, env=build_env, output=str.split, error=str.split
                ).split("\n")
                errors, warnings = spack.util.log_parse.parse_log_events(output)
                if len(errors) > 0:
                    errs = [str(e) for e in errors]
                    tty.warn(f"Errors: {errs}")
                    tty.warn(f"returncode {ctest.returncode}")
                    self.submit_cdash(pkg, spec, prefix)
                    raise BaseException(f"{self.pkg.spec.name} had build errors")

        else:
            super().build(pkg, spec, prefix)

    def analysis(self, pkg, spec, prefix):
        """
        This method currently runs tests post install to avoid the undesired side effect
        of failing installs for failed tests with spack's built in testing infrastructure
        """

        with working_dir(self.build_directory):
            args = self.ctest_args()
            tty.debug("{} running CTest".format(self.pkg.spec.name))
            tty.debug("Running:: ctest" + " ".join(args))
            ctest = Executable(self.spec["cmake"].prefix.bin.ctest)
            ctest.add_default_env("CTEST_PARALLEL_LEVEL", str(make_jobs))
            ctest.add_default_env("CMAKE_BUILD_PARALLEL_LEVEL", str(make_jobs))
            build_env = os.environ.copy()
            ctest(*args, "-j", str(make_jobs), env=build_env, fail_on_error=False)

        if self.pkg.spec.variants["cdash_submit"].value:
            self.submit_cdash(pkg, spec, prefix)


class CtestPackage(CMakePackage):
    """
    This package's default behavior is to act as a standard CMakePackage.
    """

    CMakeBuilder = CTestBuilder
    variant("cdash_submit", default=False, description="Submit results to cdash")
    variant("ctest_args", default="", description="quoted string of arguments to send to ctest")

    def setup_build_environment(self, env):
        env.prepend_path("PYTHONPATH", os.environ["EXAWIND_MANAGER"])

    def do_clean(self):
        """
        A nice feature for development builds. Can be omitted from the final product.
        """
        super().do_clean()
        if not self.stage.managed_by_spack:
            build_artifacts = glob.glob(os.path.join(self.stage.source_path, "spack-*"))
            for f in build_artifacts:
                if os.path.isfile(f):
                    os.remove(f)
                if os.path.isdir(f):
                    shutil.rmtree(f)
            ccjson = os.path.join(self.stage.source_path, "compile_commands.json")

            if os.path.isfile(ccjson):
                os.remove(ccjson)

    @run_after("cmake")
    def copy_compile_commands(self):
        """
        A nice feature for development builds. Can be omitted from the final product.
        """
        if self.spec.satisfies("dev_path=*"):
            target = os.path.join(self.stage.source_path, "compile_commands.json")
            source = os.path.join(self.build_directory, "compile_commands.json")
            if os.path.isfile(source):
                shutil.copyfile(source, target)
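For illustration only, a package built on the CtestPackage base class above could be installed with the two variants it defines; the package name is a placeholder and this command is not part of the changeset.

```console
# enable CDash submission and pass extra arguments through to ctest (illustrative)
spack install my-ctest-package +cdash_submit ctest_args="-R regression"
```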
@@ -22,9 +22,10 @@
|
||||
install,
|
||||
)
|
||||
|
||||
import spack.builder
|
||||
import spack.error
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.package_base import InstallError
|
||||
from spack.error import InstallError
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.prefix import Prefix
|
||||
|
@@ -15,7 +15,7 @@
|
||||
import spack.util.path
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.directives import conflicts, license, redistribute, variant
|
||||
from spack.package_base import InstallError
|
||||
from spack.error import InstallError
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
from spack.util.executable import Executable
|
||||
|
||||
|
@@ -24,6 +24,8 @@
|
||||
import spack.detection
|
||||
import spack.multimethod
|
||||
import spack.package_base
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
|
@@ -11,9 +11,9 @@
|
||||
|
||||
import spack.builder
|
||||
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
|
||||
from spack.config import determine_number_of_jobs
|
||||
from spack.directives import build_system, extends, maintainers
|
||||
from spack.package_base import PackageBase
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.environment import env_flag
|
||||
from spack.util.executable import Executable, ProcessError
|
||||
|
||||
|
@@ -10,7 +10,7 @@
|
||||
|
||||
from llnl.util import lang
|
||||
|
||||
import spack.build_environment
|
||||
import spack.error
|
||||
import spack.multimethod
|
||||
|
||||
#: Builder classes, as registered by the "builder" decorator
|
||||
@@ -461,15 +461,13 @@ def _on_phase_start(self, instance):
|
||||
# If a phase has a matching stop_before_phase attribute,
|
||||
# stop the installation process raising a StopPhase
|
||||
if getattr(instance, "stop_before_phase", None) == self.name:
|
||||
raise spack.build_environment.StopPhase(
|
||||
"Stopping before '{0}' phase".format(self.name)
|
||||
)
|
||||
raise spack.error.StopPhase("Stopping before '{0}' phase".format(self.name))
|
||||
|
||||
def _on_phase_exit(self, instance):
|
||||
# If a phase has a matching last_phase attribute,
|
||||
# stop the installation process raising a StopPhase
|
||||
if getattr(instance, "last_phase", None) == self.name:
|
||||
raise spack.build_environment.StopPhase("Stopping at '{0}' phase".format(self.name))
|
||||
raise spack.error.StopPhase("Stopping at '{0}' phase".format(self.name))
|
||||
|
||||
def copy(self):
|
||||
return copy.deepcopy(self)
|
||||
|
@@ -11,9 +11,7 @@
|
||||
from llnl.util.filesystem import mkdirp
|
||||
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.mirror
|
||||
import spack.paths
|
||||
import spack.util.file_cache
|
||||
import spack.util.path
|
||||
|
@@ -31,6 +31,7 @@
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.concretize
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
@@ -38,7 +39,6 @@
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.git
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -1219,8 +1219,8 @@ def main_script_replacements(cmd):
|
||||
# Capture the version of Spack used to generate the pipeline, that can be
|
||||
# passed to `git checkout` for version consistency. If we aren't in a Git
|
||||
# repository, presume we are a Spack release and use the Git tag instead.
|
||||
spack_version = spack.main.get_version()
|
||||
version_to_clone = spack.main.get_spack_commit() or f"v{spack.spack_version}"
|
||||
spack_version = spack.get_version()
|
||||
version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"
|
||||
|
||||
output_object["variables"] = {
|
||||
"SPACK_ARTIFACTS_ROOT": rel_artifacts_root,
|
||||
|
@@ -17,7 +17,7 @@

from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize

import spack.config
import spack.config # breaks a cycle.
import spack.environment as ev
import spack.error
import spack.extensions

@@ -11,6 +11,7 @@

import llnl.util.tty.color as color

import spack.platforms
import spack.spec

description = "print architecture information about this machine"
section = "system"

@@ -16,11 +16,11 @@

import spack.bootstrap.config
import spack.bootstrap.core
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path
import spack.util.spack_yaml
from spack.cmd.common import arguments

description = "manage bootstrap configuration"

@@ -23,14 +23,9 @@

import spack.error
import spack.mirror
import spack.oci.oci
import spack.oci.opener
import spack.relocate
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util

@@ -4,6 +4,7 @@

# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.cmd
import spack.spec
from spack.cmd.common import arguments

description = "change an existing spec in an environment"

@@ -15,7 +15,6 @@

import spack.repo
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.package_base import (
@@ -19,7 +19,6 @@

import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
import spack.environment.depfile
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util

@@ -10,11 +10,9 @@

import llnl.util.filesystem
import llnl.util.tty as tty

import spack.bootstrap
import spack.caches
import spack.cmd.test
import spack.cmd
import spack.config
import spack.repo
import spack.stage
import spack.store
import spack.util.path

@@ -17,6 +17,7 @@

from llnl.util.tty.colify import colify

import spack.cmd
import spack.config
import spack.main
import spack.paths
import spack.platforms

@@ -15,7 +15,6 @@

import spack.deptypes as dt
import spack.environment as ev
import spack.mirror
import spack.modules
import spack.reporters
import spack.spec
import spack.store

@@ -9,6 +9,7 @@

import llnl.util.tty as tty

import spack.cmd
import spack.spec

display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}

@@ -10,7 +10,6 @@

import spack.cmd
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
import spack.store
from spack import build_environment, traverse

@@ -13,9 +13,9 @@

import spack.config
import spack.environment as ev
import spack.repo
import spack.error
import spack.schema.env
import spack.schema.packages
import spack.spec
import spack.store
import spack.util.spack_yaml as syaml
from spack.cmd.common import arguments

@@ -256,7 +256,7 @@ def config_remove(args):

existing.pop(value, None)
else:
# This should be impossible to reach
raise spack.config.ConfigError("Config has nested non-dict values")
raise spack.error.ConfigError("Config has nested non-dict values")

spack.config.set(path, existing, scope)

@@ -340,7 +340,7 @@ def _config_change(config_path, match_spec_str=None):

if not changed:
existing_requirements = spack.config.get(key_path)
if isinstance(existing_requirements, str):
raise spack.config.ConfigError(
raise spack.error.ConfigError(
"'config change' needs to append a requirement,"
" but existing require: config is not a list"
)
@@ -536,11 +536,11 @@ def config_prefer_upstream(args):

# Get and list all the variants that differ from the default.
variants = []
for var_name, variant in spec.variants.items():
if var_name in ["patches"] or var_name not in spec.package.variants:
if var_name in ["patches"] or not spec.package.has_variant(var_name):
continue

variant_desc, _ = spec.package.variants[var_name]
if variant.value != variant_desc.default:
vdef = spec.package.get_variant(var_name)
if variant.value != vdef.default:
variants.append(str(variant))
variants.sort()
variants = " ".join(variants)

@@ -13,7 +13,6 @@

import spack.repo
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import (
UndetectableNameError,

@@ -13,11 +13,12 @@

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

import spack.config
import spack
import spack.paths
import spack.platforms
import spack.spec
import spack.store
import spack.util.git
from spack.main import get_version
from spack.util.executable import which

description = "debugging commands for troubleshooting Spack"

@@ -89,7 +90,7 @@ def report(args):

host_os = host_platform.operating_system("frontend")
host_target = host_platform.target("frontend")
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
print("* **Spack:**", get_version())
print("* **Spack:**", spack.get_version())
print("* **Python:**", platform.python_version())
print("* **Platform:**", architecture)

@@ -11,7 +11,6 @@

import spack.cmd
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.store
from spack.cmd.common import arguments

@@ -20,6 +20,7 @@

import spack.cmd
import spack.environment as ev
import spack.installer
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

@@ -142,4 +143,4 @@ def deprecate(parser, args):

tty.die("Will not deprecate any packages.")

for dcate, dcator in zip(all_deprecate, all_deprecators):
dcate.package.do_deprecate(dcator, symlink)
spack.installer.deprecate(dcate, dcator, symlink)
@@ -8,10 +8,13 @@

import llnl.util.tty as tty

import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.repo
from spack.cmd.common import arguments
from spack.installer import PackageInstaller

description = "developer build: build from code in current working directory"
section = "build"

@@ -129,9 +132,9 @@ def dev_build(self, args):

elif args.test == "root":
tests = [spec.name for spec in specs]

spec.package.do_install(
PackageInstaller(
[spec.package],
tests=tests,
make_jobs=args.jobs,
keep_prefix=args.keep_prefix,
install_deps=not args.ignore_deps,
verbose=not args.quiet,

@@ -139,7 +142,7 @@ def dev_build(self, args):

stop_before=args.before,
skip_patch=args.skip_patch,
stop_at=args.until,
)
).install()

# drop into the build environment of the package?
if args.shell is not None:

@@ -10,7 +10,6 @@

import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage

@@ -12,7 +12,6 @@

import spack.cmd
import spack.environment as ev
import spack.solver.asp as asp
import spack.util.environment
import spack.util.spack_json as sjson
from spack.cmd.common import arguments

@@ -21,15 +21,12 @@

import spack.cmd
import spack.cmd.common
import spack.cmd.common.arguments
import spack.cmd.install
import spack.cmd.modules
import spack.cmd.uninstall
import spack.config
import spack.environment as ev
import spack.environment.depfile as depfile
import spack.environment.environment
import spack.environment.shell
import spack.schema.env
import spack.spec
import spack.tengine
from spack.cmd.common import arguments
from spack.util.environment import EnvironmentModifications
@@ -18,9 +18,9 @@

import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.package_base
import spack.repo
import spack.spec
import spack.util.environment
from spack.cmd.common import arguments

description = "manage external packages in Spack configuration"

@@ -8,7 +8,6 @@

import spack.cmd
import spack.config
import spack.environment as ev
import spack.repo
import spack.traverse
from spack.cmd.common import arguments

@@ -10,10 +10,11 @@

import llnl.util.tty as tty
import llnl.util.tty.color as color

import spack.bootstrap
import spack.cmd as cmd
import spack.config
import spack.environment as ev
import spack.repo
import spack.spec
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

@@ -16,6 +16,7 @@

import spack.install_test
import spack.repo
import spack.spec
import spack.variant
import spack.version
from spack.cmd.common import arguments
from spack.package_base import preferred_version

@@ -333,26 +334,6 @@ def _fmt_variant(variant, max_name_default_len, indent, when=None, out=None):

out.write("\n")


def _variants_by_name_when(pkg):
"""Adaptor to get variants keyed by { name: { when: { [Variant...] } }."""
# TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged.
variants = {}
for name, (variant, whens) in sorted(pkg.variants.items()):
for when in whens:
variants.setdefault(name, {}).setdefault(when, []).append(variant)
return variants


def _variants_by_when_name(pkg):
"""Adaptor to get variants keyed by { when: { name: Variant } }"""
# TODO: replace with pkg.variants when unified directive dicts are merged.
variants = {}
for name, (variant, whens) in pkg.variants.items():
for when in whens:
variants.setdefault(when, {})[name] = variant
return variants


def _print_variants_header(pkg):
"""output variants"""
@@ -363,32 +344,22 @@ def _print_variants_header(pkg):

color.cprint("")
color.cprint(section_title("Variants:"))

variants_by_name = _variants_by_name_when(pkg)

# Calculate the max length of the "name [default]" part of the variant display
# This lets us know where to print variant values.
max_name_default_len = max(
color.clen(_fmt_name_and_default(variant))
for name, when_variants in variants_by_name.items()
for variants in when_variants.values()
for variant in variants
for name in pkg.variant_names()
for _, variant in pkg.variant_definitions(name)
)

return max_name_default_len, variants_by_name


def _unconstrained_ver_first(item):
"""sort key that puts specs with open version ranges first"""
spec, _ = item
return (spack.version.any_version not in spec.versions, spec)
return max_name_default_len


def print_variants_grouped_by_when(pkg):
max_name_default_len, _ = _print_variants_header(pkg)
max_name_default_len = _print_variants_header(pkg)

indent = 4
variants = _variants_by_when_name(pkg)
for when, variants_by_name in sorted(variants.items(), key=_unconstrained_ver_first):
for when, variants_by_name in pkg.variant_items():
padded_values = max_name_default_len + 4
start_indent = indent

@@ -406,15 +377,14 @@ def print_variants_grouped_by_when(pkg):


def print_variants_by_name(pkg):
max_name_default_len, variants_by_name = _print_variants_header(pkg)
max_name_default_len = _print_variants_header(pkg)
max_name_default_len += 4

indent = 4
for name, when_variants in variants_by_name.items():
for when, variants in sorted(when_variants.items(), key=_unconstrained_ver_first):
for variant in variants:
_fmt_variant(variant, max_name_default_len, indent, when, out=sys.stdout)
sys.stdout.write("\n")
for name in pkg.variant_names():
for when, variant in pkg.variant_definitions(name):
_fmt_variant(variant, max_name_default_len, indent, when, out=sys.stdout)
sys.stdout.write("\n")


def print_variants(pkg, args):

@@ -13,18 +13,15 @@

from llnl.string import plural
from llnl.util import lang, tty

import spack.build_environment
import spack.cmd
import spack.config
import spack.environment as ev
import spack.fetch_strategy
import spack.package_base
import spack.paths
import spack.report
import spack.spec
import spack.store
from spack.cmd.common import arguments
from spack.error import SpackError
from spack.error import InstallError, SpackError
from spack.installer import PackageInstaller

description = "build and install packages"
@@ -287,7 +284,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):

tty.die("Reinstallation aborted.")


def _dump_log_on_error(e: spack.build_environment.InstallError):
def _dump_log_on_error(e: InstallError):
e.print_context()
assert e.pkg, "Expected InstallError to include the associated package"
if not os.path.exists(e.pkg.log_path):

@@ -352,7 +349,7 @@ def reporter_factory(specs):

install_with_active_env(env, args, install_kwargs, reporter_factory)
else:
install_without_active_env(args, install_kwargs, reporter_factory)
except spack.build_environment.InstallError as e:
except InstallError as e:
if args.show_log_on_error:
_dump_log_on_error(e)
raise

@@ -477,5 +474,5 @@ def install_without_active_env(args, install_kwargs, reporter_factory):

installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
builder = PackageInstaller(installs, install_kwargs)
builder = PackageInstaller(installs, **install_kwargs)
builder.install()

@@ -6,11 +6,10 @@

import sys

import spack.cmd
import spack.cmd.find
import spack.cmd.common
import spack.environment as ev
import spack.store
import spack.user_environment as uenv
import spack.util.environment
from spack.cmd.common import arguments

description = "add package to the user environment"

@@ -8,9 +8,6 @@

from llnl.util import tty

import spack.cmd
import spack.error
import spack.package_base
import spack.repo
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

@@ -17,7 +17,6 @@

import spack.mirror
import spack.repo
import spack.spec
import spack.util.path
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.error import SpackError
@@ -15,6 +15,7 @@

import spack.cmd
import spack.config
import spack.error
import spack.modules
import spack.modules.common
import spack.repo

@@ -124,13 +125,13 @@ def check_module_set_name(name):

names = [k for k in modules if k != "prefix_inspections"]

if not names:
raise spack.config.ConfigError(
raise spack.error.ConfigError(
f"Module set configuration is missing. Cannot use module set '{name}'"
)

pretty_names = "', '".join(names)

raise spack.config.ConfigError(
raise spack.error.ConfigError(
f"Cannot use invalid module set '{name}'.",
f"Valid module set names are: '{pretty_names}'.",
)

@@ -172,7 +173,7 @@ def loads(module_type, specs, args, out=None):

modules = list(
(
spec,
spack.modules.common.get_module(
spack.modules.get_module(
module_type,
spec,
get_full_path=False,

@@ -221,7 +222,7 @@ def find(module_type, specs, args):

try:
modules = [
spack.modules.common.get_module(
spack.modules.get_module(
module_type,
spec,
args.full_path,

@@ -232,7 +233,7 @@ def find(module_type, specs, args):

]

modules.append(
spack.modules.common.get_module(
spack.modules.get_module(
module_type,
single_spec,
args.full_path,

@@ -9,7 +9,6 @@

import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.traverse
from spack.cmd.common import arguments
@@ -12,7 +12,6 @@

from llnl.util.tty.colify import colify

import spack.cmd
import spack.paths
import spack.repo
import spack.util.executable as exe
import spack.util.package_hash as ph

@@ -78,8 +78,8 @@ def python(parser, args, unknown_args):

# Run user choice of interpreter
if args.python_interpreter == "ipython":
return spack.cmd.python.ipython_interpreter(args)
return spack.cmd.python.python_interpreter(args)
return ipython_interpreter(args)
return python_interpreter(args)


def ipython_interpreter(args):

@@ -6,7 +6,6 @@

import llnl.util.tty as tty

import spack.cmd
import spack.repo
from spack.cmd.common import arguments

description = "revert checked out package source code"

@@ -12,11 +12,12 @@

import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.environment
import spack.hash_types as ht
import spack.package_base
import spack.solver.asp as asp
import spack.spec
from spack.cmd.common import arguments

description = "concretize a specs using an ASP solver"

@@ -14,6 +14,7 @@

import spack.hash_types as ht
import spack.spec
import spack.store
import spack.traverse
from spack.cmd.common import arguments

description = "show what would be installed, given a spec"

@@ -11,8 +11,6 @@

import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.stage
import spack.traverse
from spack.cmd.common import arguments
@@ -9,8 +9,8 @@

import llnl.util.tty as tty
import llnl.util.tty.colify as colify

import spack.environment
import spack.repo
import spack.store
import spack.tag

description = "show package tags and associated packages"

@@ -15,11 +15,12 @@

from llnl.util.tty import colify

import spack.cmd
import spack.config
import spack.environment as ev
import spack.install_test
import spack.package_base
import spack.repo
import spack.report
import spack.store
from spack.cmd.common import arguments

description = "run spack's tests for an install"

@@ -10,6 +10,7 @@

from llnl.util.filesystem import working_dir

import spack
import spack.cmd
import spack.config
import spack.paths
import spack.util.git

@@ -6,6 +6,7 @@

import llnl.util.tty as tty

import spack.cmd
import spack.config
from spack.cmd.common import arguments

description = "remove specs from an environment"

@@ -10,6 +10,8 @@

import re
import sys

import spack.extensions

try:
import pytest
except ImportError:

@@ -7,9 +7,10 @@

import sys

import spack.cmd
import spack.cmd.common
import spack.error
import spack.store
import spack.user_environment as uenv
import spack.util.environment
from spack.cmd.common import arguments

description = "remove package from the user environment"

@@ -6,6 +6,7 @@

import llnl.util.tty as tty

import spack.cmd
import spack.environment as ev
import spack.store
import spack.verify
@@ -202,18 +202,6 @@ class Compiler:

support for specific compilers, their possible names, arguments,
and how to identify the particular type of compiler."""

# Subclasses use possible names of C compiler
cc_names: List[str] = []

# Subclasses use possible names of C++ compiler
cxx_names: List[str] = []

# Subclasses use possible names of Fortran 77 compiler
f77_names: List[str] = []

# Subclasses use possible names of Fortran 90 compiler
fc_names: List[str] = []

# Optional prefix regexes for searching for this type of compiler.
# Prefixes are sometimes used for toolchains
prefixes: List[str] = []

@@ -619,18 +607,6 @@ def extract_version_from_output(cls, output):

def cc_version(cls, cc):
return cls.default_version(cc)

@classmethod
def cxx_version(cls, cxx):
return cls.default_version(cxx)

@classmethod
def f77_version(cls, f77):
return cls.default_version(f77)

@classmethod
def fc_version(cls, fc):
return cls.default_version(fc)

@classmethod
def search_regexps(cls, language):
# Compile all the regular expressions used for files beforehand.

@@ -8,6 +8,7 @@

"""
import importlib
import os
import re
import sys
import warnings
from typing import Dict, List, Optional

@@ -25,7 +26,6 @@

import spack.platforms
import spack.repo
import spack.spec
import spack.version
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
@@ -632,37 +632,34 @@ def is_mixed_toolchain(compiler):

Args:
compiler (spack.compiler.Compiler): a valid compiler object
"""
cc = os.path.basename(compiler.cc or "")
cxx = os.path.basename(compiler.cxx or "")
f77 = os.path.basename(compiler.f77 or "")
fc = os.path.basename(compiler.fc or "")
import spack.detection.path

executables = [
os.path.basename(compiler.cc or ""),
os.path.basename(compiler.cxx or ""),
os.path.basename(compiler.f77 or ""),
os.path.basename(compiler.fc or ""),
]

toolchains = set()
for compiler_cls in all_compiler_types():
# Inspect all the compiler toolchain we know. If a compiler is the
# only compiler supported there it belongs to that toolchain.
def name_matches(name, name_list):
# This is such that 'gcc' matches variations
# like 'ggc-9' etc that are found in distros
name, _, _ = name.partition("-")
return len(name_list) == 1 and name and name in name_list
finder = spack.detection.path.ExecutablesFinder()

if any(
[
name_matches(cc, compiler_cls.cc_names),
name_matches(cxx, compiler_cls.cxx_names),
name_matches(f77, compiler_cls.f77_names),
name_matches(fc, compiler_cls.fc_names),
]
):
tty.debug("[TOOLCHAIN] MATCH {0}".format(compiler_cls.__name__))
toolchains.add(compiler_cls.__name__)
for pkg_name in spack.repo.PATH.packages_with_tags(COMPILER_TAG):
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
patterns = finder.search_patterns(pkg=pkg_cls)
if not patterns:
continue
joined_pattern = re.compile(r"|".join(patterns))

if any(joined_pattern.search(exe) for exe in executables):
tty.debug(f"[TOOLCHAIN] MATCH {pkg_name}")
toolchains.add(pkg_name)

if len(toolchains) > 1:
if (
toolchains == set(["Clang", "AppleClang", "Aocc"])
toolchains == {"llvm", "apple-clang", "aocc"}
# Msvc toolchain uses Intel ifx
or toolchains == set(["Msvc", "Dpcpp", "Oneapi"])
or toolchains == {"msvc", "intel-oneapi-compilers"}
):
return False
tty.debug("[TOOLCHAINS] {0}".format(toolchains))
@@ -13,18 +13,6 @@


class Aocc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["clang"]

# Subclasses use possible names of C++ compiler
cxx_names = ["clang++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["flang"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["flang"]

version_argument = "--version"

@property

@@ -8,7 +8,6 @@

import spack.compiler
import spack.compilers.clang
import spack.util.executable
from spack.version import Version


@@ -9,18 +9,6 @@


class Arm(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
cc_names = ["armclang"]

# Subclasses use possible names of C++ compiler
cxx_names = ["armclang++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["armflang"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["armflang"]

# Named wrapper links within lib/spack/env
link_paths = {
"cc": os.path.join("arm", "armclang"),

@@ -90,11 +78,3 @@ def fc_pic_flag(self):

return "-fPIC"

required_libs = ["libclang", "libflang"]

@classmethod
def fc_version(cls, fc):
return cls.default_version(fc)

@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
@@ -19,18 +19,6 @@ def __init__(self, *args, **kwargs):

if not self.is_clang_based:
self.version_argument = "-V"

# Subclasses use possible names of C compiler
cc_names = ["craycc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["crayCC"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["crayftn"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["crayftn"]

# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
suffixes = [r"-mp-\d\.\d"]


@@ -31,18 +31,6 @@


class Clang(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["clang"]

# Subclasses use possible names of C++ compiler
cxx_names = ["clang++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["flang-new", "flang"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["flang-new", "flang"]

version_argument = "--version"

@property

@@ -9,18 +9,6 @@


class Fj(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
cc_names = ["fcc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["FCC"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["frt"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["frt"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("fj", "fcc"),
@@ -4,7 +4,6 @@

# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

from llnl.util.filesystem import ancestor

@@ -15,18 +14,6 @@


class Gcc(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
cc_names = ["gcc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["g++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["gfortran"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["gfortran"]

# MacPorts builds gcc versions with prefixes and -mp-X or -mp-X.Y suffixes.
# Homebrew and Linuxbrew may build gcc with -X, -X.Y suffixes.
# Old compatibility versions may contain XY suffixes.

@@ -181,40 +168,6 @@ def default_version(cls, cc):

version = cls.extract_version_from_output(output)
return version

@classmethod
def fc_version(cls, fc):
"""Older versions of gfortran use the ``-dumpversion`` option.
Output looks like this::

GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-18)
Copyright (C) 2010 Free Software Foundation, Inc.

or::

4.8.5

In GCC 7, this option was changed to only return the major
version of the compiler::

7

A new ``-dumpfullversion`` option was added that gives us
what we want::

7.2.0
"""
output = spack.compiler.get_compiler_version_output(fc, "-dumpversion")
match = re.search(r"(?:GNU Fortran \(GCC\) )?([\d.]+)", output)
version = match.group(match.lastindex) if match else "unknown"
if Version(version) >= Version("7"):
output = spack.compiler.get_compiler_version_output(fc, "-dumpfullversion")
version = cls.extract_version_from_output(output)
return version

@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)

@property
def stdcxx_libs(self):
return ("-lstdc++",)
@@ -11,18 +11,6 @@


class Intel(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["icc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["icpc"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["ifort"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["ifort"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("intel", "icc"),

@@ -8,7 +8,7 @@

import subprocess
import sys
import tempfile
from typing import Dict, List
from typing import Dict

import archspec.cpu

@@ -117,18 +117,6 @@ def get_valid_fortran_pth():


class Msvc(Compiler):
# Subclasses use possible names of C compiler
cc_names: List[str] = ["cl"]

# Subclasses use possible names of C++ compiler
cxx_names: List[str] = ["cl"]

# Subclasses use possible names of Fortran 77 compiler
f77_names: List[str] = ["ifx"]

# Subclasses use possible names of Fortran 90 compiler
fc_names: List[str] = ["ifx"]

# Named wrapper links within build_env_path
# Due to the challenges of supporting compiler wrappers
# in Windows, we leave these blank, and dynamically compute

@@ -393,7 +381,3 @@ def fc_version(cls, fc):

)
clp = spack.util.executable.which_string("cl", path=sps)
return cls.default_version(clp) if clp else fc_ver

@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
@@ -5,7 +5,6 @@

import os
import re
from typing import List

import llnl.util.lang

@@ -13,18 +12,6 @@


class Nag(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
cc_names: List[str] = []

# Subclasses use possible names of C++ compiler
cxx_names: List[str] = []

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["nagfor"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["nagfor"]

# Named wrapper links within build_env_path
# Use default wrappers for C and C++, in case provided in compilers.yaml
link_paths = {

@@ -9,18 +9,6 @@


class Nvhpc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["nvc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["nvc++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["nvfortran"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["nvfortran"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("nvhpc", "nvc"),

@@ -13,18 +13,6 @@


class Oneapi(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["icx"]

# Subclasses use possible names of C++ compiler
cxx_names = ["icpx"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["ifx"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["ifx"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("oneapi", "icx"),
@@ -10,18 +10,6 @@


class Pgi(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["pgcc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["pgc++", "pgCC"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["pgfortran", "pgf77"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["pgfortran", "pgf95", "pgf90"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("pgi", "pgcc"),

@@ -11,18 +11,6 @@


class Rocmcc(spack.compilers.clang.Clang):
# Subclasses use possible names of C compiler
cc_names = ["amdclang"]

# Subclasses use possible names of C++ compiler
cxx_names = ["amdclang++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["amdflang"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["amdflang"]

@property
def link_paths(self):
link_paths = {

@@ -61,14 +49,6 @@ def extract_version_from_output(cls, output):

if match:
return ".".join(match.groups())

@classmethod
def fc_version(cls, fortran_compiler):
return cls.default_version(fortran_compiler)

@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)

@property
def stdcxx_libs(self):
return ("-lstdc++",)

@@ -10,18 +10,6 @@


class Xl(Compiler):
# Subclasses use possible names of C compiler
cc_names = ["xlc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["xlC", "xlc++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["xlf"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["xlf90", "xlf95", "xlf2003", "xlf2008"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("xl", "xlc"),
@@ -103,31 +91,3 @@ def fflags(self):

# For Fortran 90 and beyond, it is set by default and has not impact.
# Its use has no negative side effects.
return "-qzerosize"

@classmethod
def fc_version(cls, fc):
# The fortran and C/C++ versions of the XL compiler are always
# two units apart. By this we mean that the fortran release that
# goes with XL C/C++ 11.1 is 13.1. Having such a difference in
# version number is confusing spack quite a lot. Most notably
# if you keep the versions as is the default xl compiler will
# only have fortran and no C/C++. So we associate the Fortran
# compiler with the version associated to the C/C++ compiler.
# One last stumble. Version numbers over 10 have at least a .1
# those under 10 a .0. There is no xlf 9.x or under currently
# available. BG/P and BG/L can such a compiler mix and possibly
# older version of AIX and linux on power.
fortran_version = cls.default_version(fc)
if fortran_version >= 16:
# Starting with version 16.1, the XL C and Fortran compilers
# have the same version. So no need to downgrade the Fortran
# compiler version to match that of the C compiler version.
return str(fortran_version)
c_version = float(fortran_version) - 2
if c_version < 10:
c_version = c_version - 0.1
return str(c_version)

@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)

@@ -9,18 +9,6 @@


class XlR(spack.compilers.xl.Xl):
# Subclasses use possible names of C compiler
cc_names = ["xlc_r"]

# Subclasses use possible names of C++ compiler
cxx_names = ["xlC_r", "xlc++_r"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["xlf_r"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["xlf90_r", "xlf95_r", "xlf2003_r", "xlf2008_r"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("xl_r", "xlc_r"),
@@ -8,16 +8,8 @@

from contextlib import contextmanager
from itertools import chain

import spack.compilers
import spack.config
import spack.environment
import spack.error
import spack.platforms
import spack.repo
import spack.spec
import spack.target
import spack.tengine
import spack.util.path

CHECK_COMPILER_EXISTENCE = True

@@ -39,6 +39,7 @@

from llnl.util import filesystem, lang, tty

import spack.error
import spack.paths
import spack.platforms
import spack.schema

@@ -48,17 +49,19 @@

import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config
import spack.schema.definitions
import spack.schema.develop
import spack.schema.env
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view

# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.cpus import cpus_available

#: Dict from section names -> schema for that section
@@ -165,7 +168,7 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:

def _write_section(self, section: str) -> None:
if not self.writable:
raise ConfigError(f"Cannot write to immutable scope {self}")
raise spack.error.ConfigError(f"Cannot write to immutable scope {self}")

filename = self.get_section_filename(section)
data = self.get_section(section)

@@ -277,7 +280,7 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:

def _write_section(self, section: str) -> None:
if not self.writable:
raise ConfigError(f"Cannot write to immutable scope {self}")
raise spack.error.ConfigError(f"Cannot write to immutable scope {self}")
data_to_write: Optional[YamlConfigDict] = self._raw_data

# If there is no existing data, this section SingleFileScope has never

@@ -705,7 +708,7 @@ def print_section(self, section: str, blame: bool = False, *, scope=None) -> Non

data[section] = self.get_config(section, scope=scope)
syaml.dump_config(data, stream=sys.stdout, default_flow_style=False, blame=blame)
except (syaml.SpackYAMLError, OSError) as e:
raise ConfigError(f"cannot read '{section}' configuration") from e
raise spack.error.ConfigError(f"cannot read '{section}' configuration") from e


@contextlib.contextmanager

@@ -807,7 +810,7 @@ def _add_command_line_scopes(

_add_platform_scope(cfg, name, path, writable=False)
continue
else:
raise ConfigError(f"Invalid configuration scope: {path}")
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")

for scope in manifest.env_config_scopes:
scope.name = f"{name}:{scope.name}"

@@ -1019,7 +1022,7 @@ def change_or_add(

if found:
update_fn(section)
spack.config.set(section_name, section, scope=scope)
CONFIG.set(section_name, section, scope=scope)
return

# If no scope meets the criteria specified by ``find_fn``,
|
||||
break
|
||||
|
||||
if found:
|
||||
spack.config.set(section_name, section, scope=scope)
|
||||
CONFIG.set(section_name, section, scope=scope)
|
||||
return
|
||||
|
||||
# If no scopes define any config for the named section, then
|
||||
# modify the highest-priority scope.
|
||||
scope, section = configs_by_section[0]
|
||||
update_fn(section)
|
||||
spack.config.set(section_name, section, scope=scope)
|
||||
CONFIG.set(section_name, section, scope=scope)
|
||||
|
||||
|
||||
def update_all(section_name: str, change_fn: Callable[[str], bool]) -> None:
|
||||
@@ -1051,7 +1054,7 @@ def update_all(section_name: str, change_fn: Callable[[str], bool]) -> None:
|
||||
for scope, section in configs_by_section:
|
||||
modified = change_fn(section)
|
||||
if modified:
|
||||
spack.config.set(section_name, section, scope=scope)
|
||||
CONFIG.set(section_name, section, scope=scope)
|
||||
|
||||
|
||||
def _validate_section_name(section: str) -> None:
|
||||
@@ -1225,7 +1228,7 @@ def get_valid_type(path):
|
||||
return types[schema_type]()
|
||||
else:
|
||||
return type(None)
|
||||
raise ConfigError(f"Cannot determine valid type for path '{path}'.")
|
||||
raise spack.error.ConfigError(f"Cannot determine valid type for path '{path}'.")
|
||||
|
||||
|
||||
def remove_yaml(dest, source):
|
||||
@@ -1268,7 +1271,7 @@ def they_are(t):
|
||||
unmerge = sk in dest
|
||||
old_dest_value = dest.pop(sk, None)
|
||||
|
||||
if unmerge and not spack.config._override(sk):
|
||||
if unmerge and not _override(sk):
|
||||
dest[sk] = remove_yaml(old_dest_value, sv)
|
||||
|
||||
return dest
|
||||
@@ -1705,40 +1708,48 @@ def get_mark_from_yaml_data(obj):
|
||||
return mark
|
||||
|
||||
|
||||
def parse_spec_from_yaml_string(string: str) -> "spack.spec.Spec":
|
||||
"""Parse a spec from YAML and add file/line info to errors, if it's available.
|
||||
|
||||
Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
|
||||
add file/line information for debugging using file/line annotations from the string.
|
||||
|
||||
Arguments:
|
||||
string: a string representing a ``Spec`` from config YAML.
|
||||
|
||||
def determine_number_of_jobs(
|
||||
*,
|
||||
parallel: bool = False,
|
||||
max_cpus: int = cpus_available(),
|
||||
config: Optional[Configuration] = None,
|
||||
) -> int:
|
||||
"""
|
||||
Packages that require sequential builds need 1 job. Otherwise we use the
|
||||
number of jobs set on the command line. If not set, then we use the config
|
||||
defaults (which is usually set through the builtin config scope), but we
|
||||
cap to the number of CPUs available to avoid oversubscription.
|
||||
|
||||
Parameters:
|
||||
parallel: true when package supports parallel builds
|
||||
max_cpus: maximum number of CPUs to use (defaults to cpus_available())
|
||||
config: configuration object (defaults to global config)
|
||||
"""
|
||||
if not parallel:
|
||||
return 1
|
||||
|
||||
cfg = config or CONFIG
|
||||
|
||||
# Command line overrides all
|
||||
try:
|
||||
spec = spack.spec.Spec(string)
|
||||
return spec
|
||||
except spack.parser.SpecSyntaxError as e:
|
||||
mark = spack.config.get_mark_from_yaml_data(string)
|
||||
if mark:
|
||||
msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
|
||||
raise spack.parser.SpecSyntaxError(msg) from e
|
||||
raise e
|
||||
command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
|
||||
if command_line is not None:
|
||||
return command_line
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return min(max_cpus, cfg.get("config:build_jobs", 16))
|
||||
|
||||
|
||||
class ConfigError(SpackError):
|
||||
"""Superclass for all Spack config related errors."""
|
||||
|
||||
|
||||
class ConfigSectionError(ConfigError):
|
||||
class ConfigSectionError(spack.error.ConfigError):
|
||||
"""Error for referring to a bad config section name in a configuration."""
|
||||
|
||||
|
||||
class ConfigFileError(ConfigError):
|
||||
class ConfigFileError(spack.error.ConfigError):
|
||||
"""Issue reading or accessing a configuration file."""
|
||||
|
||||
|
||||
class ConfigFormatError(ConfigError):
|
||||
class ConfigFormatError(spack.error.ConfigError):
|
||||
"""Raised when a configuration format does not match its schema."""
|
||||
|
||||
def __init__(
|
||||
|
@@ -6,6 +6,7 @@

convenience functions.
"""
import copy
import shlex
from collections import namedtuple
from typing import Optional

@@ -15,7 +16,7 @@

import spack.tengine as tengine
import spack.util.spack_yaml as syaml

from ..images import (
from .images import (
bootstrap_template_for,
build_info,
checkout_command,

@@ -308,7 +309,54 @@ def __call__(self):

return t.render(**self.to_dict())


# Import after function definition all the modules in this package,
# so that registration of writers will happen automatically
from . import docker # noqa: F401 E402
from . import singularity # noqa: F401 E402
@writer("docker")
class DockerContext(PathContext):
"""Context used to instantiate a Dockerfile"""

#: Name of the template used for Dockerfiles
template_name = "container/Dockerfile"

@tengine.context_property
def manifest(self):
manifest_str = super().manifest
# Docker doesn't support HEREDOC, so we need to resort to
# a horrible echo trick to have the manifest in the Dockerfile
echoed_lines = []
for idx, line in enumerate(manifest_str.split("\n")):
quoted_line = shlex.quote(line)
if idx == 0:
echoed_lines.append("&& (echo " + quoted_line + " \\")
continue
echoed_lines.append("&& echo " + quoted_line + " \\")

echoed_lines[-1] = echoed_lines[-1].replace(" \\", ")")

return "\n".join(echoed_lines)


@writer("singularity")
class SingularityContext(PathContext):
"""Context used to instantiate a Singularity definition file"""

#: Name of the template used for Singularity definition files
template_name = "container/singularity.def"

@property
def singularity_config(self):
return self.container_config.get("singularity", {})

@tengine.context_property
def runscript(self):
return self.singularity_config.get("runscript", "")

@tengine.context_property
def startscript(self):
return self.singularity_config.get("startscript", "")

@tengine.context_property
def test(self):
return self.singularity_config.get("test", "")

@tengine.context_property
def help(self):
return self.singularity_config.get("help", "")
@@ -1,34 +0,0 @@

# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import shlex

import spack.tengine as tengine

from . import PathContext, writer


@writer("docker")
class DockerContext(PathContext):
"""Context used to instantiate a Dockerfile"""

#: Name of the template used for Dockerfiles
template_name = "container/Dockerfile"

@tengine.context_property
def manifest(self):
manifest_str = super().manifest
# Docker doesn't support HEREDOC, so we need to resort to
# a horrible echo trick to have the manifest in the Dockerfile
echoed_lines = []
for idx, line in enumerate(manifest_str.split("\n")):
quoted_line = shlex.quote(line)
if idx == 0:
echoed_lines.append("&& (echo " + quoted_line + " \\")
continue
echoed_lines.append("&& echo " + quoted_line + " \\")

echoed_lines[-1] = echoed_lines[-1].replace(" \\", ")")

return "\n".join(echoed_lines)

@@ -1,35 +0,0 @@

# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.tengine as tengine

from . import PathContext, writer


@writer("singularity")
class SingularityContext(PathContext):
"""Context used to instantiate a Singularity definition file"""

#: Name of the template used for Singularity definition files
template_name = "container/singularity.def"

@property
def singularity_config(self):
return self.container_config.get("singularity", {})

@tengine.context_property
def runscript(self):
return self.singularity_config.get("runscript", "")

@tengine.context_property
def startscript(self):
return self.singularity_config.get("startscript", "")

@tengine.context_property
def test(self):
return self.singularity_config.get("test", "")

@tengine.context_property
def help(self):
return self.singularity_config.get("help", "")
@@ -132,7 +132,7 @@ def spec_from_entry(entry):

variant_strs = list()
for name, value in entry["parameters"].items():
# TODO: also ensure that the variant value is valid?
if not (name in pkg_cls.variants):
if not pkg_cls.has_variant(name):
tty.debug(
"Omitting variant {0} for entry {1}/{2}".format(
name, entry["name"], entry["hash"][:7]

@@ -25,8 +25,10 @@

import llnl.util.tty

import spack.config
import spack.error
import spack.operating_systems.windows_os as winOs
import spack.spec
import spack.util.environment
import spack.util.spack_yaml
import spack.util.windows_registry

@@ -18,10 +18,12 @@

import llnl.util.lang
import llnl.util.tty

import spack.spec
import spack.util.elf as elf_utils
import spack.util.environment
import spack.util.environment as environment
import spack.util.ld_so_conf
import spack.util.parallel

from .common import (
WindowsCompilerExternalPaths,

@@ -406,7 +408,7 @@ def by_path(

result = collections.defaultdict(list)
repository = spack.repo.PATH.ensure_unwrapped()
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
with spack.util.parallel.make_concurrent_executor(max_workers, require_fork=False) as executor:
for pkg in packages_to_search:
executable_future = executor.submit(
executables_finder.find,