Compare commits
features/e... ... v0.20.3
75 commits:

6812713cf4
4b07a24466
a640a5b8e2
210705e701
77c707b88a
b3fa390226
93be34bfc4
da4ff7e2d4
3246b08295
fd4784805e
ca77a3013f
d3c0da48d9
26b8f4ffeb
9cb4e4534f
19fc944bcc
12cb071fe2
0e905f5e64
dd30f2dea3
e27723d046
d7fcaef01e
f4cf4e468a
69fe0a5518
f8483aca0d
6d5697cbe1
e8658d6493
0a4bd29ce5
506b899676
c2103b27f6
1a2e845958
e5f270c8da
0fd224404a
215020f9bb
b366cb3c90
e0bba8f4a3
60195d72c9
2008503a1f
9df47aabdb
80e90b924a
c6ff664366
d27debd940
c93b8bceb8
f602c67606
3a082f0112
9fb25b7404
9924c92c40
16cb6ac1ed
5821746258
d860083b08
f2d3818d5c
0052f330be
456db45c4a
e493ab31c6
e0f45b33e9
bb61ecb9b9
9694225b80
3b15e7bf41
ac5f0cc340
f67840511a
bd9cfa3a47
96c262b13e
d22fd79a0b
8cf4bf7559
14a703a4bb
d7726f80e8
d69c3a6ab7
1fd964140d
c9bab946d4
74a5cd2bb0
151ce6f923
1c31ce82af
caab2cbfd2
a6f41006eb
18b4670d9f
322fe415e4
096bfa4ba9
.github/dependabot.yml (5 changed lines, vendored)

@@ -5,3 +5,8 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
+  # Requirements to build documentation
+  - package-ecosystem: "pip"
+    directory: "/lib/spack/docs"
+    schedule:
+      interval: "daily"
.github/workflows/audit.yaml (4 changed lines, vendored)

@@ -20,7 +20,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
@@ -38,7 +38,7 @@ jobs:
        run: |
          . share/spack/setup-env.sh
          $(which spack) audit packages
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,audits
.github/workflows/bootstrap.yml (4 changed lines, vendored)

@@ -183,7 +183,7 @@ jobs:
       - name: Bootstrap clingo
        run: |
          set -ex
-          for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
@@ -214,7 +214,7 @@ jobs:
       - name: Bootstrap clingo
        run: |
          set -ex
-          for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
.github/workflows/nightly-win-builds.yml (31 changed lines, vendored; file removed)

@@ -1,31 +0,0 @@
-name: Windows Paraview Nightly
-
-on:
-  schedule:
-    - cron: '0 2 * * *'  # Run at 2 am
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-
-
-jobs:
-  build-paraview-deps:
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip six pywin32 setuptools coverage
-      - name: Build Test
-        run: |
-          spack compiler find
-          spack external find cmake ninja win-sdk win-wdk wgl msmpi
-          spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
-          exit 0
.github/workflows/unit_tests.yaml (16 changed lines, vendored)

@@ -50,7 +50,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -87,7 +87,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
   # Test shell integration
@@ -97,7 +97,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -118,7 +118,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: shelltests,linux
 
@@ -155,7 +155,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -176,7 +176,7 @@ jobs:
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
        with:
          flags: unittests,linux,clingo
   # Run unit tests on MacOS
@@ -189,7 +189,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
@@ -211,6 +211,6 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,macos
.github/workflows/valid-style.yml (6 changed lines, vendored)

@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -38,13 +38,13 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python packages
        run: |
-          python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
+          python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8==6.0.0
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
.github/workflows/windows_python.yml (10 changed lines, vendored)

@@ -18,7 +18,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -33,7 +33,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,windows
   unit-tests-cmd:
@@ -42,7 +42,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -57,7 +57,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,windows
   build-abseil:
@@ -66,7 +66,7 @@ jobs:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -1,10 +1,16 @@
 version: 2
 
+build:
+  os: "ubuntu-22.04"
+  apt_packages:
+    - graphviz
+  tools:
+    python: "3.11"
+
 sphinx:
   configuration: lib/spack/docs/conf.py
   fail_on_warning: true
 
 python:
-  version: 3.7
   install:
     - requirements: lib/spack/docs/requirements.txt
CHANGELOG.md (47 changed lines)

@@ -1,3 +1,50 @@
+# v0.20.3 (2023-10-31)
+
+## Bugfixes
+
+- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
+- Fix a minor issue with package hash computation for Python 3.12 (#40328)
+
+
+# v0.20.2 (2023-10-03)
+
+## Features in this release
+
+Spack now supports Python 3.12 (#40155)
+
+## Bugfixes
+
+- Improve escaping in Tcl module files (#38375)
+- Make repo cache work on repositories with zero mtime (#39214)
+- Ignore errors for newer, incompatible buildcache version (#40279)
+- Print an error when git is required, but missing (#40254)
+- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
+- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
+- Fix a bug where installation failures caused an unrelated `NameError` to be thrown (#39017)
+- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
+- Fix a bug triggered when file locking fails internally (#39188)
+- Prevent `spack external find` from erroring out when a directory cannot be accessed (#38755)
+- Fix multiple performance regressions in environments (#38771)
+- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)
+
+# v0.20.1 (2023-07-10)
+
+## Spack Bugfixes
+
+- Specs removed from an environment were not actually removed if `--force` was not given (#37877)
+- Speed up module file generation (#37739)
+- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
+- Fix re-running stand-alone tests a second time, which previously produced a spurious trailing failure (#37840)
+- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
+- Fixed a few bugs when generating Dockerfiles from Spack (#37766, #37769)
+- Fixed a few long-standing bugs when generating module files (#36678, #38347, #38465, #38455)
+- Fixed issues with building Python extensions using an external Python (#38186)
+- Fixed compiler removal from the command line (#38057)
+- Show external status as [e] (#33792)
+- Backported `archspec` fixes (#37793)
+- Improved a few error messages (#37791)
+
+
 # v0.20.0 (2023-05-21)
 
 `v0.20.0` is a major feature release.
lib/spack/docs/_pygments/style.py (16 changed lines, new file)

@@ -0,0 +1,16 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# The name of the Pygments (syntax highlighting) style to use.
+# We use our own extension of the default style with a few modifications
+from pygments.styles.default import DefaultStyle
+from pygments.token import Generic
+
+
+class SpackStyle(DefaultStyle):
+    styles = DefaultStyle.styles.copy()
+    background_color = "#f4f4f8"
+    styles[Generic.Output] = "#355"
+    styles[Generic.Prompt] = "bold #346ec9"
@@ -149,7 +149,6 @@ def setup(sphinx):
 # Get nice vector graphics
 graphviz_output_format = "svg"
-
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]
 
@@ -233,30 +232,8 @@ def setup(sphinx):
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
 # show_authors = False
 
-# The name of the Pygments (syntax highlighting) style to use.
-# We use our own extension of the default style with a few modifications
-from pygments.style import Style
-from pygments.styles.default import DefaultStyle
-from pygments.token import Comment, Generic, Text
-
-
-class SpackStyle(DefaultStyle):
-    styles = DefaultStyle.styles.copy()
-    background_color = "#f4f4f8"
-    styles[Generic.Output] = "#355"
-    styles[Generic.Prompt] = "bold #346ec9"
-
-
-import pkg_resources
-
-dist = pkg_resources.Distribution(__file__)
-sys.path.append(".")  # make 'conf' module findable
-ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
-dist._ep_map = {"pygments.styles": {"plugin1": ep}}
-pkg_resources.working_set.add(dist)
-
-pygments_style = "spack"
+sys.path.append("./_pygments")
+pygments_style = "style.SpackStyle"
 
 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -341,16 +318,15 @@ class SpackStyle(DefaultStyle):
 # Output file base name for HTML help builder.
 htmlhelp_basename = "Spackdoc"
 
 
 # -- Options for LaTeX output --------------------------------------------------
 
 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
+    # 'papersize': 'letterpaper',
     # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
+    # 'pointsize': '10pt',
     # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+    # 'preamble': '',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
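The `conf.py` change works because Sphinx also accepts a dotted `module.ClassName` path for `pygments_style`, which it imports from `sys.path`; that is why appending `./_pygments` replaces the old fake `pkg_resources` entry-point registration. A minimal sketch of using the new style class directly with Pygments (assumes Pygments is installed and the module above is reachable; illustrative only):

```python
import sys

from pygments.formatters import HtmlFormatter

sys.path.append("./_pygments")  # same trick the new conf.py uses
from style import SpackStyle  # the class added in this diff

# Pygments formatters accept a Style class directly.
formatter = HtmlFormatter(style=SpackStyle)
print(formatter.style.background_color)  # -> #f4f4f8
```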
@@ -143,26 +143,6 @@ The OS that are currently supported are summarized in the table below:
-   * - Amazon Linux 2
-     - ``amazonlinux:2``
-     - ``spack/amazon-linux``
-   * - AlmaLinux 8
-     - ``almalinux:8``
-     - ``spack/almalinux8``
-   * - AlmaLinux 9
-     - ``almalinux:9``
-     - ``spack/almalinux9``
-   * - Rocky Linux 8
-     - ``rockylinux:8``
-     - ``spack/rockylinux8``
-   * - Rocky Linux 9
-     - ``rockylinux:9``
-     - ``spack/rockylinux9``
-   * - Fedora Linux 37
-     - ``fedora:37``
-     - ``spack/fedora37``
-   * - Fedora Linux 38
-     - ``fedora:38``
-     - ``spack/fedora38``
 
 
 All the images are tagged with the corresponding release of Spack:
 
@@ -636,7 +616,7 @@ to customize the generation of container recipes:
     - No
   * - ``os_packages:command``
     - Tool used to manage system packages
-    - ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
+    - ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
     - Only with custom base images
   * - ``os_packages:update``
     - Whether or not to update the list of available packages
@@ -916,9 +916,9 @@ function, as shown in the example below:
 .. code-block:: yaml
 
    projections:
-     zlib: {name}-{version}
-     ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
-     all: {name}-{version}/{compiler.name}-{compiler.version}
+     zlib: "{name}-{version}"
+     ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
+     all: "{name}-{version}/{compiler.name}-{compiler.version}"
 
 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
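The quoting added above matters because a bare `{` opens a YAML flow mapping, so an unquoted projection like `{name}-{version}` is not a plain string and fails to parse. A minimal sketch of the difference, assuming PyYAML is available (illustrative, not part of this diff):

```python
import yaml  # assumes PyYAML is installed

quoted = 'projections:\n  zlib: "{name}-{version}"'
print(yaml.safe_load(quoted))
# -> {'projections': {'zlib': '{name}-{version}'}}

unquoted = "projections:\n  zlib: {name}-{version}"
try:
    print(yaml.safe_load(unquoted))
except yaml.YAMLError as err:
    # the bare ``{`` starts a flow mapping, so trailing text is a syntax error
    print("unquoted projection failed to parse:", type(err).__name__)
```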
|
@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ module load gcc/4.9.0
|
||||
$ module load gcc-4.9.0
|
||||
$ spack compiler find
|
||||
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||
gcc@4.9.0
|
||||
|
@@ -35,27 +35,27 @@ showing lots of installed packages:
    $ module avail
 
    --------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
-   autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
-   automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
-   binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
-   bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
-   bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
-   bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
-   cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
-   curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
-   expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
-   flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
-   gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
-   gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
-   gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
-   help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj
+   autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
+   automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
+   binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
+   bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
+   bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
+   bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
+   cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
+   curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
+   expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
+   flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
+   gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
+   gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
+   gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
+   help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
 
 The names should look familiar, as they resemble the output from ``spack find``.
 For example, you could type the following command to load the ``cmake`` module:
 
 .. code-block:: console
 
-   $ module load cmake/3.7.2-gcc-6.3.0-fowuuby
+   $ module load cmake-3.7.2-gcc-6.3.0-fowuuby
 
 Neither of these is particularly pretty, easy to remember, or easy to
 type. Luckily, Spack offers many facilities for customizing the module
@@ -779,35 +779,35 @@ cut-and-pasted into a shell script. For example:
 
    $ spack module tcl loads --dependencies py-numpy git
    # bzip2@1.0.6%gcc@4.9.3=linux-x86_64
-   module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
+   module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
    # ncurses@6.0%gcc@4.9.3=linux-x86_64
-   module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
+   module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
    # zlib@1.2.8%gcc@4.9.3=linux-x86_64
-   module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
+   module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
    # sqlite@3.8.5%gcc@4.9.3=linux-x86_64
-   module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
+   module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
    # readline@6.3%gcc@4.9.3=linux-x86_64
-   module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
+   module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
    # python@3.5.1%gcc@4.9.3=linux-x86_64
-   module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
+   module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
    # py-setuptools@20.5%gcc@4.9.3=linux-x86_64
-   module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
+   module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
    # py-nose@1.3.7%gcc@4.9.3=linux-x86_64
-   module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
+   module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
    # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
-   module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
+   module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
    # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
-   module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
+   module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
    # curl@7.47.1%gcc@4.9.3=linux-x86_64
-   module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
+   module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
    # autoconf@2.69%gcc@4.9.3=linux-x86_64
-   module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
+   module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
    # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
-   module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
+   module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
    # expat@2.1.0%gcc@4.9.3=linux-x86_64
-   module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
+   module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
    # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
-   module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
+   module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
 
 The script may be further edited by removing unnecessary modules.
 
@@ -826,12 +826,12 @@ For example, consider the following on one system:
 .. code-block:: console
 
    $ module avail
-   linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
+   linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
 
    $ spack module tcl loads antlr  # WRONG!
    # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
-   module load antlr/2.7.7-gcc-5.3.0-bdpl46y
+   module load antlr-2.7.7-gcc-5.3.0-bdpl46y
 
    $ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
    # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
-   module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
+   module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
@@ -1,13 +1,8 @@
-# These dependencies should be installed using pip in order
-# to build the documentation.
-
-sphinx>=3.4,!=4.1.2,!=5.1.0
-sphinxcontrib-programoutput
-sphinx-design
-sphinx-rtd-theme
-python-levenshtein
-# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
-# https://stackoverflow.com/questions/67542699
-docutils <0.17
-pygments <2.13
-urllib3 <2
+sphinx==6.2.1
+sphinxcontrib-programoutput==0.17
+sphinx_design==0.4.1
+sphinx-rtd-theme==1.2.1
+python-levenshtein==0.21.0
+docutils==0.18.1
+pygments==2.15.1
+urllib3==2.0.2
@@ -139,7 +139,7 @@ def get_fh(self, path):
     def release_by_stat(self, stat):
         key = (stat.st_dev, stat.st_ino, os.getpid())
         open_file = self._descriptors.get(key)
-        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
+        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino
 
         open_file.refs -= 1
         if not open_file.refs:
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.21.0.dev0"
+__version__ = "0.20.3"
 spack_version = __version__
 
 
@@ -199,11 +199,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
             with self._index_file_cache.read_transaction(cache_key):
                 db._read_from_file(cache_path)
         except spack_db.InvalidDatabaseVersionError as e:
-            msg = (
+            tty.warn(
                 f"you need a newer Spack version to read the buildcache index for the "
                 f"following mirror: '{mirror_url}'. {e.database_version_message}"
             )
-            raise BuildcacheIndexError(msg) from e
+            return
 
         spec_list = db.query_local(installed=False, in_buildcache=True)
@@ -589,6 +589,7 @@ def set_module_variables_for_package(pkg):
     # TODO: make these build deps that can be installed if not found.
     m.make = MakeExecutable("make", jobs)
+    m.gmake = MakeExecutable("gmake", jobs)
     m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
     # TODO: johnwparent: add package or builder support to define these build tools
     # for now there is no entrypoint for builders to define these on their
@@ -1027,7 +1028,7 @@ def get_cmake_prefix_path(pkg):
 
 
 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
     context = kwargs.get("context", "build")
 
@@ -1048,12 +1049,12 @@ def _setup_pkg_and_run(
             pkg, dirty=kwargs.get("dirty", False), context=context
         )
         return_value = function(pkg, kwargs)
-        child_pipe.send(return_value)
+        write_pipe.send(return_value)
 
     except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
-        child_pipe.send(e)
+        write_pipe.send(e)
     except BaseException:
         # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1103,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        child_pipe.send(ce)
+        write_pipe.send(ce)
 
     finally:
-        child_pipe.close()
+        write_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()
 
@@ -1149,7 +1150,7 @@ def child_fun():
     For more information on `multiprocessing` child process creation
     mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
-    parent_pipe, child_pipe = multiprocessing.Pipe()
+    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
     input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None
@@ -1174,7 +1175,7 @@ def child_fun():
             serialized_pkg,
             function,
             kwargs,
-            child_pipe,
+            write_pipe,
             input_multiprocess_fd,
             jobserver_fd1,
             jobserver_fd2,
@@ -1183,6 +1184,12 @@ def child_fun():
 
         p.start()
 
+        # We close the writable end of the pipe now to be sure that p is the
+        # only process which owns a handle for it. This ensures that when p
+        # closes its handle for the writable end, read_pipe.recv() will
+        # promptly report the readable end as being ready.
+        write_pipe.close()
+
     except InstallError as e:
         e.pkg = pkg
         raise
@@ -1192,7 +1199,16 @@ def child_fun():
     if input_multiprocess_fd is not None:
         input_multiprocess_fd.close()
 
-    child_result = parent_pipe.recv()
+    def exitcode_msg(p):
+        typ = "exit" if p.exitcode >= 0 else "signal"
+        return f"{typ} {abs(p.exitcode)}"
+
+    try:
+        child_result = read_pipe.recv()
+    except EOFError:
+        p.join()
+        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
     p.join()
 
     # If returns a StopPhase, raise it
@@ -1212,6 +1228,10 @@ def child_fun():
         child_result.print_context()
         raise child_result
 
+    # Fallback. Usually caught beforehand in EOFError above.
+    if p.exitcode != 0:
+        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
     return child_result
 
 
@@ -1256,9 +1276,8 @@ def make_stack(tb, stack=None):
             func = getattr(obj, tb.tb_frame.f_code.co_name, "")
             if func:
                 typename, *_ = func.__qualname__.partition(".")
-
-            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
-                break
+                if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
+                    break
         else:
             return None
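The switch from a duplex `Pipe()` to a one-way pipe, combined with closing the parent's handle on the writable end right after `p.start()`, is what lets `read_pipe.recv()` observe EOF as soon as the child dies without sending a result, instead of blocking forever. A minimal standalone sketch of that pattern, assuming a POSIX system (illustrative only, not Spack code):

```python
import multiprocessing
import os
import signal


def child(write_pipe):
    # Simulate a build process dying before it can send its result.
    os.kill(os.getpid(), signal.SIGKILL)
    write_pipe.send("never reached")


if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    # Close the parent's writable handle so the child holds the only one;
    # once the child exits, recv() sees EOF instead of hanging.
    write_pipe.close()
    try:
        result = read_pipe.recv()
    except EOFError:
        p.join()
        kind = "exit" if p.exitcode >= 0 else "signal"
        print(f"child stopped unexpectedly ({kind} {abs(p.exitcode)})")
```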
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections.abc
 import os
 from typing import Tuple
 
@@ -14,24 +13,21 @@
 from .cmake import CMakeBuilder, CMakePackage
 
 
-def cmake_cache_path(name, value, comment="", force=False):
+def cmake_cache_path(name, value, comment=""):
     """Generate a string for a cmake cache variable"""
-    force_str = " FORCE" if force else ""
-    return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)
+    return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)
 
 
-def cmake_cache_string(name, value, comment="", force=False):
+def cmake_cache_string(name, value, comment=""):
     """Generate a string for a cmake cache variable"""
-    force_str = " FORCE" if force else ""
-    return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)
+    return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)
 
 
-def cmake_cache_option(name, boolean_value, comment="", force=False):
+def cmake_cache_option(name, boolean_value, comment=""):
     """Generate a string for a cmake configuration option"""
-
     value = "ON" if boolean_value else "OFF"
-    force_str = " FORCE" if force else ""
-    return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
+    return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
 
 
 class CachedCMakeBuilder(CMakeBuilder):
@@ -67,34 +63,6 @@ def cache_name(self):
     def cache_path(self):
         return os.path.join(self.pkg.stage.source_path, self.cache_name)
 
-    # Implement a version of the define_from_variant for Cached packages
-    def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
-        """Return a Cached CMake field from the given variant's value.
-        See define_from_variant in lib/spack/spack/build_systems/cmake.py package
-        """
-
-        if variant is None:
-            variant = cmake_var.lower()
-
-        if variant not in self.pkg.variants:
-            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
-
-        if variant not in self.pkg.spec.variants:
-            return ""
-
-        value = self.pkg.spec.variants[variant].value
-        field = None
-        if isinstance(value, bool):
-            field = cmake_cache_option(cmake_var, value, comment)
-        else:
-            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
-                value = ";".join(str(v) for v in value)
-            else:
-                value = str(value)
-            field = cmake_cache_string(cmake_var, value, comment)
-
-        return field
-
     def initconfig_compiler_entries(self):
         # This will tell cmake to use the Spack compiler wrappers when run
         # through Spack, but use the underlying compiler when run outside of
@@ -162,17 +130,6 @@ def initconfig_compiler_entries(self):
             libs_string = libs_format_string.format(lang)
             entries.append(cmake_cache_string(libs_string, libs_flags))
 
-        # Set the generator in the cached config
-        if self.spec.satisfies("generator=make"):
-            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
-        if self.spec.satisfies("generator=ninja"):
-            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
-            entries.append(
-                cmake_cache_string(
-                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
-                )
-            )
-
         return entries
 
     def initconfig_mpi_entries(self):
@@ -238,57 +195,26 @@ def initconfig_hardware_entries(self):
             "#------------------{0}\n".format("-" * 60),
         ]
 
-        # Provide standard CMake arguments for dependent CachedCMakePackages
         if spec.satisfies("^cuda"):
             entries.append("#------------------{0}".format("-" * 30))
             entries.append("# Cuda")
             entries.append("#------------------{0}\n".format("-" * 30))
 
             cudatoolkitdir = spec["cuda"].prefix
-            entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
-            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
-            entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
-            # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
             entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
-
-            archs = spec.variants["cuda_arch"].value
-            if archs != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
-                )
-
-        if "+rocm" in spec:
-            entries.append("#------------------{0}".format("-" * 30))
-            entries.append("# ROCm")
-            entries.append("#------------------{0}\n".format("-" * 30))
-
-            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
-            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            entries.append(
-                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
-            )
-            archs = self.spec.variants["amdgpu_target"].value
-            if archs != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
-                )
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
-                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
+            cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
+            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
+            entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
 
         return entries
 
     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
             "#------------------{0}".format("-" * 60),
             "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
         ]
 
     def initconfig_package_entries(self):
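As a usage note, the surviving `cmake_cache_*` helpers just render `set(... CACHE ...)` strings for the generated initial-cache file, now always without `FORCE`. A quick illustration (the helper body is copied from the diff above; the call is a hypothetical example):

```python
def cmake_cache_option(name, boolean_value, comment=""):
    """Generate a string for a cmake configuration option"""
    value = "ON" if boolean_value else "OFF"
    return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)


print(cmake_cache_option("ENABLE_TESTS", True, "build the test suite"), end="")
# -> set(ENABLE_TESTS ON CACHE BOOL "build the test suite")
```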
@@ -180,51 +180,6 @@ def test(self):
             work_dir="spack-test",
         )
 
-
-class PythonPackage(PythonExtension):
-    """Specialized class for packages that are built using pip."""
-
-    #: Package name, version, and extension on PyPI
-    pypi: Optional[str] = None
-
-    # To be used in UI queries that require to know which
-    # build-system class we are using
-    build_system_class = "PythonPackage"
-    #: Legacy buildsystem attribute used to deserialize and install old specs
-    legacy_buildsystem = "python_pip"
-
-    #: Callback names for install-time test
-    install_time_test_callbacks = ["test"]
-
-    build_system("python_pip")
-
-    with spack.multimethod.when("build_system=python_pip"):
-        extends("python")
-        depends_on("py-pip", type="build")
-        # FIXME: technically wheel is only needed when building from source, not when
-        # installing a downloaded wheel, but I don't want to add wheel as a dep to every
-        # package manually
-        depends_on("py-wheel", type="build")
-
-    py_namespace: Optional[str] = None
-
-    @lang.classproperty
-    def homepage(cls):
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return "https://pypi.org/project/" + name + "/"
-
-    @lang.classproperty
-    def url(cls):
-        if cls.pypi:
-            return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
-
-    @lang.classproperty
-    def list_url(cls):
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return "https://pypi.org/simple/" + name + "/"
-
     def update_external_dependencies(self, extendee_spec=None):
         """
         Ensure all external python packages have a python dependency
@@ -270,6 +225,51 @@ def update_external_dependencies(self, extendee_spec=None):
         python._mark_concrete()
         self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
 
+
+class PythonPackage(PythonExtension):
+    """Specialized class for packages that are built using pip."""
+
+    #: Package name, version, and extension on PyPI
+    pypi: Optional[str] = None
+
+    # To be used in UI queries that require to know which
+    # build-system class we are using
+    build_system_class = "PythonPackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "python_pip"
+
+    #: Callback names for install-time test
+    install_time_test_callbacks = ["test"]
+
+    build_system("python_pip")
+
+    with spack.multimethod.when("build_system=python_pip"):
+        extends("python")
+        depends_on("py-pip", type="build")
+        # FIXME: technically wheel is only needed when building from source, not when
+        # installing a downloaded wheel, but I don't want to add wheel as a dep to every
+        # package manually
+        depends_on("py-wheel", type="build")
+
+    py_namespace: Optional[str] = None
+
+    @lang.classproperty
+    def homepage(cls):
+        if cls.pypi:
+            name = cls.pypi.split("/")[0]
+            return "https://pypi.org/project/" + name + "/"
+
+    @lang.classproperty
+    def url(cls):
+        if cls.pypi:
+            return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
+
+    @lang.classproperty
+    def list_url(cls):
+        if cls.pypi:
+            name = cls.pypi.split("/")[0]
+            return "https://pypi.org/simple/" + name + "/"
+
     def get_external_python_for_prefix(self):
         """
         For an external package that extends python, find the most likely spec for the python
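The relocated `PythonPackage` class derives three URLs from its single `pypi` attribute. A self-contained sketch of that derivation, with plain functions standing in for Spack's `classproperty` and a hypothetical package as input:

```python
def pypi_urls(pypi: str) -> dict:
    """Derive PyPI-related URLs the way PythonPackage does."""
    name = pypi.split("/")[0]
    return {
        "homepage": "https://pypi.org/project/" + name + "/",
        "url": "https://files.pythonhosted.org/packages/source/" + pypi[0] + "/" + pypi,
        "list_url": "https://pypi.org/simple/" + name + "/",
    }


# Hypothetical example value for ``pypi``:
print(pypi_urls("flake8/flake8-6.0.0.tar.gz")["url"])
# -> https://files.pythonhosted.org/packages/source/f/flake8/flake8-6.0.0.tar.gz
```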
@@ -7,7 +7,7 @@
 
 import llnl.util.lang as lang
 
-from spack.directives import extends
+from spack.directives import extends, maintainers
 
 from .generic import GenericBuilder, Package
 
@@ -71,6 +71,8 @@ class RPackage(Package):
 
     GenericBuilder = RBuilder
 
+    maintainers("glennpj")
+
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "RPackage"
@@ -10,7 +10,6 @@
 from llnl.util.filesystem import find, join_path, working_dir
 
 import spack.builder
-import spack.install_test
 import spack.package_base
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
@@ -31,8 +30,8 @@ class SIPPackage(spack.package_base.PackageBase):
     #: Name of private sip module to install alongside package
     sip_module = "sip"
 
-    #: Callback names for install-time testing
-    install_time_test_callbacks = ["test_imports"]
+    #: Callback names for install-time test
+    install_time_test_callbacks = ["test"]
     #: Legacy buildsystem attribute used to deserialize and install old specs
     legacy_buildsystem = "sip"
 
@@ -88,20 +87,18 @@ def python(self, *args, **kwargs):
         """The python ``Executable``."""
         inspect.getmodule(self).python(*args, **kwargs)
 
-    def test_imports(self):
+    def test(self):
         """Attempts to import modules of the installed package."""
 
         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = inspect.getmodule(self).python
         for module in self.import_modules:
-            with spack.install_test.test_part(
-                self,
-                "test_imports_{0}".format(module),
+            self.run_test(
+                inspect.getmodule(self).python.path,
+                ["-c", "import {0}".format(module)],
                 purpose="checking import of {0}".format(module),
                 work_dir="spack-test",
-            ):
-                python("-c", "import {0}".format(module))
+            )
 
 
 @spack.builder.builder("sip")
@@ -911,19 +911,13 @@ def generate_gitlab_ci_yaml(
         # --check-index-only, then the override mirror needs to be added to
         # the configured mirrors when bindist.update() is run, or else we
         # won't fetch its index and include in our local cache.
-        spack.mirror.add(
-            spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
-            cfg.default_modify_scope(),
-        )
+        spack.mirror.add("ci_pr_mirror", remote_mirror_override, cfg.default_modify_scope())
 
     shared_pr_mirror = None
     if spack_pipeline_type == "spack_pull_request":
         stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
         shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
-        spack.mirror.add(
-            spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
-            cfg.default_modify_scope(),
-        )
+        spack.mirror.add("ci_shared_pr_mirror", shared_pr_mirror, cfg.default_modify_scope())
 
     pipeline_artifacts_dir = artifacts_root
     if not pipeline_artifacts_dir:
@@ -1095,7 +1089,7 @@ def generate_gitlab_ci_yaml(
             raise AttributeError
 
         def main_script_replacements(cmd):
-            return cmd.replace("{env_dir}", rel_concrete_env_dir)
+            return cmd.replace("{env_dir}", concrete_env_dir)
 
         job_object["script"] = _unpack_script(
             job_object["script"], op=main_script_replacements
@@ -449,8 +449,9 @@ def ci_rebuild(args):
     # mirror now so it's used when we check for a hash match already
     # built for this spec.
     if pipeline_mirror_url:
-        mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
-        spack.mirror.add(mirror, cfg.default_modify_scope())
+        spack.mirror.add(
+            spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url, cfg.default_modify_scope()
+        )
         pipeline_mirrors.append(pipeline_mirror_url)
 
     # Check configured mirrors for a built spec with a matching hash
@@ -465,10 +466,7 @@ def ci_rebuild(args):
         # could be installed from either the override mirror or any other configured
         # mirror (e.g. remote_mirror_url which is defined in the environment or
         # pipeline_mirror_url), which is also what we want.
-        spack.mirror.add(
-            spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
-            cfg.default_modify_scope(),
-        )
+        spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
         pipeline_mirrors.append(remote_mirror_override)
 
     if spack_pipeline_type == "spack_pull_request":
@@ -53,7 +53,7 @@ def setup_parser(subparser):
         "--scope",
         choices=scopes,
         metavar=scopes_metavar,
-        default=spack.config.default_modify_scope("compilers"),
+        default=None,
         help="configuration scope to modify",
     )
 
@@ -106,19 +106,21 @@ def compiler_find(args):
 
 
 def compiler_remove(args):
-    cspec = spack.spec.CompilerSpec(args.compiler_spec)
-    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
-    if not compilers:
-        tty.die("No compilers match spec %s" % cspec)
-    elif not args.all and len(compilers) > 1:
-        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
-        colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
+    compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
+    candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
+
+    if not candidate_compilers:
+        tty.die("No compilers match spec %s" % compiler_spec)
+
+    if not args.all and len(candidate_compilers) > 1:
+        tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
+        colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
         tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
         sys.exit(1)
 
-    for compiler in compilers:
-        spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
-        tty.msg("Removed compiler %s" % compiler.spec.display_str)
+    for current_compiler in candidate_compilers:
+        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
+        tty.msg(f"{current_compiler.spec.display_str} has been removed")
 
 
 def compiler_info(args):
@@ -145,26 +145,7 @@ def setup_parser(subparser):
 
 def mirror_add(args):
     """Add a mirror to Spack."""
-    if (
-        args.s3_access_key_id
-        or args.s3_access_key_secret
-        or args.s3_access_token
-        or args.s3_profile
-        or args.s3_endpoint_url
-    ):
-        connection = {"url": args.url}
-        if args.s3_access_key_id and args.s3_access_key_secret:
-            connection["access_pair"] = (args.s3_access_key_id, args.s3_access_key_secret)
-        if args.s3_access_token:
-            connection["access_token"] = args.s3_access_token
-        if args.s3_profile:
-            connection["profile"] = args.s3_profile
-        if args.s3_endpoint_url:
-            connection["endpoint_url"] = args.s3_endpoint_url
-        mirror = spack.mirror.Mirror(fetch_url=connection, push_url=connection, name=args.name)
-    else:
-        mirror = spack.mirror.Mirror(args.url, name=args.name)
-    spack.mirror.add(mirror, args.scope)
+    spack.mirror.add(args.name, args.url, args.scope, args)
 
 
 def mirror_remove(args):
@@ -37,7 +37,6 @@
|
||||
"implicit_rpaths",
|
||||
"extra_rpaths",
|
||||
]
|
||||
_cache_config_file = []
|
||||
|
||||
# TODO: Caches at module level make it difficult to mock configurations in
|
||||
# TODO: unit tests. It might be worth reworking their implementation.
|
||||
@@ -155,52 +154,65 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
     compiler_config = get_compiler_config(scope, init_config)
     for compiler in compilers:
         compiler_config.append(_to_dict(compiler))
-    global _cache_config_file
-    _cache_config_file = compiler_config
     spack.config.set("compilers", compiler_config, scope=scope)


 @_auto_compiler_spec
 def remove_compiler_from_config(compiler_spec, scope=None):
-    """Remove compilers from the config, by spec.
+    """Remove compilers from configuration by spec.
+
+    If scope is None, all the scopes are searched for removal.

     Arguments:
-        compiler_specs: a list of CompilerSpec objects.
-        scope: configuration scope to modify.
+        compiler_spec: compiler to be removed
+        scope: configuration scope to modify
     """
-    # Need a better way for this
-    global _cache_config_file
+    candidate_scopes = [scope]
+    if scope is None:
+        candidate_scopes = spack.config.config.scopes.keys()
+
+    removal_happened = False
+    for current_scope in candidate_scopes:
+        removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
+
+    return removal_happened
+
+
+def _remove_compiler_from_scope(compiler_spec, scope):
+    """Removes a compiler from a specific configuration scope.
+
+    Args:
+        compiler_spec: compiler to be removed
+        scope: configuration scope under consideration
+
+    Returns:
+        True if one or more compiler entries were actually removed, False otherwise
+    """
+    assert scope is not None, "a specific scope is needed when calling this function"
     compiler_config = get_compiler_config(scope)
-    config_length = len(compiler_config)
-
     filtered_compiler_config = [
-        comp
-        for comp in compiler_config
+        compiler_entry
+        for compiler_entry in compiler_config
        if not spack.spec.parse_with_version_concrete(
-            comp["compiler"]["spec"], compiler=True
+            compiler_entry["compiler"]["spec"], compiler=True
        ).satisfies(compiler_spec)
     ]

-    # Update the cache for changes
-    _cache_config_file = filtered_compiler_config
-    if len(filtered_compiler_config) == config_length:  # No items removed
-        CompilerSpecInsufficientlySpecificError(compiler_spec)
-    spack.config.set("compilers", filtered_compiler_config, scope=scope)
+    if len(filtered_compiler_config) == len(compiler_config):
+        return False
+
+    # We need to preserve the YAML type for comments, hence we are copying the
+    # items in the list that has just been retrieved
+    compiler_config[:] = filtered_compiler_config
+    spack.config.set("compilers", compiler_config, scope=scope)
+    return True


 def all_compilers_config(scope=None, init_config=True):
     """Return a set of specs for all the compiler versions currently
     available to build with. These are instances of CompilerSpec.
     """
-    # Get compilers for this architecture.
-    # Create a cache of the config file so we don't load all the time.
-    global _cache_config_file
-    if not _cache_config_file:
-        _cache_config_file = get_compiler_config(scope, init_config)
-        return _cache_config_file
-    else:
-        return _cache_config_file
+    return get_compiler_config(scope, init_config)


 def all_compiler_specs(scope=None, init_config=True):
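Aside (not part of the diff): a minimal Python sketch of the scope-iteration pattern the hunk above introduces, where removal is attempted in every scope and the caller learns whether anything matched. The scope names and entries here are hypothetical stand-ins for Spack's real configuration scopes.

```python
# Toy model of remove_compiler_from_config / _remove_compiler_from_scope.
def remove_from_scopes(compiler_spec, scopes):
    removal_happened = False
    for scope_name, entries in scopes.items():
        filtered = [e for e in entries if e != compiler_spec]
        if len(filtered) == len(entries):
            continue  # nothing matched in this scope
        entries[:] = filtered  # in-place update preserves the list object
        removal_happened = True
    return removal_happened

scopes = {"site": ["gcc@4.5.0", "clang@11"], "user": ["gcc@4.5.0"]}
assert remove_from_scopes("gcc@4.5.0", scopes) is True
assert scopes == {"site": ["clang@11"], "user": []}
```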
@@ -151,11 +151,7 @@ def setup_custom_environment(self, pkg, env):
         arch = arch.replace("-", "_")
         # vcvars can target specific sdk versions, force it to pick up concretized sdk
         # version, if needed by spec
-        sdk_ver = (
-            ""
-            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
-            else pkg.spec["win-sdk"].version.string + ".0"
-        )
+        sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
         # provide vcvars with msvc version selected by concretization,
         # not whatever it happens to pick up on the system (highest available version)
         out = subprocess.check_output(  # novermin
@@ -1353,17 +1353,11 @@ def use_configuration(*scopes_or_paths):
     configuration = _config_from(scopes_or_paths)
     config.clear_caches(), configuration.clear_caches()

-    # Save and clear the current compiler cache
-    saved_compiler_cache = spack.compilers._cache_config_file
-    spack.compilers._cache_config_file = []
-
     saved_config, config = config, configuration

     try:
         yield configuration
     finally:
-        # Restore previous config files
-        spack.compilers._cache_config_file = saved_compiler_cache
         config = saved_config
@@ -17,7 +17,7 @@
             "template": "container/fedora_38.dockerfile",
             "image": "docker.io/fedora:38"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf",
         "build": "spack/fedora38",
         "build_tags": {
             "develop": "latest"

@@ -31,7 +31,7 @@
             "template": "container/fedora_37.dockerfile",
             "image": "docker.io/fedora:37"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf",
         "build": "spack/fedora37",
         "build_tags": {
             "develop": "latest"

@@ -45,7 +45,7 @@
             "template": "container/rockylinux_9.dockerfile",
             "image": "docker.io/rockylinux:9"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf_epel",
         "build": "spack/rockylinux9",
         "build_tags": {
             "develop": "latest"

@@ -59,7 +59,7 @@
             "template": "container/rockylinux_8.dockerfile",
             "image": "docker.io/rockylinux:8"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf_epel",
         "build": "spack/rockylinux8",
         "build_tags": {
             "develop": "latest"

@@ -73,7 +73,7 @@
             "template": "container/almalinux_9.dockerfile",
             "image": "quay.io/almalinux/almalinux:9"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf_epel",
         "build": "spack/almalinux9",
         "build_tags": {
             "develop": "latest"

@@ -87,7 +87,7 @@
             "template": "container/almalinux_8.dockerfile",
             "image": "quay.io/almalinux/almalinux:8"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf_epel",
         "build": "spack/almalinux8",
         "build_tags": {
             "develop": "latest"

@@ -101,7 +101,7 @@
             "template": "container/centos_stream.dockerfile",
             "image": "quay.io/centos/centos:stream"
         },
-        "os_package_manager": "yum",
+        "os_package_manager": "dnf_epel",
         "build": "spack/centos-stream",
         "final": {
             "image": "quay.io/centos/centos:stream"

@@ -185,6 +185,16 @@
             "install": "apt-get -yqq install",
             "clean": "rm -rf /var/lib/apt/lists/*"
         },
+        "dnf": {
+            "update": "dnf update -y",
+            "install": "dnf install -y",
+            "clean": "rm -rf /var/cache/dnf && dnf clean all"
+        },
+        "dnf_epel": {
+            "update": "dnf update -y && dnf install -y epel-release && dnf update -y",
+            "install": "dnf install -y",
+            "clean": "rm -rf /var/cache/dnf && dnf clean all"
+        },
         "yum": {
             "update": "yum update -y && yum install -y epel-release && yum update -y",
             "install": "yum install -y",
@@ -112,10 +112,15 @@ def path_to_dict(search_paths):
     # Reverse order of search directories so that a lib in the first
     # entry overrides later entries
     for search_path in reversed(search_paths):
-        for lib in os.listdir(search_path):
-            lib_path = os.path.join(search_path, lib)
-            if llnl.util.filesystem.is_readable_file(lib_path):
-                path_to_lib[lib_path] = lib
+        try:
+            for lib in os.listdir(search_path):
+                lib_path = os.path.join(search_path, lib)
+                if llnl.util.filesystem.is_readable_file(lib_path):
+                    path_to_lib[lib_path] = lib
+        except OSError as e:
+            msg = f"cannot scan '{search_path}' for external software: {str(e)}"
+            llnl.util.tty.debug(msg)

     return path_to_lib
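Aside (not part of the diff): a self-contained sketch of the hardened scan pattern above — an unreadable or vanished search path now produces a debug message instead of aborting detection. Plain `os.path` checks and `print` stand in for `llnl.util.filesystem.is_readable_file` and `tty.debug`.

```python
import os

def path_to_dict(search_paths):
    path_to_lib = {}
    for search_path in reversed(search_paths):
        try:
            for lib in os.listdir(search_path):
                lib_path = os.path.join(search_path, lib)
                if os.path.isfile(lib_path) and os.access(lib_path, os.R_OK):
                    path_to_lib[lib_path] = lib
        except OSError as e:
            # FileNotFoundError and PermissionError are both OSError
            print(f"cannot scan '{search_path}' for external software: {e}")
    return path_to_lib

print(path_to_dict(["/nonexistent", "/tmp"]))
```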
@@ -39,7 +39,6 @@
 import spack.stage
 import spack.store
 import spack.subprocess_context
-import spack.traverse
 import spack.user_environment as uenv
 import spack.util.cpus
 import spack.util.environment

@@ -51,6 +50,7 @@
 import spack.util.spack_yaml as syaml
 import spack.util.url
 import spack.version
+from spack import traverse
 from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
 from spack.installer import PackageInstaller
 from spack.spec import Spec
@@ -437,32 +437,6 @@ def _is_dev_spec_and_has_changed(spec):
     return mtime > record.installation_time


-def _spec_needs_overwrite(spec, changed_dev_specs):
-    """Check whether the current spec needs to be overwritten because either it has
-    changed itself or one of its dependencies have changed
-    """
-    # if it's not installed, we don't need to overwrite it
-    if not spec.installed:
-        return False
-
-    # If the spec itself has changed this is a trivial decision
-    if spec in changed_dev_specs:
-        return True
-
-    # if spec and all deps aren't dev builds, we don't need to overwrite it
-    if not any(spec.satisfies(c) for c in ("dev_path=*", "^dev_path=*")):
-        return False
-
-    # If any dep needs overwrite, or any dep is missing and is a dev build then
-    # overwrite this package
-    if any(
-        ((not dep.installed) and dep.satisfies("dev_path=*"))
-        or _spec_needs_overwrite(dep, changed_dev_specs)
-        for dep in spec.traverse(root=False)
-    ):
-        return True
-
-
 def _error_on_nonempty_view_dir(new_root):
     """Defensively error when the target view path already exists and is not an
     empty directory. This usually happens when the view symlink was removed, but
@@ -647,18 +621,16 @@ def specs_for_view(self, concretized_root_specs):
         From the list of concretized user specs in the environment, flatten
         the dags, and filter selected, installed specs, remove duplicates on dag hash.
         """
-        dag_hash = lambda spec: spec.dag_hash()
-
         # With deps, requires traversal
         if self.link == "all" or self.link == "run":
             deptype = ("run") if self.link == "run" else ("link", "run")
             specs = list(
-                spack.traverse.traverse_nodes(
-                    concretized_root_specs, deptype=deptype, key=dag_hash
+                traverse.traverse_nodes(
+                    concretized_root_specs, deptype=deptype, key=traverse.by_dag_hash
                 )
             )
         else:
-            specs = list(dedupe(concretized_root_specs, key=dag_hash))
+            specs = list(dedupe(concretized_root_specs, key=traverse.by_dag_hash))

         # Filter selected, installed specs
         with spack.store.db.read_transaction():
@@ -1824,17 +1796,29 @@ def _add_concrete_spec(self, spec, concrete, new=True):
         self.specs_by_hash[h] = concrete

     def _get_overwrite_specs(self):
-        # Collect all specs in the environment first before checking which ones
-        # to rebuild to avoid checking the same specs multiple times
-        specs_to_check = set()
-        for dag_hash in self.concretized_order:
-            root_spec = self.specs_by_hash[dag_hash]
-            specs_to_check.update(root_spec.traverse(root=True))
-
-        changed_dev_specs = set(s for s in specs_to_check if _is_dev_spec_and_has_changed(s))
+        # Find all dev specs that were modified.
+        changed_dev_specs = [
+            s
+            for s in traverse.traverse_nodes(
+                self.concrete_roots(), order="breadth", key=traverse.by_dag_hash
+            )
+            if _is_dev_spec_and_has_changed(s)
+        ]

+        # Collect their hashes, and the hashes of their installed parents.
+        # Notice: with order=breadth all changed dev specs are at depth 0,
+        # even if they occur as parents of one another.
         return [
-            s.dag_hash() for s in specs_to_check if _spec_needs_overwrite(s, changed_dev_specs)
+            spec.dag_hash()
+            for depth, spec in traverse.traverse_nodes(
+                changed_dev_specs,
+                root=True,
+                order="breadth",
+                depth=True,
+                direction="parents",
+                key=traverse.by_dag_hash,
+            )
+            if depth == 0 or spec.installed
         ]

     def _install_log_links(self, spec):
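Aside (not part of the diff): the rewritten `_get_overwrite_specs` walks *upward* from changed dev specs and keeps them plus installed ancestors. A toy breadth-first walk in the "parents" direction, with a made-up graph and installed set, illustrates the shape of that computation:

```python
from collections import deque

parents = {"dev-lib": ["app"], "app": [], "other": []}
installed = {"app"}

def overwrite_candidates(changed):
    # Changed dev specs start at depth 0; any installed ancestor also needs
    # to be rebuilt on top of the overwritten dependency.
    seen = set(changed)
    queue = deque((0, n) for n in changed)
    result = []
    while queue:
        depth, node = queue.popleft()
        if depth == 0 or node in installed:
            result.append(node)
        for parent in parents[node]:
            if parent not in seen:
                seen.add(parent)
                queue.append((depth + 1, parent))
    return result

print(overwrite_candidates(["dev-lib"]))  # ['dev-lib', 'app']
```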
@@ -1941,7 +1925,7 @@ def install_specs(self, specs=None, **install_args):
     def all_specs(self):
         """Return all specs, even those a user spec would shadow."""
         roots = [self.specs_by_hash[h] for h in self.concretized_order]
-        specs = [s for s in spack.traverse.traverse_nodes(roots, lambda s: s.dag_hash())]
+        specs = [s for s in traverse.traverse_nodes(roots, key=traverse.by_dag_hash)]
         specs.sort()
         return specs
@@ -1987,13 +1971,18 @@ def concrete_roots(self):
         roots *without* associated user spec"""
         return [root for _, root in self.concretized_specs()]

-    def get_by_hash(self, dag_hash):
-        matches = {}
-        roots = [self.specs_by_hash[h] for h in self.concretized_order]
-        for spec in spack.traverse.traverse_nodes(roots, key=lambda s: s.dag_hash()):
+    def get_by_hash(self, dag_hash: str) -> List[Spec]:
+        # If it's not a partial hash prefix we can early exit
+        early_exit = len(dag_hash) == 32
+        matches = []
+        for spec in traverse.traverse_nodes(
+            self.concrete_roots(), key=traverse.by_dag_hash, order="breadth"
+        ):
             if spec.dag_hash().startswith(dag_hash):
-                matches[spec.dag_hash()] = spec
-        return list(matches.values())
+                matches.append(spec)
+                if early_exit:
+                    break
+        return matches

     def get_one_by_hash(self, dag_hash):
         """Returns the single spec from the environment which matches the
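Aside (not part of the diff): the new `get_by_hash` treats a 32-character argument as a full DAG hash, which can match at most one spec, so the scan stops at the first hit; a shorter prefix may match several. A sketch with made-up hash strings:

```python
hashes = ["abcdef1234" + "0" * 22, "abc999" + "1" * 26]  # both 32 chars

def get_by_hash(dag_hash):
    early_exit = len(dag_hash) == 32  # full hash: at most one match possible
    matches = []
    for h in hashes:
        if h.startswith(dag_hash):
            matches.append(h)
            if early_exit:
                break
    return matches

assert len(get_by_hash("abc")) == 2        # prefix: multiple matches
assert len(get_by_hash(hashes[0])) == 1    # full hash: early exit
```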
@@ -2005,11 +1994,14 @@ def get_one_by_hash(self, dag_hash):

     def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
         """Returns all concretized specs in the environment satisfying any of the input specs"""
-        key = lambda s: s.dag_hash()
+        # Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
+        specs = [s.lookup_hash() for s in specs]
+
+        # Avoid double lookup by directly calling _satisfies.
         return [
             s
-            for s in spack.traverse.traverse_nodes(self.concrete_roots(), key=key)
-            if any(s.satisfies(t) for t in specs)
+            for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
+            if any(s._satisfies(t) for t in specs)
         ]

 @spack.repo.autospec
@@ -2033,9 +2025,9 @@ def matching_spec(self, spec):
         env_root_to_user = {root.dag_hash(): user for user, root in self.concretized_specs()}
         root_matches, dep_matches = [], []

-        for env_spec in spack.traverse.traverse_nodes(
+        for env_spec in traverse.traverse_nodes(
             specs=[root for _, root in self.concretized_specs()],
-            key=lambda s: s.dag_hash(),
+            key=traverse.by_dag_hash,
             order="breadth",
         ):
             if not env_spec.satisfies(spec):

@@ -2109,8 +2101,8 @@ def _get_environment_specs(self, recurse_dependencies=True):

         if recurse_dependencies:
             specs.extend(
-                spack.traverse.traverse_nodes(
-                    specs, root=False, deptype=("link", "run"), key=lambda s: s.dag_hash()
+                traverse.traverse_nodes(
+                    specs, root=False, deptype=("link", "run"), key=traverse.by_dag_hash
                 )
             )

@@ -2119,9 +2111,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
     def _to_lockfile_dict(self):
         """Create a dictionary to store a lockfile for this environment."""
         concrete_specs = {}
-        for s in spack.traverse.traverse_nodes(
-            self.specs_by_hash.values(), key=lambda s: s.dag_hash()
-        ):
+        for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
             spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
             # Assumes no legacy formats, since this was just created.
             spec_dict[ht.dag_hash.name] = s.dag_hash()

@@ -2278,7 +2268,7 @@ def ensure_env_directory_exists(self, dot_env: bool = False) -> None:

     def update_environment_repository(self) -> None:
         """Updates the repository associated with the environment."""
-        for spec in spack.traverse.traverse_nodes(self.new_specs):
+        for spec in traverse.traverse_nodes(self.new_specs):
             if not spec.concrete:
                 raise ValueError("specs passed to environment.write() must be concrete!")
|
@@ -763,7 +763,11 @@ def version_from_git(git_exe):
|
||||
@property
|
||||
def git(self):
|
||||
if not self._git:
|
||||
self._git = spack.util.git.git()
|
||||
try:
|
||||
self._git = spack.util.git.git(required=True)
|
||||
except CommandNotFoundError as exc:
|
||||
tty.error(str(exc))
|
||||
raise
|
||||
|
||||
# Disable advice for a quieter fetch
|
||||
# https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
|
||||
|
@@ -2436,10 +2436,11 @@ def get_deptypes(self, pkg):
         else:
             cache_only = self.install_args.get("dependencies_cache_only")

-        # Include build dependencies if pkg is not installed and cache_only
-        # is False, or if build depdencies are explicitly called for
-        # by include_build_deps.
-        if include_build_deps or not (cache_only or pkg.spec.installed):
+        # Include build dependencies if pkg is going to be built from sources, or
+        # if build deps are explicitly requested.
+        if include_build_deps or not (
+            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
+        ):
             deptypes.append("build")
         if self.run_tests(pkg):
             deptypes.append("test")
@@ -114,10 +114,6 @@ def from_url(url: str):
         return Mirror(fetch_url=url)

     def to_dict(self):
-        # Keep it a key-value pair <name>: <url> when possible.
-        if isinstance(self._fetch_url, str) and self._push_url is None:
-            return self._fetch_url
-
         if self._push_url is None:
             return syaml_dict([("fetch", self._fetch_url), ("push", self._fetch_url)])
         else:

@@ -552,17 +548,30 @@ def mirror_cache_and_stats(path, skip_unstable_versions=False):
     return mirror_cache, mirror_stats


-def add(mirror: Mirror, scope=None):
+def add(name, url, scope, args={}):
     """Add a named mirror in the given scope"""
     mirrors = spack.config.get("mirrors", scope=scope)
     if not mirrors:
         mirrors = syaml_dict()

-    if mirror.name in mirrors:
-        tty.die("Mirror with name {} already exists.".format(mirror.name))
+    if name in mirrors:
+        tty.die("Mirror with name %s already exists." % name)

     items = [(n, u) for n, u in mirrors.items()]
-    items.insert(0, (mirror.name, mirror.to_dict()))
+    mirror_data = url
+    key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
+    # On creation, assume connection data is set for both
+    if any(value for value in key_values if value in args):
+        url_dict = {
+            "url": url,
+            "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
+            "access_token": args.s3_access_token,
+            "profile": args.s3_profile,
+            "endpoint_url": args.s3_endpoint_url,
+        }
+        mirror_data = {"fetch": url_dict, "push": url_dict}
+
+    items.insert(0, (name, mirror_data))
     mirrors = syaml_dict(items)
     spack.config.set("mirrors", mirrors, scope=scope)
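Aside (not part of the diff): a hedged sketch of the data shape the `add()` variant above writes into the mirrors config — a plain URL stays a plain string, while S3 connection arguments expand it into fetch/push dictionaries. The args namespace and values here are hypothetical.

```python
from types import SimpleNamespace

args = SimpleNamespace(
    s3_access_key_id="AKIA...",          # hypothetical credentials
    s3_access_key_secret="secret",
    s3_access_token=None,
    s3_profile=None,
    s3_endpoint_url="https://s3.example.com",
)
url = "s3://my-bucket/mirror"
url_dict = {
    "url": url,
    "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
    "access_token": args.s3_access_token,
    "profile": args.s3_profile,
    "endpoint_url": args.s3_endpoint_url,
}
# Same connection data assumed for both directions on creation:
mirror_data = {"fetch": url_dict, "push": url_dict}
print(mirror_data)
```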
@@ -40,7 +40,7 @@

 import llnl.util.filesystem
 import llnl.util.tty as tty
-from llnl.util.lang import dedupe
+from llnl.util.lang import dedupe, memoized

 import spack.build_environment
 import spack.config

@@ -393,7 +393,7 @@ class BaseConfiguration(object):
     querying easier. It needs to be sub-classed for specific module types.
     """

-    default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
+    default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}

     def __init__(self, spec, module_set_name, explicit=None):
         # Module where type(self) is defined

@@ -671,7 +671,14 @@ def configure_options(self):
         # the configure option section
         return None

+    def modification_needs_formatting(self, modification):
+        """Returns True if environment modification entry needs to be formatted."""
+        return (
+            not isinstance(modification, (spack.util.environment.SetEnv)) or not modification.raw
+        )
+
     @tengine.context_property
+    @memoized
     def environment_modifications(self):
         """List of environment modifications to be processed."""
         # Modifications guessed by inspecting the spec prefix

@@ -733,15 +740,29 @@ def environment_modifications(self):
             _check_tokens_are_valid(x.name, message=msg)
             # Transform them
             x.name = spec.format(x.name, transform=transform)
-            try:
-                # Not every command has a value
-                x.value = spec.format(x.value)
-            except AttributeError:
-                pass
+            if self.modification_needs_formatting(x):
+                try:
+                    # Not every command has a value
+                    x.value = spec.format(x.value)
+                except AttributeError:
+                    pass
             x.name = str(x.name).replace("-", "_")

         return [(type(x).__name__, x) for x in env if x.name not in exclude]

     @tengine.context_property
+    def has_manpath_modifications(self):
+        """True if MANPATH environment variable is modified."""
+        for modification_type, cmd in self.environment_modifications:
+            if not isinstance(
+                cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
+            ):
+                continue
+            if cmd.name == "MANPATH":
+                return True
+        else:
+            return False
+
+    @tengine.context_property
     def autoload(self):
         """List of modules that needs to be loaded automatically."""
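Aside (not part of the diff): the `@memoized` added above means the modification list is computed once, and derived context properties such as `has_manpath_modifications` just scan the cached result. A minimal stand-alone sketch using `functools.cached_property` in place of Spack's `memoized`:

```python
from functools import cached_property

class ModuleContext:
    def __init__(self, mods):
        self._mods = mods
        self.computed = 0

    @cached_property
    def environment_modifications(self):
        self.computed += 1  # expensive work happens only once
        return [("PrependPath", name) for name in self._mods]

    @property
    def has_manpath_modifications(self):
        return any(name == "MANPATH" for _, name in self.environment_modifications)

ctx = ModuleContext(["PATH", "MANPATH"])
assert ctx.has_manpath_modifications and ctx.has_manpath_modifications
assert ctx.computed == 1  # cached after the first access
```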
@@ -108,5 +108,6 @@
 # These are just here for editor support; they will be replaced when the build env
 # is set up.
 make = MakeExecutable("make", jobs=1)
+gmake = MakeExecutable("gmake", jobs=1)
 ninja = MakeExecutable("ninja", jobs=1)
 configure = Executable(join_path(".", "configure"))
@@ -1231,6 +1231,7 @@ def dependencies_of_type(cls, *deptypes):
         if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
     )

+    # TODO: allow more than one active extendee.
     @property
     def extendee_spec(self):
         """

@@ -1246,7 +1247,6 @@ def extendee_spec(self):
             if dep.name in self.extendees:
                 deps.append(dep)

-        # TODO: allow more than one active extendee.
         if deps:
             assert len(deps) == 1
             return deps[0]

@@ -1256,7 +1256,6 @@ def extendee_spec(self):
         if self.spec._concrete:
             return None
         else:
-            # TODO: do something sane here with more than one extendee
             # If it's not concrete, then return the spec from the
             # extends() directive since that is all we know so far.
             spec_str, kwargs = next(iter(self.extendees.items()))
@@ -6,6 +6,7 @@
 import itertools
 import os
 import re
+import shutil
 from collections import OrderedDict

 import macholib.mach_o

@@ -355,7 +356,13 @@ def _set_elf_rpaths(target, rpaths):
     # Join the paths using ':' as a separator
     rpaths_str = ":".join(rpaths)

-    patchelf, output = executable.Executable(_patchelf()), None
+    # If we're relocating patchelf itself, make a copy and use it
+    bak_path = None
+    if target.endswith("/bin/patchelf"):
+        bak_path = target + ".bak"
+        shutil.copy(target, bak_path)
+
+    patchelf, output = executable.Executable(bak_path or _patchelf()), None
     try:
         # TODO: revisit the use of --force-rpath as it might be conditional
         # TODO: if we want to support setting RUNPATH from binary packages

@@ -364,6 +371,9 @@ def _set_elf_rpaths(target, rpaths):
     except executable.ProcessError as e:
         msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
         tty.warn(msg.format(target, e))
+    finally:
+        if bak_path and os.path.exists(bak_path):
+            os.remove(bak_path)
     return output


@@ -676,7 +686,7 @@ def is_relocatable(spec):
     Raises:
         ValueError: if the spec is not installed
     """
-    if not spec.install_status():
+    if not spec.installed:
         raise ValueError("spec is not installed [{0}]".format(str(spec)))

     if spec.external or spec.virtual:
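Aside (not part of the diff): the self-relocation guard above avoids rewriting the very binary that is executing. A minimal sketch of the same copy-run-cleanup pattern, with hypothetical paths:

```python
import os
import shutil
import subprocess

def set_rpaths(target, rpaths, patchelf_path="/usr/bin/patchelf"):
    # When patching patchelf's own binary, run a temporary copy of it so the
    # file being rewritten is not the file being executed.
    bak_path = None
    if target.endswith("/bin/patchelf"):
        bak_path = target + ".bak"
        shutil.copy(target, bak_path)
    try:
        subprocess.run(
            [bak_path or patchelf_path, "--force-rpath", "--set-rpath",
             ":".join(rpaths), target],
            check=True,
        )
    finally:
        # Remove the temporary copy whether or not patching succeeded.
        if bak_path and os.path.exists(bak_path):
            os.remove(bak_path)
```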
@@ -24,7 +24,7 @@
 import traceback
 import types
 import uuid
-from typing import Dict, Union
+from typing import Any, Dict, List, Union

 import llnl.util.filesystem as fs
 import llnl.util.lang

@@ -424,7 +424,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
     def last_mtime(self):
         return max(sinfo.st_mtime for sinfo in self._packages_to_stats.values())

-    def modified_since(self, since):
+    def modified_since(self, since: float) -> List[str]:
         return [name for name, sinfo in self._packages_to_stats.items() if sinfo.st_mtime > since]

     def __getitem__(self, item):

@@ -550,35 +550,34 @@ class RepoIndex(object):
     when they're needed.

     ``Indexers`` should be added to the ``RepoIndex`` using
-    ``add_index(name, indexer)``, and they should support the interface
+    ``add_indexer(name, indexer)``, and they should support the interface
     defined by ``Indexer``, so that the ``RepoIndex`` can read, generate,
     and update stored indices.

-    Generated indexes are accessed by name via ``__getitem__()``.
-
-    """
+    Generated indexes are accessed by name via ``__getitem__()``."""

-    def __init__(self, package_checker, namespace, cache):
+    def __init__(
+        self,
+        package_checker: FastPackageChecker,
+        namespace: str,
+        cache: spack.util.file_cache.FileCache,
+    ):
         self.checker = package_checker
         self.packages_path = self.checker.packages_path
         if sys.platform == "win32":
             self.packages_path = spack.util.path.convert_to_posix_path(self.packages_path)
         self.namespace = namespace

-        self.indexers = {}
-        self.indexes = {}
+        self.indexers: Dict[str, Indexer] = {}
+        self.indexes: Dict[str, Any] = {}
         self.cache = cache

-    def add_indexer(self, name, indexer):
+    def add_indexer(self, name: str, indexer: Indexer):
         """Add an indexer to the repo index.

         Arguments:
-            name (str): name of this indexer
-
-            indexer (object): an object that supports create(), read(),
-                write(), and get_index() operations
-
-        """
+            name: name of this indexer
+            indexer: object implementing the ``Indexer`` interface"""
         self.indexers[name] = indexer

     def __getitem__(self, name):

@@ -599,17 +598,15 @@ def _build_all_indexes(self):
         because the main bottleneck here is loading all the packages. It
         can take tens of seconds to regenerate sequentially, and we'd
         rather only pay that cost once rather than on several
-        invocations.
-
-        """
+        invocations."""
         for name, indexer in self.indexers.items():
             self.indexes[name] = self._build_index(name, indexer)

-    def _build_index(self, name, indexer):
+    def _build_index(self, name: str, indexer: Indexer):
         """Determine which packages need an update, and update indexes."""

         # Filename of the provider index cache (we assume they're all json)
-        cache_filename = "{0}/{1}-index.json".format(name, self.namespace)
+        cache_filename = f"{name}/{self.namespace}-index.json"

         # Compute which packages needs to be updated in the cache
         index_mtime = self.cache.mtime(cache_filename)

@@ -633,8 +630,7 @@ def _build_index(self, name, indexer):
             needs_update = self.checker.modified_since(new_index_mtime)

             for pkg_name in needs_update:
-                namespaced_name = "%s.%s" % (self.namespace, pkg_name)
-                indexer.update(namespaced_name)
+                indexer.update(f"{self.namespace}.{pkg_name}")

             indexer.write(new)

@@ -1239,7 +1235,7 @@ def get_pkg_class(self, pkg_name):
         try:
             module = importlib.import_module(fullname)
         except ImportError:
-            raise UnknownPackageError(fullname)
+            raise UnknownPackageError(pkg_name)
         except Exception as e:
             msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
             raise RepoError(msg) from e
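Aside (not part of the diff): a minimal object satisfying the `Indexer`-style interface that `add_indexer()` expects — create/read/update/write plus an index payload. The class and its JSON storage are illustrative, not Spack's real indexers:

```python
import json

class ToyIndexer:
    def __init__(self):
        self.index = {}

    def create(self):
        self.index = {}

    def read(self, stream):
        self.index = json.load(stream)

    def update(self, namespaced_pkg_name):
        # RepoIndex passes names formatted as f"{namespace}.{pkg_name}"
        self.index[namespaced_pkg_name] = True

    def write(self, stream):
        json.dump(self.index, stream)

indexer = ToyIndexer()
indexer.create()
indexer.update("builtin.zlib")
```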
@@ -614,23 +614,6 @@ def multiple_values_error(self, attribute, pkg):
     def no_value_error(self, attribute, pkg):
         return f'Cannot select a single "{attribute}" for package "{pkg}"'

-    def _get_cause_tree(self, cause, conditions, condition_causes, literals, indent=" "):
-        parents = [c for e, c in condition_causes if e == cause]
-        local = "required because %s " % conditions[cause]
-
-        return [indent + local] + [
-            c
-            for parent in parents
-            for c in self._get_cause_tree(
-                parent, conditions, condition_causes, literals, indent=indent + " "
-            )
-        ]
-
-    def get_cause_tree(self, cause):
-        conditions = dict(extract_args(self.model, "condition"))
-        condition_causes = list(extract_args(self.model, "condition_cause"))
-        return self._get_cause_tree(cause, conditions, condition_causes, [])
-
     def handle_error(self, msg, *args):
         """Handle an error state derived by the solver."""
         if msg == "multiple_values_error":

@@ -639,28 +622,14 @@ def handle_error(self, msg, *args):
         if msg == "no_value_error":
             return self.no_value_error(*args)

-        try:
-            idx = args.index("startcauses")
-        except ValueError:
-            msg_args = args
-            cause_args = []
-        else:
-            msg_args = args[:idx]
-            cause_args = args[idx + 1 :]
-
-        msg = msg.format(*msg_args)
-
         # For variant formatting, we sometimes have to construct specs
         # to format values properly. Find/replace all occurances of
         # Spec(...) with the string representation of the spec mentioned
+        msg = msg.format(*args)
         specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
         for spec_str in specs_to_construct:
             msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))

-        for cause in set(cause_args):
-            for c in self.get_cause_tree(cause):
-                msg += f"\n{c}"
-
         return msg

     def message(self, errors) -> str:

@@ -806,8 +775,6 @@ def visit(node):
         self.control.load(os.path.join(parent_dir, "concretize.lp"))
         self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
         self.control.load(os.path.join(parent_dir, "display.lp"))
-        if spack.error.debug:
-            self.control.load(os.path.join(parent_dir, "causation.lp"))
         timer.stop("load")

         # Grounding is the first step in the solve -- it turns our facts

@@ -868,13 +835,7 @@ def on_model(model):

         # print any unknown functions in the model
         for sym in best_model:
-            if sym.name not in (
-                "attr",
-                "error",
-                "opt_criterion",
-                "condition",
-                "condition_cause",
-            ):
+            if sym.name not in ("attr", "error", "opt_criterion"):
                 tty.debug(
                     "UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
                 )
@@ -1305,11 +1266,7 @@ def package_provider_rules(self, pkg):
         for when in whens:
             msg = "%s provides %s when %s" % (pkg.name, provided, when)
             condition_id = self.condition(when, provided, pkg.name, msg)
-            self.gen.fact(
-                fn.imposed_constraint(
-                    condition_id, "virtual_condition_holds", pkg.name, provided.name
-                )
-            )
+            self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
             self.gen.newline()

     def package_dependencies_rules(self, pkg):

@@ -1330,25 +1287,16 @@ def package_dependencies_rules(self, pkg):
             if not deptypes:
                 continue

-            msg = "%s depends on %s" % (pkg.name, dep.spec)
+            msg = "%s depends on %s" % (pkg.name, dep.spec.name)
             if cond != spack.spec.Spec():
                 msg += " when %s" % cond

             condition_id = self.condition(cond, dep.spec, pkg.name, msg)
-            self.gen.fact(fn.condition_requirement(condition_id, "spack_installed", pkg.name))
+            self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))

             for t in sorted(deptypes):
                 # there is a declared dependency of type t
-                self.gen.fact(
-                    fn.imposed_constraint(
-                        condition_id, "dependency_holds", pkg.name, dep.spec.name, t
-                    )
-                )
-                self.gen.fact(
-                    fn.imposed_constraint(
-                        condition_id, "virtual_node" if dep.spec.virtual else "node", dep.spec.name
-                    )
-                )
+                self.gen.fact(fn.dependency_type(condition_id, t))

             self.gen.newline()
@@ -1502,11 +1450,7 @@ def external_packages(self):
             for local_idx, spec in enumerate(external_specs):
                 msg = "%s available as external when satisfying %s" % (spec.name, spec)
                 condition_id = self.condition(spec, msg=msg)
-                self.gen.fact(
-                    fn.imposed_constraint(
-                        condition_id, "external_conditions_hold", pkg_name, local_idx
-                    )
-                )
+                self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
                 self.possible_versions[spec.name].add(spec.version)
                 self.gen.newline()

@@ -2350,29 +2294,16 @@ def setup(self, driver, specs, reuse=None):
         self.define_target_constraints()

     def literal_specs(self, specs):
-        for spec in specs:
+        for idx, spec in enumerate(specs):
             self.gen.h2("Spec: %s" % str(spec))
+            self.gen.fact(fn.literal(idx))

-            # cannot use self.condition because it requires condition requirements
-            condition_id = next(self._condition_id_counter)
-            self.gen.fact(fn.condition(condition_id, "%s is provided as input spec" % spec))
-            self.gen.fact(fn.literal(condition_id))
-
-            self.gen.fact(fn.condition_requirement(condition_id, "literal_solved", condition_id))
-
-            self.gen.fact(
-                fn.imposed_constraint(
-                    condition_id, "virtual_root" if spec.virtual else "root", spec.name
-                )
-            )
-
+            self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
             for clause in self.spec_clauses(spec):
-                self.gen.fact(fn.imposed_constraint(condition_id, *clause.args))
+                self.gen.fact(fn.literal(idx, *clause.args))
                 if clause.args[0] == "variant_set":
                     self.gen.fact(
-                        fn.imposed_constraint(
-                            condition_id, "variant_default_value_from_cli", *clause.args[1:]
-                        )
+                        fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
                     )

         if self.concretize_everything:

@@ -2466,8 +2397,6 @@ class SpecBuilder(object):
             r"^root$",
             r"^virtual_node$",
             r"^virtual_root$",
-            r"^.*holds?$",
-            r"^literal.*$",
         ]
     )
 )
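Aside (not part of the diff): a rough picture of the literal encoding that `literal_specs` emits after this change — each input spec gets an integer id, and every clause becomes a `literal(idx, ...)` fact instead of being routed through a synthetic condition. Tuples stand in for ASP facts here.

```python
specs = ["zlib", "hdf5+mpi"]  # hypothetical input specs
facts = []
for idx, spec in enumerate(specs):
    facts.append(("literal", idx))
    facts.append(("literal", idx, "root", spec))
    # ... one fact per remaining clause of the spec would follow here ...

print(facts)
# [('literal', 0), ('literal', 0, 'root', 'zlib'),
#  ('literal', 1), ('literal', 1, 'root', 'hdf5+mpi')]
```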
@@ -1,72 +0,0 @@
-% Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-% Spack Project Developers. See the top-level COPYRIGHT file for details.
-%
-% SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-% associated conditions by cause -> effect
-condition_cause(Effect, Cause) :-
-  condition_holds(Effect), condition_holds(Cause),
-  attr(Name, A1),
-  condition_requirement(Effect, Name, A1),
-  imposed_constraint(Cause, Name, A1).
-condition_cause(Effect, Cause) :-
-  condition_holds(Effect), condition_holds(Cause),
-  attr(Name, A1, A2),
-  condition_requirement(Effect, Name, A1, A2),
-  imposed_constraint(Cause, Name, A1, A2).
-condition_cause(Effect, Cause) :-
-  condition_holds(Effect), condition_holds(Cause),
-  attr(Name, A1, A2, A3),
-  condition_requirement(Effect, Name, A1, A2, A3),
-  imposed_constraint(Cause, Name, A1, A2, A3).
-condition_cause(Effect, Cause) :-
-  condition_holds(Effect), condition_holds(Cause),
-  attr(Name, A1, A2, A3, A4),
-  condition_requirement(Effect, Name, A1, A2, A3, A4),
-  imposed_constraint(Cause, Name, A1, A2, A3, A4).
-
-% At most one variant for single valued variants
-error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, Cause2)
-  :- attr("node", Package),
-     variant(Package, Variant),
-     variant_single_value(Package, Variant),
-     build(Package),
-     attr("variant_value", Package, Variant, Value1),
-     imposed_constraint(Cause1, "variant_set", Package, Variant, Value1),
-     condition_holds(Cause1),
-     attr("variant_value", Package, Variant, Value2),
-     imposed_constraint(Cause2, "variant_set", Package, Variant, Value2),
-     condition_holds(Cause2),
-     Value1 < Value2. % see[1] in concretize.lp
-
-% We cannot have a version that violates another version constraint
-error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Package, Constraint, startcauses, VersionCause, ConstraintCause)
-  :- attr("node", Package),
-     attr("version", Package, Version),
-     imposed_constraint(VersionCause, "node_version_satisfies", Package, Version),
-     condition_holds(VersionCause),
-     attr("node_version_satisfies", Package, Constraint),
-     imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
-     condition_holds(ConstraintCause),
-     not version_satisfies(Package, Constraint, Version).
-
-% A virtual package may or may not have a version, but never has more than one
-% Error to catch how it happens
-error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Virtual, Constraint, startcauses, VersionCause, ConstraintCause)
-  :- attr("virtual_node", Virtual),
-     attr("version", Virtual, Version),
-     imposed_constraint(VersionCause, "node_version_satisfies", Virtual, Version),
-     condition_holds(VersionCause),
-     attr("node_version_satisfies", Virtual, Constraint),
-     imposed_constraint(ConstraintCause, "node_version_satisfies", Virtual, Constraint),
-     condition_holds(ConstraintCause),
-     not version_satisfies(Virtual, Constraint, Version).
-
-% More specific error message if the version cannot satisfy some constraint
-% Otherwise covered by `no_version_error` and `versions_conflict_error`.
-error(0, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause)
-  :- attr("node_version_satisfies", Package, Constraint),
-     imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
-     condition_holds(ConstraintCause),
-     attr("version", Package, Version),
-     not version_satisfies(Package, Constraint, Version).
@@ -12,8 +12,8 @@
 %-----------------------------------------------------------------------------

 % Give clingo the choice to solve an input spec or not
-{ attr("literal_solved", ID) } :- literal(ID).
-literal_not_solved(ID) :- not attr("literal_solved", ID), literal(ID).
+{ literal_solved(ID) } :- literal(ID).
+literal_not_solved(ID) :- not literal_solved(ID), literal(ID).

 % If concretize_everything() is a fact, then we cannot have unsolved specs
 :- literal_not_solved(ID), concretize_everything.

@@ -21,14 +21,24 @@ literal_not_solved(ID) :- not attr("literal_solved", ID), literal(ID).
 % Make a problem with "zero literals solved" unsat. This is to trigger
 % looking for solutions to the ASP problem with "errors", which results
 % in better reporting for users. See #30669 for details.
-1 { attr("literal_solved", ID) : literal(ID) }.
+1 { literal_solved(ID) : literal(ID) }.

 opt_criterion(300, "number of input specs not concretized").
 #minimize{ 0@300: #true }.
 #minimize { 1@300,ID : literal_not_solved(ID) }.

+% Map constraint on the literal ID to the correct PSID
+attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
+attr(Name, A1, A2) :- literal(LiteralID, Name, A1, A2), literal_solved(LiteralID).
+attr(Name, A1, A2, A3) :- literal(LiteralID, Name, A1, A2, A3), literal_solved(LiteralID).
+attr(Name, A1, A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), literal_solved(LiteralID).
+
 #defined concretize_everything/0.
 #defined literal/1.
+#defined literal/3.
+#defined literal/4.
+#defined literal/5.
+#defined literal/6.

 % Attributes for node packages which must have a single value
 attr_single_value("version").

@@ -48,13 +58,6 @@ error(100, multiple_values_error, Attribute, Package)
     attr_single_value(Attribute),
     2 { attr(Attribute, Package, Version) }.

-%-----------------------------------------------------------------------------
-% Define functions for error handling
-%-----------------------------------------------------------------------------
-
-#defined error/9.
-#defined condition_cause/2.
-
 %-----------------------------------------------------------------------------
 % Version semantics
 %-----------------------------------------------------------------------------

@@ -93,18 +96,7 @@ version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package
 { attr("version", Package, Version) : version_declared(Package, Version) }
   :- attr("node", Package).

-% Error to ensure structure of the program is not violated
-error(2, "No version from '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
-  :- attr("node", Package),
-     attr("version", Package, Version1),
-     attr("version", Package, Version2),
-     Version1 < Version2. % see[1]
-
-error(2, "No versions available for package '{0}'", Package)
-  :- attr("node", Package), not attr("version", Package, _).
-
 % A virtual package may or may not have a version, but never has more than one
-% fallback error for structure in case there's another way for it to happen
 error(100, "Cannot select a single version for virtual '{0}'", Virtual)
   :- attr("virtual_node", Virtual),
      2 { attr("version", Virtual, Version) }.

@@ -158,7 +150,8 @@ possible_version_weight(Package, Weight)
   :- attr("node_version_satisfies", Package, Constraint),
      version_satisfies(Package, Constraint, _).

-% Error for structure of program
+% More specific error message if the version cannot satisfy some constraint
+% Otherwise covered by `no_version_error` and `versions_conflict_error`.
 error(10, "Cannot satisfy '{0}@{1}'", Package, Constraint)
   :- attr("node_version_satisfies", Package, Constraint),
      attr("version", Package, Version),

@@ -189,8 +182,9 @@ condition_holds(ID) :-
     attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3);
     attr(Name, A1, A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4).

-% condition_holds(ID) implies all imposed_constraints.
-impose(ID) :- condition_holds(ID).
+% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
+% is derived. This allows imposed constraints to be canceled in special cases.
+impose(ID) :- condition_holds(ID), not do_not_impose(ID).

 % conditions that hold impose constraints on other specs
 attr(Name, A1) :- impose(ID), imposed_constraint(ID, Name, A1).
@@ -235,19 +229,33 @@ depends_on(Package, Dependency) :- attr("depends_on", Package, Dependency, _).
 % a dependency holds if its condition holds and if it is not external or
 % concrete. We chop off dependencies for externals, and dependencies of
 % concrete specs don't need to be resolved -- they arise from the concrete
-% specs themselves. This attr is used in constraints from dependency conditions
-attr("spack_installed", Package) :- build(Package), not external(Package).
+% specs themselves.
+dependency_holds(Package, Dependency, Type) :-
+  dependency_condition(ID, Package, Dependency),
+  dependency_type(ID, Type),
+  build(Package),
+  not external(Package),
+  condition_holds(ID).
+
+% We cut off dependencies of externals (as we don't really know them).
+% Don't impose constraints on dependencies that don't exist.
+do_not_impose(ID) :-
+  not dependency_holds(Package, Dependency, _),
+  dependency_condition(ID, Package, Dependency).

 % declared dependencies are real if they're not virtual AND
 % the package is not an external.
+% They're only triggered if the associated dependnecy condition holds.
 attr("depends_on", Package, Dependency, Type)
-  :- attr("dependency_holds", Package, Dependency, Type),
+  :- dependency_holds(Package, Dependency, Type),
     not virtual(Dependency).

 % every root must be a node
 attr("node", Package) :- attr("root", Package).

 % dependencies imply new nodes
 attr("node", Dependency) :- attr("node", Package), depends_on(Package, Dependency).

 % all nodes in the graph must be reachable from some root
 % this ensures a user can't say `zlib ^libiconv` (neither of which have any
 % dependencies) and get a two-node unconnected graph

@@ -288,17 +296,14 @@ error(1, Msg) :- attr("node", Package),
 % if a package depends on a virtual, it's not external and we have a
 % provider for that virtual then it depends on the provider
 attr("depends_on", Package, Provider, Type)
-  :- attr("dependency_holds", Package, Virtual, Type),
+  :- dependency_holds(Package, Virtual, Type),
     provider(Provider, Virtual),
     not external(Package).

-% If a package depends on a provider, the provider must be a node
-% nodes that are not indirected by a virtual are instantiated
-% directly from the imposed constraints of the dependency condition
-attr("node", Provider)
-  :- attr("dependency_holds", Package, Virtual, Type),
-     provider(Provider, Virtual),
-     not external(Package).
+% dependencies on virtuals also imply that the virtual is a virtual node
+attr("virtual_node", Virtual)
+  :- dependency_holds(Package, Virtual, Type),
+     virtual(Virtual), not external(Package).

 % If there's a virtual node, we must select one and only one provider.
 % The provider must be selected among the possible providers.

@@ -325,11 +330,17 @@ attr("root", Package) :- attr("virtual_root", Virtual), provider(Package, Virtua
 % for environments that are concretized together (e.g. where we
 % asks to install "mpich" and "hdf5+mpi" and we want "mpich" to
 % be the mpi provider)
-provider(Package, Virtual) :- attr("node", Package), attr("virtual_condition_holds", Package, Virtual).
+provider(Package, Virtual) :- attr("node", Package), virtual_condition_holds(Package, Virtual).

+% The provider provides the virtual if some provider condition holds.
+virtual_condition_holds(Provider, Virtual) :-
+    provider_condition(ID, Provider, Virtual),
+    condition_holds(ID),
+    virtual(Virtual).
+
 % A package cannot be the actual provider for a virtual if it does not
 % fulfill the conditions to provide that virtual
-:- provider(Package, Virtual), not attr("virtual_condition_holds", Package, Virtual),
+:- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
    internal_error("Virtual when provides not respected").

 #defined possible_provider/2.

@@ -371,8 +382,14 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen

 % do not warn if generated program contains none of these.
 #defined possible_provider/2.
+#defined provider_condition/3.
+#defined required_provider_condition/3.
+#defined required_provider_condition/4.
+#defined required_provider_condition/5.
+#defined required_provider_condition/6.
+#defined declared_dependency/3.
 #defined virtual/1.
+#defined virtual_condition_holds/2.
 #defined external/1.
 #defined external_spec/2.
 #defined external_version_declared/4.

@@ -420,15 +437,25 @@ external(Package) :- attr("external_spec_selected", Package, _).

 % determine if an external spec has been selected
 attr("external_spec_selected", Package, LocalIndex) :-
-    attr("external_conditions_hold", Package, LocalIndex),
+    external_conditions_hold(Package, LocalIndex),
     attr("node", Package),
     not attr("hash", Package, _).

+external_conditions_hold(Package, LocalIndex) :-
+    possible_external(ID, Package, LocalIndex), condition_holds(ID).
+
 % it cannot happen that a spec is external, but none of the external specs
 % conditions hold.
 error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
   :- external(Package),
-     not attr("external_conditions_hold", Package, _).
+     not external_conditions_hold(Package, _).

 #defined possible_external/3.
+#defined external_spec_index/3.
+#defined external_spec_condition/3.
+#defined external_spec_condition/4.
+#defined external_spec_condition/5.
+#defined external_spec_condition/6.

 %-----------------------------------------------------------------------------
 % Config required semantics

@@ -567,6 +594,7 @@ attr("variant_value", Package, Variant, Value) :-
     variant(Package, Variant),
     build(Package).

+
 error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
   :- attr("node", Package),
      variant(Package, Variant),

@@ -637,7 +665,7 @@ variant_default_not_used(Package, Variant, Value)
 external_with_variant_set(Package, Variant, Value)
   :- attr("variant_value", Package, Variant, Value),
      condition_requirement(ID, "variant_value", Package, Variant, Value),
-     imposed_constraint(ID, "external_conditions_hold", Package, _),
+     possible_external(ID, Package, _),
     external(Package),
     attr("node", Package).

@@ -23,12 +23,5 @@
 #show error/4.
 #show error/5.
 #show error/6.
 #show error/7.
-#show error/8.
-#show error/9.
-
-% show cause -> effect data for errors
-#show condition_cause/2.
-#show condition/2.

 % debug
@@ -50,6 +50,7 @@
 """
 import collections
 import collections.abc
+import enum
 import io
 import itertools
 import os

@@ -173,6 +174,16 @@
 SPECFILE_FORMAT_VERSION = 3


+# InstallStatus is used to map install statuses to symbols for display
+# Options are artificially disjoint for dispay purposes
+class InstallStatus(enum.Enum):
+    installed = "@g{[+]}  "
+    upstream = "@g{[^]}  "
+    external = "@g{[e]}  "
+    absent = "@K{ - }  "
+    missing = "@r{[-]}  "
+
+
 def colorize_spec(spec):
     """Returns a spec colorized according to the colors specified in
     color_formats."""
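Aside (not part of the diff): a stand-alone copy of the enum pattern introduced above — each member's value carries the exact color-markup prefix that `tree()` prints for a node in that state.

```python
import enum

class InstallStatus(enum.Enum):
    installed = "@g{[+]}  "
    upstream = "@g{[^]}  "
    external = "@g{[e]}  "
    absent = "@K{ - }  "
    missing = "@r{[-]}  "

status = InstallStatus.upstream
print(status.value + "openmpi@4.1.5")  # markup is resolved later by colorize
```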
@@ -4333,7 +4344,7 @@ def write_attribute(spec, attribute, color):

         if callable(current):
             raise SpecFormatStringError("Attempted to format callable object")
-        if current is None:
+        if not current:
             # We're not printing anything
             return
@@ -4401,12 +4412,20 @@ def __str__(self):
     def install_status(self):
         """Helper for tree to print DB install status."""
         if not self.concrete:
-            return None
-        try:
-            record = spack.store.db.get_record(self)
-            return record.installed
-        except KeyError:
-            return None
+            return InstallStatus.absent
+
+        if self.external:
+            return InstallStatus.external
+
+        upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
+        if not record:
+            return InstallStatus.absent
+        elif upstream and record.installed:
+            return InstallStatus.upstream
+        elif record.installed:
+            return InstallStatus.installed
+        else:
+            return InstallStatus.missing

     def _installed_explicitly(self):
         """Helper for tree to print DB install status."""

@@ -4420,7 +4439,10 @@ def _installed_explicitly(self):

     def tree(self, **kwargs):
         """Prints out this spec and its dependencies, tree-formatted
-        with indentation."""
+        with indentation.
+
+        Status function may either output a boolean or an InstallStatus
+        """
         color = kwargs.pop("color", clr.get_color_when())
         depth = kwargs.pop("depth", False)
         hashes = kwargs.pop("hashes", False)

@@ -4452,14 +4474,12 @@ def tree(self, **kwargs):

             if status_fn:
                 status = status_fn(node)
-                if node.installed_upstream:
-                    out += clr.colorize("@g{[^]}  ", color=color)
-                elif status is None:
-                    out += clr.colorize("@K{ - }  ", color=color)  # !installed
+                if status in list(InstallStatus):
+                    out += clr.colorize(status.value, color=color)
                 elif status:
-                    out += clr.colorize("@g{[+]}  ", color=color)  # installed
+                    out += clr.colorize("@g{[+]}  ", color=color)
                 else:
-                    out += clr.colorize("@r{[-]}  ", color=color)  # missing
+                    out += clr.colorize("@r{[-]}  ", color=color)

             if hashes:
                 out += clr.colorize("@K{%s}  ", color=color) % node.dag_hash(hlen)
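Aside (not part of the diff): the widened status handling above means a status function may now return either an `InstallStatus` member (rendered via its value) or a plain boolean (kept for backward compatibility). A minimal, self-contained sketch with an abbreviated stand-in enum:

```python
import enum

class InstallStatus(enum.Enum):  # abbreviated stand-in for the enum above
    installed = "@g{[+]}  "
    absent = "@K{ - }  "

def render_status(status):
    # Enum members carry their own markup; booleans keep the old behavior.
    if status in list(InstallStatus):
        return status.value
    elif status:
        return "@g{[+]}  "
    else:
        return "@r{[-]}  "

assert render_status(InstallStatus.absent) == "@K{ - }  "
assert render_status(True) == "@g{[+]}  "
assert render_status(False) == "@r{[-]}  "
```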
@@ -100,9 +100,15 @@ def quote(text):
     return ['"{0}"'.format(line) for line in text]


+def curly_quote(text):
+    """Encloses each line of text in curly braces"""
+    return ["{{{0}}}".format(line) for line in text]
+
+
 def _set_filters(env):
     """Sets custom filters to the template engine environment"""
     env.filters["textwrap"] = textwrap.wrap
     env.filters["prepend_to_line"] = prepend_to_line
     env.filters["join"] = "\n".join
     env.filters["quote"] = quote
+    env.filters["curly_quote"] = curly_quote
@@ -115,9 +115,6 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl

     spack.config.config, old_config = cfg, spack.config.config
     spack.config.config.set("repos", [spack.paths.mock_packages_path])
-    # This is essential, otherwise the cache will create weird side effects
-    # that will compromise subsequent tests if compilers.yaml is modified
-    monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
     njobs = spack.config.get("config:build_jobs")
     if not njobs:
         spack.config.set("config:build_jobs", 4, scope="user")
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import pickle
-import sys

 import pytest

@@ -40,10 +39,7 @@ def test_dump(tmpdir):
     with tmpdir.as_cwd():
         build_env("--dump", _out_file, "zlib")
         with open(_out_file) as f:
-            if sys.platform == "win32":
-                assert any(line.startswith('set "PATH=') for line in f.readlines())
-            else:
-                assert any(line.startswith("PATH=") for line in f.readlines())
+            assert any(line.startswith("PATH=") for line in f.readlines())


 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
@@ -8,8 +8,6 @@

import pytest

import llnl.util.filesystem

import spack.compilers
import spack.main
import spack.version
@@ -18,124 +16,8 @@


@pytest.fixture
def mock_compiler_version():
    return "4.5.3"


@pytest.fixture()
def mock_compiler_dir(tmpdir, mock_compiler_version):
    """Return a directory containing a fake, but detectable, compiler."""

    tmpdir.ensure("bin", dir=True)
    bin_dir = tmpdir.join("bin")

    gcc_path = bin_dir.join("gcc")
    gxx_path = bin_dir.join("g++")
    gfortran_path = bin_dir.join("gfortran")

    gcc_path.write(
        """\
#!/bin/sh

for arg in "$@"; do
    if [ "$arg" = -dumpversion ]; then
        echo '%s'
    fi
done
"""
        % mock_compiler_version
    )

    # Create some mock compilers in the temporary directory
    llnl.util.filesystem.set_executable(str(gcc_path))
    gcc_path.copy(gxx_path, mode=True)
    gcc_path.copy(gfortran_path, mode=True)

    return str(tmpdir)


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
    with tmpdir.as_cwd():
        with open("gcc", "w") as f:
            f.write(
                """\
#!/bin/sh
echo "0.0.0"
"""
            )
        os.chmod("gcc", 0o700)

    os.environ["PATH"] = str(tmpdir)
    output = compiler("find", "--scope=site")

    assert "gcc" in output


@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
    with tmpdir.as_cwd():
        # make a script to emulate apple gcc's version args
        with open("gcc", "w") as f:
            f.write(
                """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
    echo "4.2.1"
elif [ "$1" = "--version" ]; then
    echo "Configured with: --prefix=/dummy"
    echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
    echo "Target: x86_64-apple-darwin18.7.0"
    echo "Thread model: posix"
    echo "InstalledDir: /dummy"
else
    echo "clang: error: no input files"
fi
"""
            )
        os.chmod("gcc", 0o700)

    os.environ["PATH"] = str(tmpdir)
    output = compiler("find", "--scope=site")

    assert "gcc" not in output


def test_compiler_remove(mutable_config, mock_packages):
    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
    spack.cmd.compiler.compiler_remove(args)
    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
script on Windows",
)
def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
    # Compilers available by default.
    old_compilers = set(spack.compilers.all_compiler_specs())

    args = spack.util.pattern.Bunch(
        all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
    )
    spack.cmd.compiler.compiler_find(args)

    # Ensure new compiler is in there
    new_compilers = set(spack.compilers.all_compiler_specs())
    new_compiler = new_compilers - old_compilers
    assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)


@pytest.fixture
def clangdir(tmpdir):
    """Create a directory with some dummy compiler scripts in it.
def compilers_dir(mock_executable):
    """Create a directory with some mock compiler scripts in it.

    Scripts are:
      - clang
@@ -145,11 +27,9 @@ def clangdir(tmpdir):
      - gfortran-8

    """
    with tmpdir.as_cwd():
        with open("clang", "w") as f:
            f.write(
                """\
#!/bin/sh
    clang_path = mock_executable(
        "clang",
        output="""
if [ "$1" = "--version" ]; then
    echo "clang version 11.0.0 (clang-1100.0.33.16)"
    echo "Target: x86_64-apple-darwin18.7.0"
@@ -159,12 +39,11 @@ def clangdir(tmpdir):
    echo "clang: error: no input files"
    exit 1
fi
"""
            )
        shutil.copy("clang", "clang++")
""",
    )
    shutil.copy(clang_path, clang_path.parent / "clang++")

    gcc_script = """\
#!/bin/sh
    gcc_script = """
if [ "$1" = "-dumpversion" ]; then
    echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
@@ -178,30 +57,111 @@ def clangdir(tmpdir):
    exit 1
fi
"""
    with open("gcc-8", "w") as f:
        f.write(gcc_script.format("gcc", "gcc-8"))
    with open("g++-8", "w") as f:
        f.write(gcc_script.format("g++", "g++-8"))
    with open("gfortran-8", "w") as f:
        f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
    os.chmod("clang", 0o700)
    os.chmod("clang++", 0o700)
    os.chmod("gcc-8", 0o700)
    os.chmod("g++-8", 0o700)
    os.chmod("gfortran-8", 0o700)
    mock_executable("gcc-8", output=gcc_script.format("gcc", "gcc-8"))
    mock_executable("g++-8", output=gcc_script.format("g++", "g++-8"))
    mock_executable("gfortran-8", output=gcc_script.format("GNU Fortran", "gfortran-8"))

    yield tmpdir
    return clang_path.parent


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
    """Tests that 'spack compiler find' looks into PATH by default, if no specific path
    is given.
    """
    gcc_path = mock_executable("gcc", output='echo "0.0.0"')

    os.environ["PATH"] = str(gcc_path.parent)
    output = compiler("find", "--scope=site")

    assert "gcc" in output


@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
    """Tests that Spack won't mistake Apple's GCC for a "real" GCC, since it's really
    Clang with a few tweaks.
    """
    gcc_path = mock_executable(
        "gcc",
        output="""
if [ "$1" = "-dumpversion" ]; then
    echo "4.2.1"
elif [ "$1" = "--version" ]; then
    echo "Configured with: --prefix=/dummy"
    echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
    echo "Target: x86_64-apple-darwin18.7.0"
    echo "Thread model: posix"
    echo "InstalledDir: /dummy"
else
    echo "clang: error: no input files"
fi
""",
    )

    os.environ["PATH"] = str(gcc_path.parent)
    output = compiler("find", "--scope=site")

    assert "gcc" not in output


@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
    """Tests that we can remove a compiler from configuration."""
    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
    spack.cmd.compiler.compiler_remove(args)
    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.regression("37996")
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
    # Duplicate "site" scope into "user" scope
    site_config = spack.config.get("compilers", scope="site")
    spack.config.set("compilers", site_config, scope="user")

    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
    spack.cmd.compiler.compiler_remove(args)
    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_packages, mock_executable):
    """Tests that we can add a compiler to configuration."""
    expected_version = "4.5.3"
    gcc_path = mock_executable(
        "gcc",
        output=f"""\
for arg in "$@"; do
  if [ "$arg" = -dumpversion ]; then
    echo '{expected_version}'
  fi
done
""",
    )
    bin_dir = gcc_path.parent
    root_dir = bin_dir.parent

    compilers_before_find = set(spack.compilers.all_compiler_specs())
    args = spack.util.pattern.Bunch(
        all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
    )
    spack.cmd.compiler.compiler_find(args)
    compilers_after_find = set(spack.compilers.all_compiler_specs())

    compilers_added_by_find = compilers_after_find - compilers_before_find
    assert len(compilers_added_by_find) == 1
    new_compiler = compilers_added_by_find.pop()
    assert new_compiler.version == spack.version.Version(expected_version)


@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we'll mix compilers with different suffixes when necessary."""
    os.environ["PATH"] = str(clangdir)
    os.environ["PATH"] = str(compilers_dir)
    output = compiler("find", "--scope=site")

    assert "clang@11.0.0" in output
@@ -211,39 +171,33 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

    gfortran_path = str(clangdir.join("gfortran-8"))
    gfortran_path = str(compilers_dir / "gfortran-8")

    assert clang["paths"] == {
        "cc": str(clangdir.join("clang")),
        "cxx": str(clangdir.join("clang++")),
        "cc": str(compilers_dir / "clang"),
        "cxx": str(compilers_dir / "clang++"),
        # we only auto-detect mixed clang on macos
        "f77": gfortran_path if sys.platform == "darwin" else None,
        "fc": gfortran_path if sys.platform == "darwin" else None,
    }

    assert gcc["paths"] == {
        "cc": str(clangdir.join("gcc-8")),
        "cxx": str(clangdir.join("g++-8")),
        "cc": str(compilers_dir / "gcc-8"),
        "cxx": str(compilers_dir / "g++-8"),
        "f77": gfortran_path,
        "fc": gfortran_path,
    }


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
    with clangdir.as_cwd():
        shutil.copy("clang", "clang-gpu")
        shutil.copy("clang++", "clang++-gpu")
        os.chmod("clang-gpu", 0o700)
        os.chmod("clang++-gpu", 0o700)
    clang_path = compilers_dir / "clang"
    shutil.copy(clang_path, clang_path.parent / "clang-gpu")
    shutil.copy(clang_path, clang_path.parent / "clang++-gpu")

    os.environ["PATH"] = str(clangdir)
    os.environ["PATH"] = str(compilers_dir)
    output = compiler("find", "--scope=site")

    assert "clang@11.0.0" in output
@@ -252,46 +206,38 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir
    config = spack.compilers.get_compiler_config("site", False)
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")

    assert clang["paths"]["cc"] == str(clangdir.join("clang"))
    assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
    assert clang["paths"]["cc"] == str(compilers_dir / "clang")
    assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
script on Windows",
)
def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
    """Ensure that we find compilers that come first in the PATH first"""

    with clangdir.as_cwd():
        os.mkdir("first_in_path")
        shutil.copy("gcc-8", "first_in_path/gcc-8")
        shutil.copy("g++-8", "first_in_path/g++-8")
        shutil.copy("gfortran-8", "first_in_path/gfortran-8")

    # the first_in_path folder should be searched first
    os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
    new_dir = compilers_dir / "first_in_path"
    new_dir.mkdir()
    for name in ("gcc-8", "g++-8", "gfortran-8"):
        shutil.copy(compilers_dir / name, new_dir / name)
    # Set PATH to have the new folder searched first
    os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))

    compiler("find", "--scope=site")

    config = spack.compilers.get_compiler_config("site", False)

    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

    assert gcc["paths"] == {
        "cc": str(clangdir.join("first_in_path", "gcc-8")),
        "cxx": str(clangdir.join("first_in_path", "g++-8")),
        "f77": str(clangdir.join("first_in_path", "gfortran-8")),
        "fc": str(clangdir.join("first_in_path", "gfortran-8")),
        "cc": str(new_dir / "gcc-8"),
        "cxx": str(new_dir / "g++-8"),
        "f77": str(new_dir / "gfortran-8"),
        "fc": str(new_dir / "gfortran-8"),
    }


def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
    # Spack should not automatically search for compilers when listing them and none
    # are available. And when stdout is not a tty like in tests, there should be no
    # output and no error exit code.
    os.environ["PATH"] = str(clangdir)
def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
    """Spack should not automatically search for compilers when listing them and none are
    available. And when stdout is not a tty like in tests, there should be no output and
    no error exit code.
    """
    os.environ["PATH"] = str(compilers_dir)
    out = compiler("list")
    assert not out
    assert compiler.returncode == 0
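All of the compiler-find tests above share one mechanism: write a tiny shell script that answers the version queries a real compiler would, mark it executable, and point PATH at its directory before running detection. A minimal self-contained sketch of that mechanism, outside pytest (the make_fake_gcc helper, directory, and version string are illustrative, not Spack APIs):

import os
import stat
import subprocess

def make_fake_gcc(directory):
    # Hypothetical helper: a script that answers -dumpversion the way gcc would.
    path = os.path.join(directory, "gcc")
    with open(path, "w") as f:
        f.write('#!/bin/sh\n[ "$1" = "-dumpversion" ] && echo "8.4.0"\n')
    os.chmod(path, os.stat(path).st_mode | stat.S_IXUSR)
    return path

fake = make_fake_gcc("/tmp/fake-bin")  # assumes /tmp/fake-bin already exists
os.environ["PATH"] = os.path.dirname(fake)
# Any detector that shells out to `gcc -dumpversion` now sees 8.4.0:
print(subprocess.run(["gcc", "-dumpversion"], capture_output=True, text=True).stdout)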
@@ -396,17 +396,16 @@ def reset_string():
    with envdir.as_cwd():
        with open("spack.yaml", "w") as f:
            f.write(
                """\
                f"""\
spack:
  specs:
  - %s@0.0.0
  - {test_spec}@0.0.0

  develop:
    dev-build-test-install:
      spec: dev-build-test-install@0.0.0
      path: %s
      path: {build_dir}
"""
                % (test_spec, build_dir)
            )

        env("create", "test", "./spack.yaml")
@@ -2688,7 +2688,7 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m

    spec = e.specs_by_hash[e.concretized_order[0]]
    view_prefix = e.default_view.get_projection_for_spec(spec)
    modules_glob = "%s/modules/**/*/*" % e.path
    modules_glob = "%s/modules/**/*" % e.path
    modules = glob.glob(modules_glob)
    assert len(modules) == 1
    module = modules[0]
@@ -2724,12 +2724,12 @@ def test_multiple_modules_post_env_hook(environment_from_manifest, install_mocke

    spec = e.specs_by_hash[e.concretized_order[0]]
    view_prefix = e.default_view.get_projection_for_spec(spec)
    modules_glob = "%s/modules/**/*/*" % e.path
    modules_glob = "%s/modules/**/*" % e.path
    modules = glob.glob(modules_glob)
    assert len(modules) == 1
    module = modules[0]

    full_modules_glob = "%s/full_modules/**/*/*" % e.path
    full_modules_glob = "%s/full_modules/**/*" % e.path
    full_modules = glob.glob(full_modules_glob)
    assert len(full_modules) == 1
    full_module = full_modules[0]
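The glob change in this hunk is subtler than it looks: with glob.glob, the ** wildcard only means "any number of directories" when recursive=True is passed; otherwise it behaves like a single *, which is why the old modules/**/*/* and the new modules/**/* match different directory depths. A quick stdlib reminder (not a claim about which mode these particular tests run in):

import glob

# Default mode: "**" acts like "*", so this matches exactly two levels deep.
two_levels = glob.glob("modules/**/*")
# Recursive mode: "**" matches zero or more directories, i.e. any depth.
any_depth = glob.glob("modules/**/*", recursive=True)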
@@ -44,9 +44,8 @@ def define_plat_exe(exe):

def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
    pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
    executables_found(
        {mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
    )
    cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
    executables_found({str(cmake_path): define_plat_exe("cmake")})

    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)

@@ -71,7 +70,7 @@ def test_find_external_two_instances_same_package(
        "cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
    )
    cmake_exe = define_plat_exe("cmake")
    executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
    executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})

    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)

@@ -107,7 +106,7 @@ def test_get_executables(working_env, mock_executable):
    cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
    path_to_exe = spack.detection.executables_in_path([os.path.dirname(cmake_path1)])
    cmake_exe = define_plat_exe("cmake")
    assert path_to_exe[cmake_path1] == cmake_exe
    assert path_to_exe[str(cmake_path1)] == cmake_exe


external = SpackCommand("external")
@@ -334,7 +333,7 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
    assert "extra_attributes" in external_gcc
    extra_attributes = external_gcc["extra_attributes"]
    assert "prefix" not in extra_attributes
    assert extra_attributes["compilers"]["c"] == gcc_exe
    assert extra_attributes["compilers"]["c"] == str(gcc_exe)


def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
@@ -397,3 +396,30 @@ def test_use_tags_for_detection(command_args, mock_executable, mutable_config, m
    assert "The following specs have been" in output
    assert "cmake" in output
    assert "openssl" not in output


@pytest.mark.regression("38733")
@pytest.mark.skipif(sys.platform == "win32", reason="the test uses bash scripts")
def test_failures_in_scanning_do_not_result_in_an_error(
    mock_executable, monkeypatch, mutable_config
):
    """Tests that scanning paths with wrong permissions won't cause `external find` to error."""
    cmake_exe1 = mock_executable(
        "cmake", output="echo cmake version 3.19.1", subdir=("first", "bin")
    )
    cmake_exe2 = mock_executable(
        "cmake", output="echo cmake version 3.23.3", subdir=("second", "bin")
    )

    # Remove access from the first directory's executable
    cmake_exe1.parent.chmod(0o600)

    value = os.pathsep.join([str(cmake_exe1.parent), str(cmake_exe2.parent)])
    monkeypatch.setenv("PATH", value)

    output = external("find", "cmake")
    assert external.returncode == 0
    assert "The following specs have been" in output
    assert "cmake" in output
    assert "3.23.3" in output
    assert "3.19.1" not in output
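The str(...) wrappers added throughout this hunk all follow from the reworked mock_executable fixture returning a pathlib.Path instead of a plain string: a Path never compares or hashes equal to its own string form, so string-keyed dictionaries miss without an explicit conversion. A two-line illustration of the underlying Python behavior:

from pathlib import Path

table = {"/usr/bin/cmake": "found"}
key = Path("/usr/bin/cmake")
assert key not in table   # Path and str hash/compare differently
assert str(key) in table  # hence the explicit str() conversions above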
@@ -337,8 +337,6 @@ def test_compiler_flags_differ_identical_compilers(self):

        # Get the compiler that matches the spec
        compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
        # Clear cache for compiler config since it has its own cache mechanism outside of config
        spack.compilers._cache_config_file = []

        # Configure spack to have two identical compilers with different flags
        default_dict = spack.compilers._to_dict(compiler)
@@ -2137,7 +2135,7 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):
            {
                "compiler": {
                    "spec": "gcc@foo",
                    "paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
                    "paths": {"cc": str(gcc_path), "cxx": str(gcc_path), "f77": None, "fc": None},
                    "operating_system": "debian6",
                    "modules": [],
                }
@@ -1669,22 +1669,21 @@ def clear_directive_functions():


@pytest.fixture
def mock_executable(tmpdir):
def mock_executable(tmp_path):
    """Factory to create a mock executable in a temporary directory that
    outputs a custom string when run.
    """
    import jinja2

    shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"

    def _factory(name, output, subdir=("bin",)):
        f = tmpdir.ensure(*subdir, dir=True).join(name)
        executable_dir = tmp_path.joinpath(*subdir)
        executable_dir.mkdir(parents=True, exist_ok=True)
        executable_path = executable_dir / name
        if sys.platform == "win32":
            f += ".bat"
        t = jinja2.Template("{{ shebang }}{{ output }}\n")
        f.write(t.render(shebang=shebang, output=output))
        f.chmod(0o755)
        return str(f)
            executable_path = executable_dir / (name + ".bat")
        executable_path.write_text(f"{ shebang }{ output }\n")
        executable_path.chmod(0o755)
        return executable_path

    return _factory
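A sketch of how a test might consume the factory above (the my_tool name and version string are made up for illustration; monkeypatch is pytest's built-in fixture):

def test_detects_my_tool(mock_executable, monkeypatch):
    tool = mock_executable("my_tool", output='echo "my_tool 1.2.3"')
    # The factory now returns a pathlib.Path, so PATH entries need str()
    monkeypatch.setenv("PATH", str(tool.parent))
    # ...run whatever detection logic should now find my_tool on PATH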
17
lib/spack/spack/test/data/sourceme_modules.sh
Normal file
@@ -0,0 +1,17 @@
#!/usr/bin/env bash
#
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

_module_raw() { return 1; };
module() { return 1; };
ml() { return 1; };
export -f _module_raw;
export -f module;
export -f ml;

export MODULES_AUTO_HANDLING=1
export __MODULES_LMCONFLICT='bar&foo'
export NEW_VAR=new
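Data files like this are typically consumed by sourcing the script in a subshell and diffing the environment before and after. A hedged sketch of that pattern, assuming spack.util.environment.EnvironmentModifications.from_sourcing_file behaves this way in this era of Spack (verify against the version you are on):

from spack.util.environment import EnvironmentModifications

# Assumption: from_sourcing_file sources the script in a clean bash subshell
# and records each resulting environment change as a modification object.
mods = EnvironmentModifications.from_sourcing_file("sourceme_modules.sh")
changed = {m.name for m in mods.env_modifications}
assert "NEW_VAR" in changed  # the script exports NEW_VAR=new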
@@ -31,164 +31,194 @@ class Amdfftw(FftwBase):

    Example : spack install amdfftw precision=float
    """

    _name = "amdfftw"
    _name = 'amdfftw'
    homepage = "https://developer.amd.com/amd-aocl/fftw/"
    url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
    git = "https://github.com/amd/amd-fftw.git"

    maintainers("amd-toolchain-support")
    maintainers = ['amd-toolchain-support']

    version("3.1", sha256="3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132")
    version("3.0.1", sha256="87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c")
    version("3.0", sha256="a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd")
    version("2.2", sha256="de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8")
    version('3.1', sha256='3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132')
    version('3.0.1', sha256='87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c')
    version('3.0', sha256='a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd')
    version('2.2', sha256='de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8')

    variant("shared", default=True, description="Builds a shared version of the library")
    variant("openmp", default=True, description="Enable OpenMP support")
    variant("threads", default=False, description="Enable SMP threads support")
    variant("debug", default=False, description="Builds a debug version of the library")
    variant('shared', default=True,
            description='Builds a shared version of the library')
    variant('openmp', default=True,
            description='Enable OpenMP support')
    variant('threads', default=False,
            description='Enable SMP threads support')
    variant('debug', default=False,
            description='Builds a debug version of the library')
    variant(
        "amd-fast-planner",
        'amd-fast-planner',
        default=False,
        description="Option to reduce the planning time without much "
        "tradeoff in the performance. It is supported for "
        "float and double precisions only.",
    )
    variant("amd-top-n-planner", default=False, description="Build with amd-top-n-planner support")
        description='Option to reduce the planning time without much '
                    'tradeoff in the performance. It is supported for '
                    'float and double precisions only.')
    variant(
        "amd-mpi-vader-limit", default=False, description="Build with amd-mpi-vader-limit support"
    )
    variant("static", default=False, description="Build with static support")
    variant("amd-trans", default=False, description="Build with amd-trans support")
    variant("amd-app-opt", default=False, description="Build with amd-app-opt support")
        'amd-top-n-planner',
        default=False,
        description='Build with amd-top-n-planner support')
    variant(
        'amd-mpi-vader-limit',
        default=False,
        description='Build with amd-mpi-vader-limit support')
    variant(
        'static',
        default=False,
        description='Build with static support')
    variant(
        'amd-trans',
        default=False,
        description='Build with amd-trans support')
    variant(
        'amd-app-opt',
        default=False,
        description='Build with amd-app-opt support')

    depends_on("texinfo")
    depends_on('texinfo')

    provides("fftw-api@3", when="@2:")
    provides('fftw-api@3', when='@2:')

    conflicts(
        "precision=quad",
        when="@2.2 %aocc",
        msg="Quad precision is not supported by AOCC clang version 2.2",
    )
        'precision=quad',
        when='@2.2 %aocc',
        msg='Quad precision is not supported by AOCC clang version 2.2')
    conflicts(
        "+debug", when="@2.2 %aocc", msg="debug mode is not supported by AOCC clang version 2.2"
    )
    conflicts("%gcc@:7.2", when="@2.2:", msg="GCC version above 7.2 is required for AMDFFTW")
        '+debug',
        when='@2.2 %aocc',
        msg='debug mode is not supported by AOCC clang version 2.2')
    conflicts(
        "+amd-fast-planner ", when="+mpi", msg="mpi thread is not supported with amd-fast-planner"
    )
        '%gcc@:7.2',
        when='@2.2:',
        msg='GCC version above 7.2 is required for AMDFFTW')
    conflicts(
        "+amd-fast-planner", when="@2.2", msg="amd-fast-planner is supported from 3.0 onwards"
    )
        '+amd-fast-planner ',
        when='+mpi',
        msg='mpi thread is not supported with amd-fast-planner')
    conflicts(
        "+amd-fast-planner",
        when="precision=quad",
        msg="Quad precision is not supported with amd-fast-planner",
    )
        '+amd-fast-planner',
        when='@2.2',
        msg='amd-fast-planner is supported from 3.0 onwards')
    conflicts(
        "+amd-fast-planner",
        when="precision=long_double",
        msg="long_double precision is not supported with amd-fast-planner",
    )
        '+amd-fast-planner',
        when='precision=quad',
        msg='Quad precision is not supported with amd-fast-planner')
    conflicts(
        "+amd-top-n-planner",
        when="@:3.0.0",
        msg="amd-top-n-planner is supported from 3.0.1 onwards",
    )
        '+amd-fast-planner',
        when='precision=long_double',
        msg='long_double precision is not supported with amd-fast-planner')
    conflicts(
        "+amd-top-n-planner",
        when="precision=long_double",
        msg="long_double precision is not supported with amd-top-n-planner",
    )
        '+amd-top-n-planner',
        when='@:3.0.0',
        msg='amd-top-n-planner is supported from 3.0.1 onwards')
    conflicts(
        "+amd-top-n-planner",
        when="precision=quad",
        msg="Quad precision is not supported with amd-top-n-planner",
    )
        '+amd-top-n-planner',
        when='precision=long_double',
        msg='long_double precision is not supported with amd-top-n-planner')
    conflicts(
        "+amd-top-n-planner",
        when="+amd-fast-planner",
        msg="amd-top-n-planner cannot be used with amd-fast-planner",
    )
        '+amd-top-n-planner',
        when='precision=quad',
        msg='Quad precision is not supported with amd-top-n-planner')
    conflicts(
        "+amd-top-n-planner", when="+threads", msg="amd-top-n-planner works only for single thread"
    )
        '+amd-top-n-planner',
        when='+amd-fast-planner',
        msg='amd-top-n-planner cannot be used with amd-fast-planner')
    conflicts(
        "+amd-top-n-planner", when="+mpi", msg="mpi thread is not supported with amd-top-n-planner"
    )
        '+amd-top-n-planner',
        when='+threads',
        msg='amd-top-n-planner works only for single thread')
    conflicts(
        "+amd-top-n-planner",
        when="+openmp",
        msg="openmp thread is not supported with amd-top-n-planner",
    )
        '+amd-top-n-planner',
        when='+mpi',
        msg='mpi thread is not supported with amd-top-n-planner')
    conflicts(
        "+amd-mpi-vader-limit",
        when="@:3.0.0",
        msg="amd-mpi-vader-limit is supported from 3.0.1 onwards",
    )
        '+amd-top-n-planner',
        when='+openmp',
        msg='openmp thread is not supported with amd-top-n-planner')
    conflicts(
        "+amd-mpi-vader-limit",
        when="precision=quad",
        msg="Quad precision is not supported with amd-mpi-vader-limit",
    )
    conflicts("+amd-trans", when="+threads", msg="amd-trans works only for single thread")
    conflicts("+amd-trans", when="+mpi", msg="mpi thread is not supported with amd-trans")
    conflicts("+amd-trans", when="+openmp", msg="openmp thread is not supported with amd-trans")
        '+amd-mpi-vader-limit',
        when='@:3.0.0',
        msg='amd-mpi-vader-limit is supported from 3.0.1 onwards')
    conflicts(
        "+amd-trans",
        when="precision=long_double",
        msg="long_double precision is not supported with amd-trans",
    )
        '+amd-mpi-vader-limit',
        when='precision=quad',
        msg='Quad precision is not supported with amd-mpi-vader-limit')
    conflicts(
        "+amd-trans", when="precision=quad", msg="Quad precision is not supported with amd-trans"
    )
    conflicts("+amd-app-opt", when="@:3.0.1", msg="amd-app-opt is supported from 3.1 onwards")
    conflicts("+amd-app-opt", when="+mpi", msg="mpi thread is not supported with amd-app-opt")
        '+amd-trans',
        when='+threads',
        msg='amd-trans works only for single thread')
    conflicts(
        "+amd-app-opt",
        when="precision=long_double",
        msg="long_double precision is not supported with amd-app-opt",
    )
        '+amd-trans',
        when='+mpi',
        msg='mpi thread is not supported with amd-trans')
    conflicts(
        "+amd-app-opt",
        when="precision=quad",
        msg="Quad precision is not supported with amd-app-opt",
    )
        '+amd-trans',
        when='+openmp',
        msg='openmp thread is not supported with amd-trans')
    conflicts(
        '+amd-trans',
        when='precision=long_double',
        msg='long_double precision is not supported with amd-trans')
    conflicts(
        '+amd-trans',
        when='precision=quad',
        msg='Quad precision is not supported with amd-trans')
    conflicts(
        '+amd-app-opt',
        when='@:3.0.1',
        msg='amd-app-opt is supported from 3.1 onwards')
    conflicts(
        '+amd-app-opt',
        when='+mpi',
        msg='mpi thread is not supported with amd-app-opt')
    conflicts(
        '+amd-app-opt',
        when='precision=long_double',
        msg='long_double precision is not supported with amd-app-opt')
    conflicts(
        '+amd-app-opt',
        when='precision=quad',
        msg='Quad precision is not supported with amd-app-opt')

    def configure(self, spec, prefix):
        """Configure function"""
        # Base options
        options = ["--prefix={0}".format(prefix), "--enable-amd-opt"]
        options = [
            '--prefix={0}'.format(prefix),
            '--enable-amd-opt'
        ]

        # Check if compiler is AOCC
        if "%aocc" in spec:
            options.append("CC={0}".format(os.path.basename(spack_cc)))
            options.append("FC={0}".format(os.path.basename(spack_fc)))
            options.append("F77={0}".format(os.path.basename(spack_fc)))
        if '%aocc' in spec:
            options.append('CC={0}'.format(os.path.basename(spack_cc)))
            options.append('FC={0}'.format(os.path.basename(spack_fc)))
            options.append('F77={0}'.format(os.path.basename(spack_fc)))

        if "+debug" in spec:
            options.append("--enable-debug")
        if '+debug' in spec:
            options.append('--enable-debug')

        if "+mpi" in spec:
            options.append("--enable-mpi")
            options.append("--enable-amd-mpifft")
        if '+mpi' in spec:
            options.append('--enable-mpi')
            options.append('--enable-amd-mpifft')
        else:
            options.append("--disable-mpi")
            options.append("--disable-amd-mpifft")
            options.append('--disable-mpi')
            options.append('--disable-amd-mpifft')

        options.extend(self.enable_or_disable("shared"))
        options.extend(self.enable_or_disable("openmp"))
        options.extend(self.enable_or_disable("threads"))
        options.extend(self.enable_or_disable("amd-fast-planner"))
        options.extend(self.enable_or_disable("amd-top-n-planner"))
        options.extend(self.enable_or_disable("amd-mpi-vader-limit"))
        options.extend(self.enable_or_disable("static"))
        options.extend(self.enable_or_disable("amd-trans"))
        options.extend(self.enable_or_disable("amd-app-opt"))
        options.extend(self.enable_or_disable('shared'))
        options.extend(self.enable_or_disable('openmp'))
        options.extend(self.enable_or_disable('threads'))
        options.extend(self.enable_or_disable('amd-fast-planner'))
        options.extend(self.enable_or_disable('amd-top-n-planner'))
        options.extend(self.enable_or_disable('amd-mpi-vader-limit'))
        options.extend(self.enable_or_disable('static'))
        options.extend(self.enable_or_disable('amd-trans'))
        options.extend(self.enable_or_disable('amd-app-opt'))

        if not self.compiler.f77 or not self.compiler.fc:
            options.append("--disable-fortran")
            options.append('--disable-fortran')

        # Cross compilation is supported in amd-fftw by making use of target
        # variable to set AMD_ARCH configure option.
@@ -196,16 +226,17 @@ class Amdfftw(FftwBase):
        # use target variable to set appropriate -march option in AMD_ARCH.
        arch = spec.architecture
        options.append(
            "AMD_ARCH={0}".format(arch.target.optimization_flags(spec.compiler).split("=")[-1])
        )
            'AMD_ARCH={0}'.format(
                arch.target.optimization_flags(
                    spec.compiler).split('=')[-1]))

        # Specific SIMD support.
        # float and double precisions are supported
        simd_features = ["sse2", "avx", "avx2"]
        simd_features = ['sse2', 'avx', 'avx2']

        simd_options = []
        for feature in simd_features:
            msg = "--enable-{0}" if feature in spec.target else "--disable-{0}"
            msg = '--enable-{0}' if feature in spec.target else '--disable-{0}'
            simd_options.append(msg.format(feature))

        # When enabling configure option "--enable-amd-opt", do not use the
@@ -215,19 +246,20 @@ class Amdfftw(FftwBase):
        # Double is the default precision, for all the others we need
        # to enable the corresponding option.
        enable_precision = {
            "float": ["--enable-float"],
            "double": None,
            "long_double": ["--enable-long-double"],
            "quad": ["--enable-quad-precision"],
            'float': ['--enable-float'],
            'double': None,
            'long_double': ['--enable-long-double'],
            'quad': ['--enable-quad-precision']
        }

        # Different precisions must be configured and compiled one at a time
        configure = Executable("../configure")
        configure = Executable('../configure')
        for precision in self.selected_precisions:

            opts = (enable_precision[precision] or []) + options[:]

            # SIMD optimizations are available only for float and double
            if precision in ("float", "double"):
            if precision in ('float', 'double'):
                opts += simd_options

            with working_dir(precision, create=True):
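The nine enable_or_disable calls above are what keep this configure method short: each boolean variant collapses into a --enable-<name> or --disable-<name> flag. A simplified stand-in for what that helper computes (the real AutotoolsPackage implementation handles more cases, such as multi-valued variants and custom activation values):

def enable_or_disable(spec, variant_name):
    # Simplified: boolean variants map to --enable-<name> / --disable-<name>.
    enabled = "+{0}".format(variant_name) in spec
    return ["--{0}-{1}".format("enable" if enabled else "disable", variant_name)]

# e.g. a spec with +openmp ~static yields ["--enable-openmp"] and ["--disable-static"]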
@@ -16,21 +16,21 @@ from spack.package import *

class Llvm(CMakePackage, CudaPackage):
    """The LLVM Project is a collection of modular and reusable compiler and
    toolchain technologies. Despite its name, LLVM has little to do
    with traditional virtual machines, though it does provide helpful
    libraries that can be used to build them. The name "LLVM" itself
    is not an acronym; it is the full name of the project.
       toolchain technologies. Despite its name, LLVM has little to do
       with traditional virtual machines, though it does provide helpful
       libraries that can be used to build them. The name "LLVM" itself
       is not an acronym; it is the full name of the project.
    """

    homepage = "https://llvm.org/"
    url = "https://github.com/llvm/llvm-project/archive/llvmorg-7.1.0.tar.gz"
    list_url = "https://releases.llvm.org/download.html"
    git = "https://github.com/llvm/llvm-project"
    maintainers("trws", "haampie")
    maintainers = ['trws', 'haampie']

    tags = ["e4s"]
    tags = ['e4s']

    generator = "Ninja"
    generator = 'Ninja'

    family = "compiler"  # Used by lmod

@@ -80,12 +80,13 @@ class Llvm(CMakePackage, CudaPackage):
    # to save space, build with `build_type=Release`.

    variant(
        "clang", default=True, description="Build the LLVM C/C++/Objective-C compiler frontend"
        "clang",
        default=True,
        description="Build the LLVM C/C++/Objective-C compiler frontend",
    )
    variant(
        "flang",
        default=False,
        when="@11: +clang",
        default=False, when='@11: +clang',
        description="Build the LLVM Fortran compiler frontend "
        "(experimental - parser only, needs GCC)",
    )
@@ -94,23 +95,27 @@ class Llvm(CMakePackage, CudaPackage):
        default=False,
        description="Include debugging code in OpenMP runtime libraries",
    )
    variant("lldb", default=True, when="+clang", description="Build the LLVM debugger")
    variant("lldb", default=True, when='+clang', description="Build the LLVM debugger")
    variant("lld", default=True, description="Build the LLVM linker")
    variant("mlir", default=False, when="@10:", description="Build with MLIR support")
    variant("mlir", default=False, when='@10:', description="Build with MLIR support")
    variant(
        "internal_unwind", default=True, when="+clang", description="Build the libcxxabi libunwind"
        "internal_unwind",
        default=True, when='+clang',
        description="Build the libcxxabi libunwind",
    )
    variant(
        "polly",
        default=True,
        description="Build the LLVM polyhedral optimization plugin, " "only builds for 3.7.0+",
        description="Build the LLVM polyhedral optimization plugin, "
        "only builds for 3.7.0+",
    )
    variant(
        "libcxx", default=True, when="+clang", description="Build the LLVM C++ standard library"
        "libcxx",
        default=True, when='+clang',
        description="Build the LLVM C++ standard library",
    )
    variant(
        "compiler-rt",
        when="+clang",
        "compiler-rt", when='+clang',
        default=True,
        description="Build LLVM compiler runtime, including sanitizers",
    )
@@ -119,7 +124,11 @@ class Llvm(CMakePackage, CudaPackage):
        default=(sys.platform != "darwin"),
        description="Add support for LTO with the gold linker plugin",
    )
    variant("split_dwarf", default=False, description="Build with split dwarf information")
    variant(
        "split_dwarf",
        default=False,
        description="Build with split dwarf information",
    )
    variant(
        "llvm_dylib",
        default=True,
@@ -127,40 +136,18 @@ class Llvm(CMakePackage, CudaPackage):
    )
    variant(
        "link_llvm_dylib",
        default=False,
        when="+llvm_dylib",
        default=False, when='+llvm_dylib',
        description="Link LLVM tools against the LLVM shared library",
    )
    variant(
        "targets",
        default="none",
        description=(
            "What targets to build. Spack's target family is always added "
            "(e.g. X86 is automatically enabled when targeting znver2)."
        ),
        values=(
            "all",
            "none",
            "aarch64",
            "amdgpu",
            "arm",
            "avr",
            "bpf",
            "cppbackend",
            "hexagon",
            "lanai",
            "mips",
            "msp430",
            "nvptx",
            "powerpc",
            "riscv",
            "sparc",
            "systemz",
            "webassembly",
            "x86",
            "xcore",
        ),
        multi=True,
        description=("What targets to build. Spack's target family is always added "
                     "(e.g. X86 is automatically enabled when targeting znver2)."),
        values=("all", "none", "aarch64", "amdgpu", "arm", "avr", "bpf", "cppbackend",
                "hexagon", "lanai", "mips", "msp430", "nvptx", "powerpc", "riscv",
                "sparc", "systemz", "webassembly", "x86", "xcore"),
        multi=True
    )
    variant(
        "build_type",
@@ -170,52 +157,51 @@ class Llvm(CMakePackage, CudaPackage):
    )
    variant(
        "omp_tsan",
        default=False,
        when="@6:",
        default=False, when='@6:',
        description="Build with OpenMP capable thread sanitizer",
    )
    variant(
        "omp_as_runtime",
        default=True,
        when="+clang @12:",
        when='+clang @12:',
        description="Build OpenMP runtime via ENABLE_RUNTIME by just-built Clang",
    )
    variant(
        "code_signing",
        default=False,
        when="+lldb platform=darwin",
        description="Enable code-signing on macOS",
    )
    variant('code_signing', default=False,
            when='+lldb platform=darwin',
            description="Enable code-signing on macOS")
    variant("python", default=False, description="Install python bindings")
    variant("version_suffix", default="none", description="Add a symbol suffix")
    variant('version_suffix', default='none', description="Add a symbol suffix")
    variant(
        "shlib_symbol_version",
        default="none",
        'shlib_symbol_version',
        default='none',
        description="Add shared library symbol version",
        when="@13:",
        when='@13:'
    )
    variant(
        "z3", default=False, when="+clang @8:", description="Use Z3 for the clang static analyzer"
        'z3',
        default=False,
        when='+clang @8:',
        description='Use Z3 for the clang static analyzer'
    )

    provides("libllvm@14", when="@14.0.0:14")
    provides("libllvm@13", when="@13.0.0:13")
    provides("libllvm@12", when="@12.0.0:12")
    provides("libllvm@11", when="@11.0.0:11")
    provides("libllvm@10", when="@10.0.0:10")
    provides("libllvm@9", when="@9.0.0:9")
    provides("libllvm@8", when="@8.0.0:8")
    provides("libllvm@7", when="@7.0.0:7")
    provides("libllvm@6", when="@6.0.0:6")
    provides("libllvm@5", when="@5.0.0:5")
    provides("libllvm@4", when="@4.0.0:4")
    provides("libllvm@3", when="@3.0.0:3")
    provides('libllvm@14', when='@14.0.0:14')
    provides('libllvm@13', when='@13.0.0:13')
    provides('libllvm@12', when='@12.0.0:12')
    provides('libllvm@11', when='@11.0.0:11')
    provides('libllvm@10', when='@10.0.0:10')
    provides('libllvm@9', when='@9.0.0:9')
    provides('libllvm@8', when='@8.0.0:8')
    provides('libllvm@7', when='@7.0.0:7')
    provides('libllvm@6', when='@6.0.0:6')
    provides('libllvm@5', when='@5.0.0:5')
    provides('libllvm@4', when='@4.0.0:4')
    provides('libllvm@3', when='@3.0.0:3')

    extends("python", when="+python")

    # Build dependency
    depends_on("cmake@3.4.3:", type="build")
    depends_on("cmake@3.13.4:", type="build", when="@12:")
    depends_on('cmake@3.13.4:', type='build', when='@12:')
    depends_on("ninja", type="build")
    depends_on("python@2.7:2.8", when="@:4 ~python", type="build")
    depends_on("python", when="@5: ~python", type="build")
@@ -256,7 +242,7 @@ class Llvm(CMakePackage, CudaPackage):
    # clang/lib: a lambda parameter cannot shadow an explicitly captured entity
    conflicts("%clang@8:", when="@:4")
    # Internal compiler error on gcc 8.4 on aarch64 https://bugzilla.redhat.com/show_bug.cgi?id=1958295
    conflicts("%gcc@8.4:8.4.9", when="@12: target=aarch64:")
    conflicts('%gcc@8.4:8.4.9', when='@12: target=aarch64:')

    # When these versions are concretized, but not explicitly with +libcxx, these
    # conflicts will enable clingo to set ~libcxx, making the build successful:
@@ -266,17 +252,17 @@ class Llvm(CMakePackage, CudaPackage):
    # GCC 11 - latest stable release per GCC release page
    # Clang: 11, 12 - latest two stable releases per LLVM release page
    # AppleClang 12 - latest stable release per Xcode release page
    conflicts("%gcc@:10", when="@13:+libcxx")
    conflicts("%clang@:10", when="@13:+libcxx")
    conflicts("%apple-clang@:11", when="@13:+libcxx")

    # libcxx-4 and compiler-rt-4 fail to build with "newer" clang and gcc versions:
    conflicts("%gcc@7:", when="@:4+libcxx")
    conflicts("%clang@6:", when="@:4+libcxx")
    conflicts("%apple-clang@6:", when="@:4+libcxx")
    conflicts("%gcc@7:", when="@:4+compiler-rt")
    conflicts("%clang@6:", when="@:4+compiler-rt")
    conflicts("%apple-clang@6:", when="@:4+compiler-rt")
    conflicts('%gcc@7:', when='@:4+libcxx')
    conflicts('%clang@6:', when='@:4+libcxx')
    conflicts('%apple-clang@6:', when='@:4+libcxx')
    conflicts('%gcc@7:', when='@:4+compiler-rt')
    conflicts('%clang@6:', when='@:4+compiler-rt')
    conflicts('%apple-clang@6:', when='@:4+compiler-rt')

    # cuda_arch value must be specified
    conflicts("cuda_arch=none", when="+cuda", msg="A value for cuda_arch must be specified.")
@@ -284,27 +270,27 @@ class Llvm(CMakePackage, CudaPackage):
    # LLVM bug https://bugs.llvm.org/show_bug.cgi?id=48234
    # CMake bug: https://gitlab.kitware.com/cmake/cmake/-/issues/21469
    # Fixed in upstream versions of both
    conflicts("^cmake@3.19.0", when="@6:11.0.0")
    conflicts('^cmake@3.19.0', when='@6:11.0.0')

    # Github issue #4986
    patch("llvm_gcc7.patch", when="@4.0.0:4.0.1+lldb %gcc@7.0:")

    # sys/ustat.h has been removed in favour of statfs from glibc-2.28. Use fixed sizes:
    patch("llvm5-sanitizer-ustat.patch", when="@4:6.0.0+compiler-rt")
    patch('llvm5-sanitizer-ustat.patch', when="@4:6.0.0+compiler-rt")

    # Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463
    patch("llvm4-lld-ELF-Symbols.patch", when="@4+lld%clang@6:")
    patch("llvm5-lld-ELF-Symbols.patch", when="@5+lld%clang@7:")
    patch('llvm4-lld-ELF-Symbols.patch', when="@4+lld%clang@6:")
    patch('llvm5-lld-ELF-Symbols.patch', when="@5+lld%clang@7:")

    # Fix missing std:size_t in 'llvm@4:5' when built with '%clang@7:'
    patch("xray_buffer_queue-cstddef.patch", when="@4:5+compiler-rt%clang@7:")
    patch('xray_buffer_queue-cstddef.patch', when="@4:5+compiler-rt%clang@7:")

    # https://github.com/llvm/llvm-project/commit/947f9692440836dcb8d88b74b69dd379d85974ce
    patch("sanitizer-ipc_perm_mode.patch", when="@5:7+compiler-rt%clang@11:")
    patch("sanitizer-ipc_perm_mode.patch", when="@5:9+compiler-rt%gcc@9:")
    patch('sanitizer-ipc_perm_mode.patch', when="@5:7+compiler-rt%clang@11:")
    patch('sanitizer-ipc_perm_mode.patch', when="@5:9+compiler-rt%gcc@9:")

    # github.com/spack/spack/issues/24270: MicrosoftDemangle for %gcc@10: and %clang@13:
    patch("missing-includes.patch", when="@8")
    patch('missing-includes.patch', when='@8')

    # Backport from llvm master + additional fix
    # see https://bugs.llvm.org/show_bug.cgi?id=39696
@@ -329,33 +315,33 @@ class Llvm(CMakePackage, CudaPackage):
    patch("llvm_python_path.patch", when="@:11")

    # Workaround for issue https://github.com/spack/spack/issues/18197
    patch("llvm7_intel.patch", when="@7 %intel@18.0.2,19.0.0:19.1.99")
    patch('llvm7_intel.patch', when='@7 %intel@18.0.2,19.0.0:19.1.99')

    # Remove cyclades support to build against newer kernel headers
    # https://reviews.llvm.org/D102059
    patch("no_cyclades.patch", when="@10:12.0.0")
    patch("no_cyclades9.patch", when="@6:9")
    patch('no_cyclades.patch', when='@10:12.0.0')
    patch('no_cyclades9.patch', when='@6:9')

    patch("llvm-gcc11.patch", when="@9:11%gcc@11:")
    patch('llvm-gcc11.patch', when='@9:11%gcc@11:')

    # add -lpthread to build OpenMP libraries with the Fujitsu compiler
    patch("llvm12-thread.patch", when="@12 %fj")
    patch("llvm13-thread.patch", when="@13 %fj")
    patch('llvm12-thread.patch', when='@12 %fj')
    patch('llvm13-thread.patch', when='@13 %fj')

    # avoid build failures with the Fujitsu compiler
    patch("llvm13-fujitsu.patch", when="@13 %fj")
    patch('llvm13-fujitsu.patch', when='@13 %fj')

    # patch for missing hwloc.h include for libompd
    patch("llvm14-hwloc-ompd.patch", when="@14")
    patch('llvm14-hwloc-ompd.patch', when='@14')

    # make libflags a list in the openmp subproject when ~omp_as_runtime
    patch("libomp-libflags-as-list.patch", when="@3.7:")
    patch('libomp-libflags-as-list.patch', when='@3.7:')

    # The functions and attributes below implement external package
    # detection for LLVM. See:
    #
    # https://spack.readthedocs.io/en/latest/packaging_guide.html#making-a-package-discoverable-with-spack-external-find
    executables = ["clang", "flang", "ld.lld", "lldb"]
    executables = ['clang', 'flang', 'ld.lld', 'lldb']

    @classmethod
    def filter_detected_exes(cls, prefix, exes_in_prefix):
@@ -365,7 +351,7 @@ class Llvm(CMakePackage, CudaPackage):
            # on some port and would hang Spack during detection.
            # clang-cl and clang-cpp are dev tools that we don't
            # need to test
            if any(x in exe for x in ("vscode", "cpp", "-cl", "-gpu")):
            if any(x in exe for x in ('vscode', 'cpp', '-cl', '-gpu')):
                continue
            result.append(exe)
        return result
@@ -374,20 +360,20 @@ class Llvm(CMakePackage, CudaPackage):
    def determine_version(cls, exe):
        version_regex = re.compile(
            # Normal clang compiler versions are left as-is
            r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
            r'clang version ([^ )\n]+)-svn[~.\w\d-]*|'
            # Don't include hyphenated patch numbers in the version
            # (see https://github.com/spack/spack/pull/14365 for details)
            r"clang version ([^ )\n]+?)-[~.\w\d-]*|"
            r"clang version ([^ )\n]+)|"
            r'clang version ([^ )\n]+?)-[~.\w\d-]*|'
            r'clang version ([^ )\n]+)|'
            # LLDB
            r"lldb version ([^ )\n]+)|"
            r'lldb version ([^ )\n]+)|'
            # LLD
            r"LLD ([^ )\n]+) \(compatible with GNU linkers\)"
            r'LLD ([^ )\n]+) \(compatible with GNU linkers\)'
        )
        try:
            compiler = Executable(exe)
            output = compiler("--version", output=str, error=str)
            if "Apple" in output:
            output = compiler('--version', output=str, error=str)
            if 'Apple' in output:
                return None
            match = version_regex.search(output)
            if match:
@@ -401,39 +387,38 @@ class Llvm(CMakePackage, CudaPackage):

    @classmethod
    def determine_variants(cls, exes, version_str):
        variants, compilers = ["+clang"], {}
        variants, compilers = ['+clang'], {}
        lld_found, lldb_found = False, False
        for exe in exes:
            if "clang++" in exe:
                compilers["cxx"] = exe
            elif "clang" in exe:
                compilers["c"] = exe
            elif "flang" in exe:
                variants.append("+flang")
                compilers["fc"] = exe
                compilers["f77"] = exe
            elif "ld.lld" in exe:
            if 'clang++' in exe:
                compilers['cxx'] = exe
            elif 'clang' in exe:
                compilers['c'] = exe
            elif 'flang' in exe:
                variants.append('+flang')
                compilers['fc'] = exe
                compilers['f77'] = exe
            elif 'ld.lld' in exe:
                lld_found = True
                compilers["ld"] = exe
            elif "lldb" in exe:
                compilers['ld'] = exe
            elif 'lldb' in exe:
                lldb_found = True
                compilers["lldb"] = exe
                compilers['lldb'] = exe

        variants.append("+lld" if lld_found else "~lld")
        variants.append("+lldb" if lldb_found else "~lldb")
        variants.append('+lld' if lld_found else '~lld')
        variants.append('+lldb' if lldb_found else '~lldb')

        return "".join(variants), {"compilers": compilers}
        return ''.join(variants), {'compilers': compilers}
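determine_variants builds the detected variant string by plain concatenation, so the result reads like an ordinary Spack spec fragment. For example, with clang and ld.lld on disk but no flang or lldb:

# "+flang" is only appended when a flang executable is seen, so here the
# pieces are just ["+clang", "+lld", "~lldb"]:
"".join(["+clang", "+lld", "~lldb"])  # -> "+clang+lld~lldb"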
    @classmethod
    def validate_detected_spec(cls, spec, extra_attributes):
        # For LLVM 'compilers' is a mandatory attribute
        msg = 'the extra attribute "compilers" must be set for ' 'the detected spec "{0}"'.format(
            spec
        )
        assert "compilers" in extra_attributes, msg
        compilers = extra_attributes["compilers"]
        for key in ("c", "cxx"):
            msg = "{0} compiler not found for {1}"
        msg = ('the extra attribute "compilers" must be set for '
               'the detected spec "{0}"'.format(spec))
        assert 'compilers' in extra_attributes, msg
        compilers = extra_attributes['compilers']
        for key in ('c', 'cxx'):
            msg = '{0} compiler not found for {1}'
            assert key in compilers, msg.format(key, spec)

    @property
@@ -441,10 +426,10 @@ class Llvm(CMakePackage, CudaPackage):
        msg = "cannot retrieve C compiler [spec is not concrete]"
        assert self.spec.concrete, msg
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("c", None)
            return self.spec.extra_attributes['compilers'].get('c', None)
        result = None
        if "+clang" in self.spec:
            result = os.path.join(self.spec.prefix.bin, "clang")
        if '+clang' in self.spec:
            result = os.path.join(self.spec.prefix.bin, 'clang')
        return result

    @property
@@ -452,10 +437,10 @@ class Llvm(CMakePackage, CudaPackage):
        msg = "cannot retrieve C++ compiler [spec is not concrete]"
        assert self.spec.concrete, msg
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("cxx", None)
            return self.spec.extra_attributes['compilers'].get('cxx', None)
        result = None
        if "+clang" in self.spec:
            result = os.path.join(self.spec.prefix.bin, "clang++")
        if '+clang' in self.spec:
            result = os.path.join(self.spec.prefix.bin, 'clang++')
        return result

    @property
@@ -463,10 +448,10 @@ class Llvm(CMakePackage, CudaPackage):
        msg = "cannot retrieve Fortran compiler [spec is not concrete]"
        assert self.spec.concrete, msg
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("fc", None)
            return self.spec.extra_attributes['compilers'].get('fc', None)
        result = None
        if "+flang" in self.spec:
            result = os.path.join(self.spec.prefix.bin, "flang")
        if '+flang' in self.spec:
            result = os.path.join(self.spec.prefix.bin, 'flang')
        return result

    @property
@@ -474,25 +459,27 @@ class Llvm(CMakePackage, CudaPackage):
        msg = "cannot retrieve Fortran 77 compiler [spec is not concrete]"
        assert self.spec.concrete, msg
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("f77", None)
            return self.spec.extra_attributes['compilers'].get('f77', None)
        result = None
        if "+flang" in self.spec:
            result = os.path.join(self.spec.prefix.bin, "flang")
        if '+flang' in self.spec:
            result = os.path.join(self.spec.prefix.bin, 'flang')
        return result

    @property
    def libs(self):
        return LibraryList(self.llvm_config("--libfiles", "all", result="list"))
        return LibraryList(self.llvm_config("--libfiles", "all",
                                            result="list"))

    @run_before("cmake")
    @run_before('cmake')
    def codesign_check(self):
        if self.spec.satisfies("+code_signing"):
            codesign = which("codesign")
            mkdir("tmp")
            llvm_check_file = join_path("tmp", "llvm_check")
            copy("/usr/bin/false", llvm_check_file)
            codesign = which('codesign')
            mkdir('tmp')
            llvm_check_file = join_path('tmp', 'llvm_check')
            copy('/usr/bin/false', llvm_check_file)
            try:
                codesign("-f", "-s", "lldb_codesign", "--dryrun", llvm_check_file)
                codesign('-f', '-s', 'lldb_codesign', '--dryrun',
                         llvm_check_file)

            except ProcessError:
                # Newer LLVM versions have a simple script that sets up
@@ -502,32 +489,32 @@ class Llvm(CMakePackage, CudaPackage):
                    setup()
                except Exception:
                    raise RuntimeError(
                        "spack was unable to either find or set up"
                        "code-signing on your system. Please refer to"
                        "https://lldb.llvm.org/resources/build.html#"
                        "code-signing-on-macos for details on how to"
                        "create this identity."
                        'spack was unable to either find or set up'
                        'code-signing on your system. Please refer to'
                        'https://lldb.llvm.org/resources/build.html#'
                        'code-signing-on-macos for details on how to'
                        'create this identity.'
                    )

    def flag_handler(self, name, flags):
        if name == "cxxflags":
        if name == 'cxxflags':
            flags.append(self.compiler.cxx11_flag)
            return (None, flags, None)
        elif name == "ldflags" and self.spec.satisfies("%intel"):
            flags.append("-shared-intel")
            return (None, flags, None)
        return (flags, None, None)
            return (None, flags, None)
        elif name == 'ldflags' and self.spec.satisfies('%intel'):
            flags.append('-shared-intel')
            return (None, flags, None)
        return (flags, None, None)
def setup_build_environment(self, env):
|
||||
"""When using %clang, add only its ld.lld-$ver and/or ld.lld to our PATH"""
|
||||
if self.compiler.name in ["clang", "apple-clang"]:
|
||||
for lld in "ld.lld-{0}".format(self.compiler.version.version[0]), "ld.lld":
|
||||
if self.compiler.name in ['clang', 'apple-clang']:
|
||||
for lld in 'ld.lld-{0}'.format(self.compiler.version.version[0]), 'ld.lld':
|
||||
bin = os.path.join(os.path.dirname(self.compiler.cc), lld)
|
||||
sym = os.path.join(self.stage.path, "ld.lld")
|
||||
sym = os.path.join(self.stage.path, 'ld.lld')
|
||||
if os.path.exists(bin) and not os.path.exists(sym):
|
||||
mkdirp(self.stage.path)
|
||||
os.symlink(bin, sym)
|
||||
env.prepend_path("PATH", self.stage.path)
|
||||
env.prepend_path('PATH', self.stage.path)
|
||||
|
||||
def setup_run_environment(self, env):
|
||||
if "+clang" in self.spec:
|
||||
@@ -544,7 +531,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
define = CMakePackage.define
|
||||
from_variant = self.define_from_variant
|
||||
|
||||
python = spec["python"]
|
||||
python = spec['python']
|
||||
cmake_args = [
|
||||
define("LLVM_REQUIRES_RTTI", True),
|
||||
define("LLVM_ENABLE_RTTI", True),
|
||||
@@ -557,13 +544,14 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
define("LIBOMP_HWLOC_INSTALL_DIR", spec["hwloc"].prefix),
|
||||
]
|
||||
|
||||
version_suffix = spec.variants["version_suffix"].value
|
||||
if version_suffix != "none":
|
||||
cmake_args.append(define("LLVM_VERSION_SUFFIX", version_suffix))
|
||||
version_suffix = spec.variants['version_suffix'].value
|
||||
if version_suffix != 'none':
|
||||
cmake_args.append(define('LLVM_VERSION_SUFFIX', version_suffix))
|
||||
|
||||
shlib_symbol_version = spec.variants.get("shlib_symbol_version", None)
|
||||
if shlib_symbol_version is not None and shlib_symbol_version.value != "none":
|
||||
cmake_args.append(define("LLVM_SHLIB_SYMBOL_VERSION", shlib_symbol_version.value))
|
||||
shlib_symbol_version = spec.variants.get('shlib_symbol_version', None)
|
||||
if shlib_symbol_version is not None and shlib_symbol_version.value != 'none':
|
||||
cmake_args.append(define('LLVM_SHLIB_SYMBOL_VERSION',
|
||||
shlib_symbol_version.value))
|
||||
|
||||
if python.version >= Version("3"):
|
||||
cmake_args.append(define("Python3_EXECUTABLE", python.command.path))
|
||||
@@ -574,56 +562,47 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
runtimes = []
|
||||
|
||||
if "+cuda" in spec:
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
|
||||
define(
|
||||
"LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
|
||||
",".join(spec.variants["cuda_arch"].value),
|
||||
),
|
||||
define(
|
||||
"CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
|
||||
"sm_{0}".format(spec.variants["cuda_arch"].value[-1]),
|
||||
),
|
||||
]
|
||||
)
|
||||
cmake_args.extend([
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix),
|
||||
define("LIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES",
|
||||
",".join(spec.variants["cuda_arch"].value)),
|
||||
define("CLANG_OPENMP_NVPTX_DEFAULT_ARCH",
|
||||
"sm_{0}".format(spec.variants["cuda_arch"].value[-1])),
|
||||
])
|
||||
if "+omp_as_runtime" in spec:
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
# work around bad libelf detection in libomptarget
|
||||
define(
|
||||
"LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include
|
||||
),
|
||||
]
|
||||
)
|
||||
cmake_args.extend([
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
# work around bad libelf detection in libomptarget
|
||||
define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
|
||||
spec["libelf"].prefix.include),
|
||||
])
|
||||
else:
|
||||
# still build libomptarget but disable cuda
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_SDK_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
|
||||
define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
|
||||
]
|
||||
)
|
||||
cmake_args.extend([
|
||||
define("CUDA_TOOLKIT_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_SDK_ROOT_DIR", "IGNORE"),
|
||||
define("CUDA_NVCC_EXECUTABLE", "IGNORE"),
|
||||
define("LIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES", "IGNORE"),
|
||||
])
|
||||
|
||||
cmake_args.append(from_variant("LIBOMPTARGET_ENABLE_DEBUG", "omp_debug"))
|
||||
|
||||
if "+lldb" in spec:
|
||||
projects.append("lldb")
|
||||
cmake_args.append(define("LLDB_ENABLE_LIBEDIT", True))
|
||||
cmake_args.append(define("LLDB_ENABLE_NCURSES", True))
|
||||
cmake_args.append(define("LLDB_ENABLE_LIBXML2", False))
|
||||
if spec.version >= Version("10"):
|
||||
cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", "python"))
|
||||
cmake_args.append(define('LLDB_ENABLE_LIBEDIT', True))
|
||||
cmake_args.append(define('LLDB_ENABLE_NCURSES', True))
|
||||
cmake_args.append(define('LLDB_ENABLE_LIBXML2', False))
|
||||
if spec.version >= Version('10'):
|
||||
cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", 'python'))
|
||||
else:
|
||||
cmake_args.append(define("LLDB_DISABLE_PYTHON", "~python" in spec))
|
||||
cmake_args.append(define("LLDB_DISABLE_PYTHON", '~python' in spec))
|
||||
if spec.satisfies("@5.0.0: +python"):
|
||||
cmake_args.append(define("LLDB_USE_SYSTEM_SIX", True))
|
||||
|
||||
if "+gold" in spec:
|
||||
cmake_args.append(define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include))
|
||||
cmake_args.append(
|
||||
define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include)
|
||||
)
|
||||
|
||||
if "+clang" in spec:
|
||||
projects.append("clang")
|
||||
@@ -633,10 +612,10 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
else:
|
||||
projects.append("openmp")
|
||||
|
||||
if "@8" in spec:
|
||||
cmake_args.append(from_variant("CLANG_ANALYZER_ENABLE_Z3_SOLVER", "z3"))
|
||||
elif "@9:" in spec:
|
||||
cmake_args.append(from_variant("LLVM_ENABLE_Z3_SOLVER", "z3"))
|
||||
if '@8' in spec:
|
||||
cmake_args.append(from_variant('CLANG_ANALYZER_ENABLE_Z3_SOLVER', 'z3'))
|
||||
elif '@9:' in spec:
|
||||
cmake_args.append(from_variant('LLVM_ENABLE_Z3_SOLVER', 'z3'))
|
||||
|
||||
if "+flang" in spec:
|
||||
projects.append("flang")
|
||||
@@ -655,26 +634,26 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
projects.append("polly")
|
||||
cmake_args.append(define("LINK_POLLY_INTO_TOOLS", True))
|
||||
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("BUILD_SHARED_LIBS", False),
|
||||
from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
|
||||
from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
|
||||
from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
|
||||
# By default on Linux, libc++.so is a ldscript. CMake fails to add
|
||||
# CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
|
||||
# into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
|
||||
define("LIBCXX_ENABLE_STATIC_ABI_LIBRARY", True),
|
||||
]
|
||||
)
|
||||
cmake_args.extend([
|
||||
define("BUILD_SHARED_LIBS", False),
|
||||
from_variant("LLVM_BUILD_LLVM_DYLIB", "llvm_dylib"),
|
||||
from_variant("LLVM_LINK_LLVM_DYLIB", "link_llvm_dylib"),
|
||||
from_variant("LLVM_USE_SPLIT_DWARF", "split_dwarf"),
|
||||
# By default on Linux, libc++.so is a ldscript. CMake fails to add
|
||||
# CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a
|
||||
# into libc++.so, linking with -lc++ or -stdlib=libc++ is enough.
|
||||
define('LIBCXX_ENABLE_STATIC_ABI_LIBRARY', True)
|
||||
])
|
||||
|
||||
cmake_args.append(define("LLVM_TARGETS_TO_BUILD", get_llvm_targets_to_build(spec)))
|
||||
cmake_args.append(define(
|
||||
"LLVM_TARGETS_TO_BUILD",
|
||||
get_llvm_targets_to_build(spec)))
|
||||
|
||||
cmake_args.append(from_variant("LIBOMP_TSAN_SUPPORT", "omp_tsan"))
|
||||
|
||||
if self.compiler.name == "gcc":
|
||||
compiler = Executable(self.compiler.cc)
|
||||
gcc_output = compiler("-print-search-dirs", output=str, error=str)
|
||||
gcc_output = compiler('-print-search-dirs', output=str, error=str)
|
||||
|
||||
for line in gcc_output.splitlines():
|
||||
if line.startswith("install:"):
|
||||
@@ -686,7 +665,7 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
cmake_args.append(define("GCC_INSTALL_PREFIX", gcc_prefix))
|
||||
|
||||
if self.spec.satisfies("~code_signing platform=darwin"):
|
||||
cmake_args.append(define("LLDB_USE_SYSTEM_DEBUGSERVER", True))
|
||||
cmake_args.append(define('LLDB_USE_SYSTEM_DEBUGSERVER', True))
|
||||
|
||||
# Semicolon seperated list of projects to enable
|
||||
cmake_args.append(define("LLVM_ENABLE_PROJECTS", projects))
|
||||
@@ -710,24 +689,20 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
# rebuild libomptarget to get bytecode runtime library files
|
||||
with working_dir(ompdir, create=True):
|
||||
cmake_args = [
|
||||
"-G",
|
||||
"Ninja",
|
||||
define("CMAKE_BUILD_TYPE", spec.variants["build_type"].value),
|
||||
'-G', 'Ninja',
|
||||
define('CMAKE_BUILD_TYPE', spec.variants['build_type'].value),
|
||||
define("CMAKE_C_COMPILER", spec.prefix.bin + "/clang"),
|
||||
define("CMAKE_CXX_COMPILER", spec.prefix.bin + "/clang++"),
|
||||
define("CMAKE_INSTALL_PREFIX", spec.prefix),
|
||||
define("CMAKE_PREFIX_PATH", prefix_paths),
|
||||
define('CMAKE_PREFIX_PATH', prefix_paths)
|
||||
]
|
||||
cmake_args.extend(self.cmake_args())
|
||||
cmake_args.extend(
|
||||
[
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
define(
|
||||
"LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR", spec["libelf"].prefix.include
|
||||
),
|
||||
self.stage.source_path + "/openmp",
|
||||
]
|
||||
)
|
||||
cmake_args.extend([
|
||||
define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True),
|
||||
define("LIBOMPTARGET_DEP_LIBELF_INCLUDE_DIR",
|
||||
spec["libelf"].prefix.include),
|
||||
self.stage.source_path + "/openmp",
|
||||
])
|
||||
|
||||
cmake(*cmake_args)
|
||||
ninja()
|
||||
@@ -742,22 +717,22 @@ class Llvm(CMakePackage, CudaPackage):
|
||||
install_tree("bin", join_path(self.prefix, "libexec", "llvm"))
|
||||
|
||||
def llvm_config(self, *args, **kwargs):
|
||||
lc = Executable(self.prefix.bin.join("llvm-config"))
|
||||
if not kwargs.get("output"):
|
||||
kwargs["output"] = str
|
||||
lc = Executable(self.prefix.bin.join('llvm-config'))
|
||||
if not kwargs.get('output'):
|
||||
kwargs['output'] = str
|
||||
ret = lc(*args, **kwargs)
|
||||
if kwargs.get("result") == "list":
|
||||
if kwargs.get('result') == "list":
|
||||
return ret.split()
|
||||
else:
|
||||
return ret
|
||||
|
||||
|
||||
def get_llvm_targets_to_build(spec):
|
||||
targets = spec.variants["targets"].value
|
||||
targets = spec.variants['targets'].value
|
||||
|
||||
# Build everything?
|
||||
if "all" in targets:
|
||||
return "all"
|
||||
if 'all' in targets:
|
||||
return 'all'
|
||||
|
||||
# Convert targets variant values to CMake LLVM_TARGETS_TO_BUILD array.
|
||||
spack_to_cmake = {
|
||||
@@ -778,10 +753,10 @@ def get_llvm_targets_to_build(spec):
|
||||
"systemz": "SystemZ",
|
||||
"webassembly": "WebAssembly",
|
||||
"x86": "X86",
|
||||
"xcore": "XCore",
|
||||
"xcore": "XCore"
|
||||
}
|
||||
|
||||
if "none" in targets:
|
||||
if 'none' in targets:
|
||||
llvm_targets = set()
|
||||
else:
|
||||
llvm_targets = set(spack_to_cmake[target] for target in targets)
|
||||
|
@@ -22,140 +22,127 @@ class PyTorch(PythonPackage, CudaPackage):
with strong GPU acceleration."""

homepage = "https://pytorch.org/"
git = "https://github.com/pytorch/pytorch.git"
git = "https://github.com/pytorch/pytorch.git"

maintainers("adamjstewart")
maintainers = ['adamjstewart']

# Exact set of modules is version- and variant-specific, just attempt to import the
# core libraries to ensure that the package was successfully installed.
import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"]
import_modules = ['torch', 'torch.autograd', 'torch.nn', 'torch.utils']

version("master", branch="master", submodules=True)
version("1.10.1", tag="v1.10.1", submodules=True)
version("1.10.0", tag="v1.10.0", submodules=True)
version("1.9.1", tag="v1.9.1", submodules=True)
version("1.9.0", tag="v1.9.0", submodules=True)
version("1.8.2", tag="v1.8.2", submodules=True)
version("1.8.1", tag="v1.8.1", submodules=True)
version("1.8.0", tag="v1.8.0", submodules=True)
version("1.7.1", tag="v1.7.1", submodules=True)
version("1.7.0", tag="v1.7.0", submodules=True)
version("1.6.0", tag="v1.6.0", submodules=True)
version("1.5.1", tag="v1.5.1", submodules=True)
version("1.5.0", tag="v1.5.0", submodules=True)
version("1.4.1", tag="v1.4.1", submodules=True)
version(
"1.4.0",
tag="v1.4.0",
submodules=True,
deprecated=True,
submodules_delete=["third_party/fbgemm"],
)
version("1.3.1", tag="v1.3.1", submodules=True)
version("1.3.0", tag="v1.3.0", submodules=True)
version("1.2.0", tag="v1.2.0", submodules=True)
version("1.1.0", tag="v1.1.0", submodules=True)
version("1.0.1", tag="v1.0.1", submodules=True)
version("1.0.0", tag="v1.0.0", submodules=True)
version(
"0.4.1",
tag="v0.4.1",
submodules=True,
deprecated=True,
submodules_delete=["third_party/nervanagpu"],
)
version("0.4.0", tag="v0.4.0", submodules=True, deprecated=True)
version("0.3.1", tag="v0.3.1", submodules=True, deprecated=True)
version('master', branch='master', submodules=True)
version('1.10.1', tag='v1.10.1', submodules=True)
version('1.10.0', tag='v1.10.0', submodules=True)
version('1.9.1', tag='v1.9.1', submodules=True)
version('1.9.0', tag='v1.9.0', submodules=True)
version('1.8.2', tag='v1.8.2', submodules=True)
version('1.8.1', tag='v1.8.1', submodules=True)
version('1.8.0', tag='v1.8.0', submodules=True)
version('1.7.1', tag='v1.7.1', submodules=True)
version('1.7.0', tag='v1.7.0', submodules=True)
version('1.6.0', tag='v1.6.0', submodules=True)
version('1.5.1', tag='v1.5.1', submodules=True)
version('1.5.0', tag='v1.5.0', submodules=True)
version('1.4.1', tag='v1.4.1', submodules=True)
version('1.4.0', tag='v1.4.0', submodules=True, deprecated=True,
submodules_delete=['third_party/fbgemm'])
version('1.3.1', tag='v1.3.1', submodules=True)
version('1.3.0', tag='v1.3.0', submodules=True)
version('1.2.0', tag='v1.2.0', submodules=True)
version('1.1.0', tag='v1.1.0', submodules=True)
version('1.0.1', tag='v1.0.1', submodules=True)
version('1.0.0', tag='v1.0.0', submodules=True)
version('0.4.1', tag='v0.4.1', submodules=True, deprecated=True,
submodules_delete=['third_party/nervanagpu'])
version('0.4.0', tag='v0.4.0', submodules=True, deprecated=True)
version('0.3.1', tag='v0.3.1', submodules=True, deprecated=True)

is_darwin = sys.platform == "darwin"
is_darwin = sys.platform == 'darwin'

# All options are defined in CMakeLists.txt.
# Some are listed in setup.py, but not all.
variant("caffe2", default=True, description="Build Caffe2")
variant("test", default=False, description="Build C++ test binaries")
variant("cuda", default=not is_darwin, description="Use CUDA")
variant("rocm", default=False, description="Use ROCm")
variant("cudnn", default=not is_darwin, description="Use cuDNN")
variant("fbgemm", default=True, description="Use FBGEMM (quantized 8-bit server operators)")
variant("kineto", default=True, description="Use Kineto profiling library")
variant("magma", default=not is_darwin, description="Use MAGMA")
variant("metal", default=is_darwin, description="Use Metal for Caffe2 iOS build")
variant("nccl", default=not is_darwin, description="Use NCCL")
variant("nnpack", default=True, description="Use NNPACK")
variant("numa", default=not is_darwin, description="Use NUMA")
variant("numpy", default=True, description="Use NumPy")
variant("openmp", default=True, description="Use OpenMP for parallel code")
variant("qnnpack", default=True, description="Use QNNPACK (quantized 8-bit operators)")
variant("valgrind", default=not is_darwin, description="Use Valgrind")
variant("xnnpack", default=True, description="Use XNNPACK")
variant("mkldnn", default=True, description="Use MKLDNN")
variant("distributed", default=not is_darwin, description="Use distributed")
variant("mpi", default=not is_darwin, description="Use MPI for Caffe2")
variant("gloo", default=not is_darwin, description="Use Gloo")
variant("tensorpipe", default=not is_darwin, description="Use TensorPipe")
variant("onnx_ml", default=True, description="Enable traditional ONNX ML API")
variant("breakpad", default=True, description="Enable breakpad crash dump library")
variant('caffe2', default=True, description='Build Caffe2')
variant('test', default=False, description='Build C++ test binaries')
variant('cuda', default=not is_darwin, description='Use CUDA')
variant('rocm', default=False, description='Use ROCm')
variant('cudnn', default=not is_darwin, description='Use cuDNN')
variant('fbgemm', default=True, description='Use FBGEMM (quantized 8-bit server operators)')
variant('kineto', default=True, description='Use Kineto profiling library')
variant('magma', default=not is_darwin, description='Use MAGMA')
variant('metal', default=is_darwin, description='Use Metal for Caffe2 iOS build')
variant('nccl', default=not is_darwin, description='Use NCCL')
variant('nnpack', default=True, description='Use NNPACK')
variant('numa', default=not is_darwin, description='Use NUMA')
variant('numpy', default=True, description='Use NumPy')
variant('openmp', default=True, description='Use OpenMP for parallel code')
variant('qnnpack', default=True, description='Use QNNPACK (quantized 8-bit operators)')
variant('valgrind', default=not is_darwin, description='Use Valgrind')
variant('xnnpack', default=True, description='Use XNNPACK')
variant('mkldnn', default=True, description='Use MKLDNN')
variant('distributed', default=not is_darwin, description='Use distributed')
variant('mpi', default=not is_darwin, description='Use MPI for Caffe2')
variant('gloo', default=not is_darwin, description='Use Gloo')
variant('tensorpipe', default=not is_darwin, description='Use TensorPipe')
variant('onnx_ml', default=True, description='Enable traditional ONNX ML API')
variant('breakpad', default=True, description='Enable breakpad crash dump library')

conflicts("+cuda", when="+rocm")
conflicts("+cudnn", when="~cuda")
conflicts("+magma", when="~cuda")
conflicts("+nccl", when="~cuda~rocm")
conflicts("+nccl", when="platform=darwin")
conflicts("+numa", when="platform=darwin", msg="Only available on Linux")
conflicts("+valgrind", when="platform=darwin", msg="Only available on Linux")
conflicts("+mpi", when="~distributed")
conflicts("+gloo", when="~distributed")
conflicts("+tensorpipe", when="~distributed")
conflicts("+kineto", when="@:1.7")
conflicts("+valgrind", when="@:1.7")
conflicts("~caffe2", when="@0.4.0:1.6")  # no way to disable caffe2?
conflicts("+caffe2", when="@:0.3.1")  # caffe2 did not yet exist?
conflicts("+tensorpipe", when="@:1.5")
conflicts("+xnnpack", when="@:1.4")
conflicts("~onnx_ml", when="@:1.4")  # no way to disable ONNX?
conflicts("+rocm", when="@:0.4")
conflicts("+cudnn", when="@:0.4")
conflicts("+fbgemm", when="@:0.4,1.4.0")
conflicts("+qnnpack", when="@:0.4")
conflicts("+mkldnn", when="@:0.4")
conflicts("+breakpad", when="@:1.9")  # Option appeared in 1.10.0
conflicts("+breakpad", when="target=ppc64:", msg="Unsupported")
conflicts("+breakpad", when="target=ppc64le:", msg="Unsupported")
conflicts('+cuda', when='+rocm')
conflicts('+cudnn', when='~cuda')
conflicts('+magma', when='~cuda')
conflicts('+nccl', when='~cuda~rocm')
conflicts('+nccl', when='platform=darwin')
conflicts('+numa', when='platform=darwin', msg='Only available on Linux')
conflicts('+valgrind', when='platform=darwin', msg='Only available on Linux')
conflicts('+mpi', when='~distributed')
conflicts('+gloo', when='~distributed')
conflicts('+tensorpipe', when='~distributed')
conflicts('+kineto', when='@:1.7')
conflicts('+valgrind', when='@:1.7')
conflicts('~caffe2', when='@0.4.0:1.6')  # no way to disable caffe2?
conflicts('+caffe2', when='@:0.3.1')  # caffe2 did not yet exist?
conflicts('+tensorpipe', when='@:1.5')
conflicts('+xnnpack', when='@:1.4')
conflicts('~onnx_ml', when='@:1.4')  # no way to disable ONNX?
conflicts('+rocm', when='@:0.4')
conflicts('+cudnn', when='@:0.4')
conflicts('+fbgemm', when='@:0.4,1.4.0')
conflicts('+qnnpack', when='@:0.4')
conflicts('+mkldnn', when='@:0.4')
conflicts('+breakpad', when='@:1.9')  # Option appeared in 1.10.0
conflicts('+breakpad', when='target=ppc64:', msg='Unsupported')
conflicts('+breakpad', when='target=ppc64le:', msg='Unsupported')

conflicts(
"cuda_arch=none",
when="+cuda",
msg="Must specify CUDA compute capabilities of your GPU, see "
"https://developer.nvidia.com/cuda-gpus",
)
conflicts('cuda_arch=none', when='+cuda',
msg='Must specify CUDA compute capabilities of your GPU, see '
'https://developer.nvidia.com/cuda-gpus')

# Required dependencies
depends_on("cmake@3.5:", type="build")
depends_on('cmake@3.5:', type='build')
# Use Ninja generator to speed up build times, automatically used if found
depends_on("ninja@1.5:", when="@1.1.0:", type="build")
depends_on('ninja@1.5:', when='@1.1.0:', type='build')
# See python_min_version in setup.py
depends_on("python@3.6.2:", when="@1.7.1:", type=("build", "link", "run"))
depends_on("python@3.6.1:", when="@1.6.0:1.7.0", type=("build", "link", "run"))
depends_on("python@3.5:", when="@1.5.0:1.5", type=("build", "link", "run"))
depends_on("python@2.7:2.8,3.5:", when="@1.4.0:1.4", type=("build", "link", "run"))
depends_on("python@2.7:2.8,3.5:3.7", when="@:1.3", type=("build", "link", "run"))
depends_on("py-setuptools", type=("build", "run"))
depends_on("py-future", when="@1.5:", type=("build", "run"))
depends_on("py-future", when="@1.1: ^python@:2", type=("build", "run"))
depends_on("py-pyyaml", type=("build", "run"))
depends_on("py-typing", when="@0.4: ^python@:3.4", type=("build", "run"))
depends_on("py-typing-extensions", when="@1.7:", type=("build", "run"))
depends_on("py-pybind11@2.6.2", when="@1.8.0:", type=("build", "link", "run"))
depends_on("py-pybind11@2.3.0", when="@1.1.0:1.7", type=("build", "link", "run"))
depends_on("py-pybind11@2.2.4", when="@1.0.0:1.0", type=("build", "link", "run"))
depends_on("py-pybind11@2.2.2", when="@0.4.0:0.4", type=("build", "link", "run"))
depends_on("py-dataclasses", when="@1.7: ^python@3.6.0:3.6", type=("build", "run"))
depends_on("py-tqdm", type="run")
depends_on("py-protobuf", when="@0.4:", type=("build", "run"))
depends_on("protobuf", when="@0.4:")
depends_on("blas")
depends_on("lapack")
depends_on("eigen", when="@0.4:")
depends_on('python@3.6.2:', when='@1.7.1:', type=('build', 'link', 'run'))
depends_on('python@3.6.1:', when='@1.6.0:1.7.0', type=('build', 'link', 'run'))
depends_on('python@3.5:', when='@1.5.0:1.5', type=('build', 'link', 'run'))
depends_on('python@2.7:2.8,3.5:', when='@1.4.0:1.4', type=('build', 'link', 'run'))
depends_on('python@2.7:2.8,3.5:3.7', when='@:1.3', type=('build', 'link', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-future', when='@1.5:', type=('build', 'run'))
depends_on('py-future', when='@1.1: ^python@:2', type=('build', 'run'))
depends_on('py-pyyaml', type=('build', 'run'))
depends_on('py-typing', when='@0.4: ^python@:3.4', type=('build', 'run'))
depends_on('py-typing-extensions', when='@1.7:', type=('build', 'run'))
depends_on('py-pybind11@2.6.2', when='@1.8.0:', type=('build', 'link', 'run'))
depends_on('py-pybind11@2.3.0', when='@1.1.0:1.7', type=('build', 'link', 'run'))
depends_on('py-pybind11@2.2.4', when='@1.0.0:1.0', type=('build', 'link', 'run'))
depends_on('py-pybind11@2.2.2', when='@0.4.0:0.4', type=('build', 'link', 'run'))
depends_on('py-dataclasses', when='@1.7: ^python@3.6.0:3.6', type=('build', 'run'))
depends_on('py-tqdm', type='run')
depends_on('py-protobuf', when='@0.4:', type=('build', 'run'))
depends_on('protobuf', when='@0.4:')
depends_on('blas')
depends_on('lapack')
depends_on('eigen', when='@0.4:')
# https://github.com/pytorch/pytorch/issues/60329
# depends_on('cpuinfo@2020-12-17', when='@1.8.0:')
# depends_on('cpuinfo@2020-06-11', when='@1.6.0:1.7')
@@ -165,30 +152,30 @@ class PyTorch(PythonPackage, CudaPackage):
# depends_on('sleef@3.4.0_2019-07-30', when='@1.6.0:1.7')
# https://github.com/Maratyszcza/FP16/issues/18
# depends_on('fp16@2020-05-14', when='@1.6.0:')
depends_on("pthreadpool@2021-04-13", when="@1.9.0:")
depends_on("pthreadpool@2020-10-05", when="@1.8.0:1.8")
depends_on("pthreadpool@2020-06-15", when="@1.6.0:1.7")
depends_on("psimd@2020-05-17", when="@1.6.0:")
depends_on("fxdiv@2020-04-17", when="@1.6.0:")
depends_on("benchmark", when="@1.6:+test")
depends_on('pthreadpool@2021-04-13', when='@1.9.0:')
depends_on('pthreadpool@2020-10-05', when='@1.8.0:1.8')
depends_on('pthreadpool@2020-06-15', when='@1.6.0:1.7')
depends_on('psimd@2020-05-17', when='@1.6.0:')
depends_on('fxdiv@2020-04-17', when='@1.6.0:')
depends_on('benchmark', when='@1.6:+test')

# Optional dependencies
depends_on("cuda@7.5:", when="+cuda", type=("build", "link", "run"))
depends_on("cuda@9:", when="@1.1:+cuda", type=("build", "link", "run"))
depends_on("cuda@9.2:", when="@1.6:+cuda", type=("build", "link", "run"))
depends_on("cudnn@6.0:7", when="@:1.0+cudnn")
depends_on("cudnn@7.0:7", when="@1.1.0:1.5+cudnn")
depends_on("cudnn@7.0:", when="@1.6.0:+cudnn")
depends_on("magma", when="+magma")
depends_on("nccl", when="+nccl")
depends_on("numactl", when="+numa")
depends_on("py-numpy", when="+numpy", type=("build", "run"))
depends_on("llvm-openmp", when="%apple-clang +openmp")
depends_on("valgrind", when="+valgrind")
depends_on('cuda@7.5:', when='+cuda', type=('build', 'link', 'run'))
depends_on('cuda@9:', when='@1.1:+cuda', type=('build', 'link', 'run'))
depends_on('cuda@9.2:', when='@1.6:+cuda', type=('build', 'link', 'run'))
depends_on('cudnn@6.0:7', when='@:1.0+cudnn')
depends_on('cudnn@7.0:7', when='@1.1.0:1.5+cudnn')
depends_on('cudnn@7.0:', when='@1.6.0:+cudnn')
depends_on('magma', when='+magma')
depends_on('nccl', when='+nccl')
depends_on('numactl', when='+numa')
depends_on('py-numpy', when='+numpy', type=('build', 'run'))
depends_on('llvm-openmp', when='%apple-clang +openmp')
depends_on('valgrind', when='+valgrind')
# https://github.com/pytorch/pytorch/issues/60332
# depends_on('xnnpack@2021-02-22', when='@1.8.0:+xnnpack')
# depends_on('xnnpack@2020-03-23', when='@1.6.0:1.7+xnnpack')
depends_on("mpi", when="+mpi")
depends_on('mpi', when='+mpi')
# https://github.com/pytorch/pytorch/issues/60270
# depends_on('gloo@2021-05-04', when='@1.9.0:+gloo')
# depends_on('gloo@2020-09-18', when='@1.7.0:1.8+gloo')
@@ -196,35 +183,31 @@ class PyTorch(PythonPackage, CudaPackage):
# https://github.com/pytorch/pytorch/issues/60331
# depends_on('onnx@1.8.0_2020-11-03', when='@1.8.0:+onnx_ml')
# depends_on('onnx@1.7.0_2020-05-31', when='@1.6.0:1.7+onnx_ml')
depends_on("mkl", when="+mkldnn")
depends_on('mkl', when='+mkldnn')

# Test dependencies
depends_on("py-hypothesis", type="test")
depends_on("py-six", type="test")
depends_on("py-psutil", type="test")
depends_on('py-hypothesis', type='test')
depends_on('py-six', type='test')
depends_on('py-psutil', type='test')

# Fix BLAS being overridden by MKL
# https://github.com/pytorch/pytorch/issues/60328
patch(
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/59220.patch",
sha256="e37afffe45cf7594c22050109942370e49983ad772d12ebccf508377dc9dcfc9",
when="@1.2.0:",
)
patch('https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/59220.patch',
sha256='e37afffe45cf7594c22050109942370e49983ad772d12ebccf508377dc9dcfc9',
when='@1.2.0:')

# Fixes build on older systems with glibc <2.12
patch(
"https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/55063.patch",
sha256="e17eaa42f5d7c18bf0d7c37d7b0910127a01ad53fdce3e226a92893356a70395",
when="@1.1.0:1.8.1",
)
patch('https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/55063.patch',
sha256='e17eaa42f5d7c18bf0d7c37d7b0910127a01ad53fdce3e226a92893356a70395',
when='@1.1.0:1.8.1')

# Fixes CMake configuration error when XNNPACK is disabled
# https://github.com/pytorch/pytorch/pull/35607
# https://github.com/pytorch/pytorch/pull/37865
patch("xnnpack.patch", when="@1.5.0:1.5")
patch('xnnpack.patch', when='@1.5.0:1.5')

# Fixes build error when ROCm is enabled for pytorch-1.5 release
patch("rocm.patch", when="@1.5.0:1.5+rocm")
patch('rocm.patch', when='@1.5.0:1.5+rocm')

# Fixes fatal error: sleef.h: No such file or directory
# https://github.com/pytorch/pytorch/pull/35359
@@ -233,56 +216,47 @@ class PyTorch(PythonPackage, CudaPackage):

# Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3
# https://github.com/pytorch/pytorch/pull/37086
patch(
"https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch",
sha256="17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049",
when="@1.1:1.5",
)
patch('https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch',
sha256='17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049',
when='@1.1:1.5')

# Removes duplicate definition of getCusparseErrorString
# https://github.com/pytorch/pytorch/issues/32083
patch("cusparseGetErrorString.patch", when="@0.4.1:1.0^cuda@10.1.243:")
patch('cusparseGetErrorString.patch', when='@0.4.1:1.0^cuda@10.1.243:')

# Fixes 'FindOpenMP.cmake'
# to detect openmp settings used by Fujitsu compiler.
patch("detect_omp_of_fujitsu_compiler.patch", when="%fj")
patch('detect_omp_of_fujitsu_compiler.patch', when='%fj')

# Fix compilation of +distributed~tensorpipe
# https://github.com/pytorch/pytorch/issues/68002
patch(
"https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch",
sha256="e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8",
when="@1.10.0+distributed~tensorpipe",
)
patch('https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch',
sha256='e69e41b5c171bfb00d1b5d4ee55dd5e4c8975483230274af4ab461acd37e40b8', when='@1.10.0+distributed~tensorpipe')

# Both build and install run cmake/make/make install
# Only run once to speed up build times
phases = ["install"]
phases = ['install']

@property
def libs(self):
root = join_path(
self.prefix, self.spec["python"].package.site_packages_dir, "torch", "lib"
)
return find_libraries("libtorch", root)
root = join_path(self.prefix, self.spec['python'].package.site_packages_dir,
'torch', 'lib')
return find_libraries('libtorch', root)

@property
def headers(self):
root = join_path(
self.prefix, self.spec["python"].package.site_packages_dir, "torch", "include"
)
root = join_path(self.prefix, self.spec['python'].package.site_packages_dir,
'torch', 'include')
headers = find_all_headers(root)
headers.directories = [root]
return headers

@when("@1.5.0:")
@when('@1.5.0:')
def patch(self):
# https://github.com/pytorch/pytorch/issues/52208
filter_file(
"torch_global_deps PROPERTIES LINKER_LANGUAGE C",
"torch_global_deps PROPERTIES LINKER_LANGUAGE CXX",
"caffe2/CMakeLists.txt",
)
filter_file('torch_global_deps PROPERTIES LINKER_LANGUAGE C',
'torch_global_deps PROPERTIES LINKER_LANGUAGE CXX',
'caffe2/CMakeLists.txt')

def setup_build_environment(self, env):
"""Set environment variables used to control the build.
@@ -295,8 +269,7 @@ class PyTorch(PythonPackage, CudaPackage):
most flags defined in ``CMakeLists.txt`` can be specified as
environment variables.
"""

def enable_or_disable(variant, keyword="USE", var=None, newer=False):
def enable_or_disable(variant, keyword='USE', var=None, newer=False):
"""Set environment variable to enable or disable support for a
particular variant.

@@ -311,135 +284,137 @@ class PyTorch(PythonPackage, CudaPackage):

# Version 1.1.0 switched from NO_* to USE_* or BUILD_*
# But some newer variants have always used USE_* or BUILD_*
if self.spec.satisfies("@1.1:") or newer:
if "+" + variant in self.spec:
env.set(keyword + "_" + var, "ON")
if self.spec.satisfies('@1.1:') or newer:
if '+' + variant in self.spec:
env.set(keyword + '_' + var, 'ON')
else:
env.set(keyword + "_" + var, "OFF")
env.set(keyword + '_' + var, 'OFF')
else:
if "+" + variant in self.spec:
env.unset("NO_" + var)
if '+' + variant in self.spec:
env.unset('NO_' + var)
else:
env.set("NO_" + var, "ON")
env.set('NO_' + var, 'ON')

# Build in parallel to speed up build times
env.set("MAX_JOBS", make_jobs)
env.set('MAX_JOBS', make_jobs)

# Spack logs have trouble handling colored output
env.set("COLORIZE_OUTPUT", "OFF")
env.set('COLORIZE_OUTPUT', 'OFF')

if self.spec.satisfies("@0.4:"):
enable_or_disable("test", keyword="BUILD")
if self.spec.satisfies('@0.4:'):
enable_or_disable('test', keyword='BUILD')

if self.spec.satisfies("@1.7:"):
enable_or_disable("caffe2", keyword="BUILD")
if self.spec.satisfies('@1.7:'):
enable_or_disable('caffe2', keyword='BUILD')

enable_or_disable("cuda")
if "+cuda" in self.spec:
enable_or_disable('cuda')
if '+cuda' in self.spec:
# cmake/public/cuda.cmake
# cmake/Modules_CUDA_fix/upstream/FindCUDA.cmake
env.unset("CUDA_ROOT")
torch_cuda_arch = ";".join(
"{0:.1f}".format(float(i) / 10.0) for i in self.spec.variants["cuda_arch"].value
)
env.set("TORCH_CUDA_ARCH_LIST", torch_cuda_arch)
env.unset('CUDA_ROOT')
torch_cuda_arch = ';'.join('{0:.1f}'.format(float(i) / 10.0) for i
in
self.spec.variants['cuda_arch'].value)
env.set('TORCH_CUDA_ARCH_LIST', torch_cuda_arch)

enable_or_disable("rocm")
enable_or_disable('rocm')

enable_or_disable("cudnn")
if "+cudnn" in self.spec:
enable_or_disable('cudnn')
if '+cudnn' in self.spec:
# cmake/Modules_CUDA_fix/FindCUDNN.cmake
env.set("CUDNN_INCLUDE_DIR", self.spec["cudnn"].prefix.include)
env.set("CUDNN_LIBRARY", self.spec["cudnn"].libs[0])
env.set('CUDNN_INCLUDE_DIR', self.spec['cudnn'].prefix.include)
env.set('CUDNN_LIBRARY', self.spec['cudnn'].libs[0])

enable_or_disable("fbgemm")
if self.spec.satisfies("@1.8:"):
enable_or_disable("kineto")
enable_or_disable("magma")
enable_or_disable("metal")
if self.spec.satisfies("@1.10:"):
enable_or_disable("breakpad")
enable_or_disable('fbgemm')
if self.spec.satisfies('@1.8:'):
enable_or_disable('kineto')
enable_or_disable('magma')
enable_or_disable('metal')
if self.spec.satisfies('@1.10:'):
enable_or_disable('breakpad')

enable_or_disable("nccl")
if "+nccl" in self.spec:
env.set("NCCL_LIB_DIR", self.spec["nccl"].libs.directories[0])
env.set("NCCL_INCLUDE_DIR", self.spec["nccl"].prefix.include)
enable_or_disable('nccl')
if '+nccl' in self.spec:
env.set('NCCL_LIB_DIR', self.spec['nccl'].libs.directories[0])
env.set('NCCL_INCLUDE_DIR', self.spec['nccl'].prefix.include)

# cmake/External/nnpack.cmake
enable_or_disable("nnpack")
enable_or_disable('nnpack')

enable_or_disable("numa")
if "+numa" in self.spec:
enable_or_disable('numa')
if '+numa' in self.spec:
# cmake/Modules/FindNuma.cmake
env.set("NUMA_ROOT_DIR", self.spec["numactl"].prefix)
env.set('NUMA_ROOT_DIR', self.spec['numactl'].prefix)

# cmake/Modules/FindNumPy.cmake
enable_or_disable("numpy")
enable_or_disable('numpy')
# cmake/Modules/FindOpenMP.cmake
enable_or_disable("openmp", newer=True)
enable_or_disable("qnnpack")
if self.spec.satisfies("@1.3:"):
enable_or_disable("qnnpack", var="PYTORCH_QNNPACK")
if self.spec.satisfies("@1.8:"):
enable_or_disable("valgrind")
if self.spec.satisfies("@1.5:"):
enable_or_disable("xnnpack")
enable_or_disable("mkldnn")
enable_or_disable("distributed")
enable_or_disable("mpi")
enable_or_disable('openmp', newer=True)
enable_or_disable('qnnpack')
if self.spec.satisfies('@1.3:'):
enable_or_disable('qnnpack', var='PYTORCH_QNNPACK')
if self.spec.satisfies('@1.8:'):
enable_or_disable('valgrind')
if self.spec.satisfies('@1.5:'):
enable_or_disable('xnnpack')
enable_or_disable('mkldnn')
enable_or_disable('distributed')
enable_or_disable('mpi')
# cmake/Modules/FindGloo.cmake
enable_or_disable("gloo", newer=True)
if self.spec.satisfies("@1.6:"):
enable_or_disable("tensorpipe")
enable_or_disable('gloo', newer=True)
if self.spec.satisfies('@1.6:'):
enable_or_disable('tensorpipe')

if "+onnx_ml" in self.spec:
env.set("ONNX_ML", "ON")
if '+onnx_ml' in self.spec:
env.set('ONNX_ML', 'ON')
else:
env.set("ONNX_ML", "OFF")
env.set('ONNX_ML', 'OFF')

if not self.spec.satisfies("@master"):
env.set("PYTORCH_BUILD_VERSION", self.version)
env.set("PYTORCH_BUILD_NUMBER", 0)
if not self.spec.satisfies('@master'):
env.set('PYTORCH_BUILD_VERSION', self.version)
env.set('PYTORCH_BUILD_NUMBER', 0)

# BLAS to be used by Caffe2
# Options defined in cmake/Dependencies.cmake and cmake/Modules/FindBLAS.cmake
if self.spec["blas"].name == "atlas":
env.set("BLAS", "ATLAS")
env.set("WITH_BLAS", "atlas")
elif self.spec["blas"].name in ["blis", "amdblis"]:
env.set("BLAS", "BLIS")
env.set("WITH_BLAS", "blis")
elif self.spec["blas"].name == "eigen":
env.set("BLAS", "Eigen")
elif self.spec["lapack"].name in ["libflame", "amdlibflame"]:
env.set("BLAS", "FLAME")
env.set("WITH_BLAS", "FLAME")
elif self.spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]:
env.set("BLAS", "MKL")
env.set("WITH_BLAS", "mkl")
elif self.spec["blas"].name == "openblas":
env.set("BLAS", "OpenBLAS")
env.set("WITH_BLAS", "open")
elif self.spec["blas"].name == "veclibfort":
env.set("BLAS", "vecLib")
env.set("WITH_BLAS", "veclib")
if self.spec['blas'].name == 'atlas':
env.set('BLAS', 'ATLAS')
env.set('WITH_BLAS', 'atlas')
elif self.spec['blas'].name in ['blis', 'amdblis']:
env.set('BLAS', 'BLIS')
env.set('WITH_BLAS', 'blis')
elif self.spec['blas'].name == 'eigen':
env.set('BLAS', 'Eigen')
elif self.spec['lapack'].name in ['libflame', 'amdlibflame']:
env.set('BLAS', 'FLAME')
env.set('WITH_BLAS', 'FLAME')
elif self.spec['blas'].name in [
'intel-mkl', 'intel-parallel-studio', 'intel-oneapi-mkl']:
env.set('BLAS', 'MKL')
env.set('WITH_BLAS', 'mkl')
elif self.spec['blas'].name == 'openblas':
env.set('BLAS', 'OpenBLAS')
env.set('WITH_BLAS', 'open')
elif self.spec['blas'].name == 'veclibfort':
env.set('BLAS', 'vecLib')
env.set('WITH_BLAS', 'veclib')
else:
env.set("BLAS", "Generic")
env.set("WITH_BLAS", "generic")
env.set('BLAS', 'Generic')
env.set('WITH_BLAS', 'generic')

# Don't use vendored third-party libraries when possible
env.set("BUILD_CUSTOM_PROTOBUF", "OFF")
env.set("USE_SYSTEM_NCCL", "ON")
env.set("USE_SYSTEM_EIGEN_INSTALL", "ON")
if self.spec.satisfies("@0.4:"):
env.set("pybind11_DIR", self.spec["py-pybind11"].prefix)
env.set("pybind11_INCLUDE_DIR", self.spec["py-pybind11"].prefix.include)
if self.spec.satisfies("@1.10:"):
env.set("USE_SYSTEM_PYBIND11", "ON")
env.set('BUILD_CUSTOM_PROTOBUF', 'OFF')
env.set('USE_SYSTEM_NCCL', 'ON')
env.set('USE_SYSTEM_EIGEN_INSTALL', 'ON')
if self.spec.satisfies('@0.4:'):
env.set('pybind11_DIR', self.spec['py-pybind11'].prefix)
env.set('pybind11_INCLUDE_DIR',
self.spec['py-pybind11'].prefix.include)
if self.spec.satisfies('@1.10:'):
env.set('USE_SYSTEM_PYBIND11', 'ON')
# https://github.com/pytorch/pytorch/issues/60334
# if self.spec.satisfies('@1.8:'):
# env.set('USE_SYSTEM_SLEEF', 'ON')
if self.spec.satisfies("@1.6:"):
if self.spec.satisfies('@1.6:'):
# env.set('USE_SYSTEM_LIBS', 'ON')
# https://github.com/pytorch/pytorch/issues/60329
# env.set('USE_SYSTEM_CPUINFO', 'ON')
@@ -447,26 +422,27 @@ class PyTorch(PythonPackage, CudaPackage):
# env.set('USE_SYSTEM_GLOO', 'ON')
# https://github.com/Maratyszcza/FP16/issues/18
# env.set('USE_SYSTEM_FP16', 'ON')
env.set("USE_SYSTEM_PTHREADPOOL", "ON")
env.set("USE_SYSTEM_PSIMD", "ON")
env.set("USE_SYSTEM_FXDIV", "ON")
env.set("USE_SYSTEM_BENCHMARK", "ON")
env.set('USE_SYSTEM_PTHREADPOOL', 'ON')
env.set('USE_SYSTEM_PSIMD', 'ON')
env.set('USE_SYSTEM_FXDIV', 'ON')
env.set('USE_SYSTEM_BENCHMARK', 'ON')
# https://github.com/pytorch/pytorch/issues/60331
# env.set('USE_SYSTEM_ONNX', 'ON')
# https://github.com/pytorch/pytorch/issues/60332
# env.set('USE_SYSTEM_XNNPACK', 'ON')

@run_before("install")
@run_before('install')
def build_amd(self):
if "+rocm" in self.spec:
python(os.path.join("tools", "amd_build", "build_amd.py"))
if '+rocm' in self.spec:
python(os.path.join('tools', 'amd_build', 'build_amd.py'))

@run_after("install")
@run_after('install')
@on_package_attributes(run_tests=True)
def install_test(self):
with working_dir("test"):
python("run_test.py")
with working_dir('test'):
python('run_test.py')

# Tests need to be re-added since `phases` was overridden
run_after("install")(PythonPackage._run_default_install_time_test_callbacks)
run_after("install")(PythonPackage.sanity_check_prefix)
run_after('install')(
PythonPackage._run_default_install_time_test_callbacks)
run_after('install')(PythonPackage.sanity_check_prefix)

File diff suppressed because it is too large

@@ -400,7 +400,7 @@ def test_sanitize_literals(env, exclude, include):
({"SHLVL": "1"}, ["SH.*"], [], [], ["SHLVL"]),
# Check we can include using a regex
({"SHLVL": "1"}, ["SH.*"], ["SH.*"], ["SHLVL"], []),
# Check regex to exclude Modules v4 related vars
# Check regex to exclude Environment Modules related vars
(
{"MODULES_LMALTNAME": "1", "MODULES_LMCONFLICT": "2"},
["MODULES_(.*)"],
@@ -415,6 +415,13 @@ def test_sanitize_literals(env, exclude, include):
[],
["A_modquar", "b_modquar", "C_modshare"],
),
(
{"__MODULES_LMTAG": "1", "__MODULES_LMPREREQ": "2"},
["__MODULES_(.*)"],
[],
[],
["__MODULES_LMTAG", "__MODULES_LMPREREQ"],
),
],
)
def test_sanitize_regex(env, exclude, include, expected, deleted):
@@ -489,3 +496,19 @@ def test_exclude_lmod_variables():
# Check that variables related to lmod are not in there
modifications = env.group_by_name()
assert not any(x.startswith("LMOD_") for x in modifications)


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.regression("13504")
def test_exclude_modules_variables():
# Construct the list of environment modifications
file = os.path.join(datadir, "sourceme_modules.sh")
env = EnvironmentModifications.from_sourcing_file(file)

# Check that variables related to modules are not in there
modifications = env.group_by_name()
assert not any(x.startswith("MODULES_") for x in modifications)
assert not any(x.startswith("__MODULES_") for x in modifications)
assert not any(x.startswith("BASH_FUNC_ml") for x in modifications)
assert not any(x.startswith("BASH_FUNC_module") for x in modifications)
assert not any(x.startswith("BASH_FUNC__module_raw") for x in modifications)

@@ -1386,6 +1386,30 @@ def test_single_external_implicit_install(install_mockery, explicit_args, is_exp
assert spack.store.db.get_record(pkg).explicit == is_explicit


@pytest.mark.skipif(
sys.platform == "win32",
reason="Windows breaks overwrite install due to prefix normalization inconsistencies",
)
def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch):
"""When overwrite installing something from sources, build deps should be installed."""
s = spack.spec.Spec("dtrun3").concretized()
create_installer([(s, {})]).install()

# Verify there is a pure build dep
edge = s.edges_to_dependencies(name="dtbuild3").pop()
assert edge.deptypes == ("build",)
build_dep = edge.spec

# Uninstall the build dep
build_dep.package.do_uninstall()

# Overwrite install the root dtrun3
create_installer([(s, {"overwrite": [s.dag_hash()]})]).install()

# Verify that the build dep was also installed.
assert build_dep.installed


@pytest.mark.parametrize("run_tests", [True, False])
def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, run_tests):
"""Confirm printing of install log skipped if not run/no failures."""

@@ -167,6 +167,46 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
assert len([x for x in content if 'append_path("SPACE", "qux", " ")' in x]) == 1
assert len([x for x in content if 'remove_path("SPACE", "qux", " ")' in x]) == 1

@pytest.mark.regression("11355")
def test_manpath_setup(self, modulefile_content, module_configuration):
"""Tests specific setup of MANPATH environment variable."""

module_configuration("autoload_direct")

# no manpath set by module
content = modulefile_content("mpileaks")
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0

# manpath set by module with prepend_path
content = modulefile_content("module-manpath-prepend")
assert (
len([x for x in content if 'prepend_path("MANPATH", "/path/to/man", ":")' in x]) == 1
)
assert (
len([x for x in content if 'prepend_path("MANPATH", "/path/to/share/man", ":")' in x])
== 1
)
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1

# manpath set by module with append_path
content = modulefile_content("module-manpath-append")
assert len([x for x in content if 'append_path("MANPATH", "/path/to/man", ":")' in x]) == 1
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1

# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if 'setenv("MANPATH", "/path/to/man")' in x]) == 1
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0

@pytest.mark.regression("29578")
def test_setenv_raw_value(self, modulefile_content, module_configuration):
"""Tests that we can set environment variable value without formatting it."""

module_configuration("autoload_direct")
content = modulefile_content("module-setenv-raw")

assert len([x for x in content if 'setenv("FOO", "{{name}}, {name}, {{}}, {}")' in x]) == 1

def test_help_message(self, modulefile_content, module_configuration):
"""Tests the generation of module help message."""

@@ -192,6 +232,18 @@ def test_help_message(self, modulefile_content, module_configuration):
)
assert help_msg in "".join(content)

content = modulefile_content("module-long-help target=core2")

help_msg = (
"help([[Name : module-long-help]])"
"help([[Version: 1.0]])"
"help([[Target : core2]])"
"help()"
"help([[Package to test long description message generated in modulefile."
"Message too long is wrapped over multiple lines.]])"
)
assert help_msg in "".join(content)

def test_exclude(self, modulefile_content, module_configuration):
"""Tests excluding the generation of selected modules."""
module_configuration("exclude")

@@ -29,7 +29,7 @@ def test_simple_case(self, modulefile_content, module_configuration):
module_configuration("autoload_direct")
content = modulefile_content(mpich_spec_string)

assert 'module-whatis "mpich @3.0.4"' in content
assert "module-whatis {mpich @3.0.4}" in content

def test_autoload_direct(self, modulefile_content, module_configuration):
"""Tests the automatic loading of direct dependencies."""
@@ -37,6 +37,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
module_configuration("autoload_direct")
content = modulefile_content(mpileaks_spec_string)

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

# dtbuild1 has
@@ -46,6 +51,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
# Just make sure the 'build' dependency is not there
content = modulefile_content("dtbuild1")

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

# The configuration file sets the verbose keyword to False
@@ -58,6 +68,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
module_configuration("autoload_all")
content = modulefile_content(mpileaks_spec_string)

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 5
assert len([x for x in content if "module load " in x]) == 5

# dtbuild1 has
@@ -67,6 +82,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
# Just make sure the 'build' dependency is not there
content = modulefile_content("dtbuild1")

assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2

def test_prerequisites_direct(self, modulefile_content, module_configuration):
@@ -92,17 +112,18 @@ def test_alter_environment(self, modulefile_content, module_configuration):
content = modulefile_content("mpileaks platform=test target=x86_64")

assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
assert len([x for x in content if 'setenv FOO "foo"' in x]) == 1
assert len([x for x in content if 'setenv OMPI_MCA_mpi_leave_pinned "1"' in x]) == 1
assert len([x for x in content if 'setenv OMPI_MCA_MPI_LEAVE_PINNED "1"' in x]) == 0
assert len([x for x in content if "setenv FOO {foo}" in x]) == 1
assert len([x for x in content if "setenv OMPI_MCA_mpi_leave_pinned {1}" in x]) == 1
assert len([x for x in content if "setenv OMPI_MCA_MPI_LEAVE_PINNED {1}" in x]) == 0
assert len([x for x in content if "unsetenv BAR" in x]) == 1
assert len([x for x in content if "setenv MPILEAKS_ROOT" in x]) == 1

content = modulefile_content("libdwarf platform=test target=core2")

assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
assert len([x for x in content if "setenv FOO {foo}" in x]) == 0
assert len([x for x in content if "unsetenv BAR" in x]) == 0
assert len([x for x in content if "depends-on foo/bar" in x]) == 1
assert len([x for x in content if "module load foo/bar" in x]) == 1
assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1

@@ -112,14 +133,63 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
module_configuration("module_path_separator")
content = modulefile_content("module-path-separator")

assert len([x for x in content if 'append-path --delim ":" COLON "foo"' in x]) == 1
assert len([x for x in content if 'prepend-path --delim ":" COLON "foo"' in x]) == 1
assert len([x for x in content if 'remove-path --delim ":" COLON "foo"' in x]) == 1
assert len([x for x in content if 'append-path --delim ";" SEMICOLON "bar"' in x]) == 1
assert len([x for x in content if 'prepend-path --delim ";" SEMICOLON "bar"' in x]) == 1
assert len([x for x in content if 'remove-path --delim ";" SEMICOLON "bar"' in x]) == 1
assert len([x for x in content if 'append-path --delim " " SPACE "qux"' in x]) == 1
assert len([x for x in content if 'remove-path --delim " " SPACE "qux"' in x]) == 1
assert len([x for x in content if "append-path --delim {:} COLON {foo}" in x]) == 1
assert len([x for x in content if "prepend-path --delim {:} COLON {foo}" in x]) == 1
assert len([x for x in content if "remove-path --delim {:} COLON {foo}" in x]) == 1
assert len([x for x in content if "append-path --delim {;} SEMICOLON {bar}" in x]) == 1
assert len([x for x in content if "prepend-path --delim {;} SEMICOLON {bar}" in x]) == 1
assert len([x for x in content if "remove-path --delim {;} SEMICOLON {bar}" in x]) == 1
assert len([x for x in content if "append-path --delim { } SPACE {qux}" in x]) == 1
assert len([x for x in content if "remove-path --delim { } SPACE {qux}" in x]) == 1

@pytest.mark.regression("11355")
def test_manpath_setup(self, modulefile_content, module_configuration):
"""Tests specific setup of MANPATH environment variable."""

module_configuration("autoload_direct")

# no manpath set by module
content = modulefile_content("mpileaks")
assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0

# manpath set by module with prepend-path
content = modulefile_content("module-manpath-prepend")
assert (
len([x for x in content if "prepend-path --delim {:} MANPATH {/path/to/man}" in x])
== 1
)
assert (
len(
[
x
for x in content
if "prepend-path --delim {:} MANPATH {/path/to/share/man}" in x
]
)
== 1
)
assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1

# manpath set by module with append-path
content = modulefile_content("module-manpath-append")
assert (
len([x for x in content if "append-path --delim {:} MANPATH {/path/to/man}" in x]) == 1
)
assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1

# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if "setenv MANPATH {/path/to/man}" in x]) == 1
assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0

@pytest.mark.regression("29578")
def test_setenv_raw_value(self, modulefile_content, module_configuration):
"""Tests that we can set environment variable value without formatting it."""

module_configuration("autoload_direct")
content = modulefile_content("module-setenv-raw")
|
||||
|
||||
assert len([x for x in content if "setenv FOO {{{name}}, {name}, {{}}, {}}" in x]) == 1
|
||||
|
||||
def test_help_message(self, modulefile_content, module_configuration):
|
||||
"""Tests the generation of module help message."""
|
||||
@@ -129,11 +199,11 @@ def test_help_message(self, modulefile_content, module_configuration):
|
||||
|
||||
help_msg = (
|
||||
"proc ModulesHelp { } {"
|
||||
' puts stderr "Name : mpileaks"'
|
||||
' puts stderr "Version: 2.3"'
|
||||
' puts stderr "Target : core2"'
|
||||
' puts stderr ""'
|
||||
' puts stderr "Mpileaks is a mock package that passes audits"'
|
||||
" puts stderr {Name : mpileaks}"
|
||||
" puts stderr {Version: 2.3}"
|
||||
" puts stderr {Target : core2}"
|
||||
" puts stderr {}"
|
||||
" puts stderr {Mpileaks is a mock package that passes audits}"
|
||||
"}"
|
||||
)
|
||||
assert help_msg in "".join(content)
|
||||
@@ -142,9 +212,23 @@ def test_help_message(self, modulefile_content, module_configuration):
|
||||
|
||||
help_msg = (
|
||||
"proc ModulesHelp { } {"
|
||||
' puts stderr "Name : libdwarf"'
|
||||
' puts stderr "Version: 20130729"'
|
||||
' puts stderr "Target : core2"'
|
||||
" puts stderr {Name : libdwarf}"
|
||||
" puts stderr {Version: 20130729}"
|
||||
" puts stderr {Target : core2}"
|
||||
"}"
|
||||
)
|
||||
assert help_msg in "".join(content)
|
||||
|
||||
content = modulefile_content("module-long-help target=core2")
|
||||
|
||||
help_msg = (
|
||||
"proc ModulesHelp { } {"
|
||||
" puts stderr {Name : module-long-help}"
|
||||
" puts stderr {Version: 1.0}"
|
||||
" puts stderr {Target : core2}"
|
||||
" puts stderr {}"
|
||||
" puts stderr {Package to test long description message generated in modulefile.}"
|
||||
" puts stderr {Message too long is wrapped over multiple lines.}"
|
||||
"}"
|
||||
)
|
||||
assert help_msg in "".join(content)
|
||||
@@ -299,14 +383,14 @@ def test_setup_environment(self, modulefile_content, module_configuration):
|
||||
content = modulefile_content("mpileaks")
|
||||
|
||||
assert len([x for x in content if "setenv FOOBAR" in x]) == 1
|
||||
assert len([x for x in content if 'setenv FOOBAR "mpileaks"' in x]) == 1
|
||||
assert len([x for x in content if "setenv FOOBAR {mpileaks}" in x]) == 1
|
||||
|
||||
spec = spack.spec.Spec("mpileaks")
|
||||
spec.concretize()
|
||||
content = modulefile_content(str(spec["callpath"]))
|
||||
|
||||
assert len([x for x in content if "setenv FOOBAR" in x]) == 1
|
||||
assert len([x for x in content if 'setenv FOOBAR "callpath"' in x]) == 1
|
||||
assert len([x for x in content if "setenv FOOBAR {callpath}" in x]) == 1
|
||||
|
||||
def test_override_config(self, module_configuration, factory):
|
||||
"""Tests overriding some sections of the configuration file."""
|
||||
@@ -347,7 +431,7 @@ def test_extend_context(self, modulefile_content, module_configuration):
|
||||
|
||||
assert 'puts stderr "sentence from package"' in content
|
||||
|
||||
short_description = 'module-whatis "This package updates the context for Tcl modulefiles."'
|
||||
short_description = "module-whatis {This package updates the context for Tcl modulefiles.}"
|
||||
assert short_description in content
|
||||
|
||||
@pytest.mark.regression("4400")
|
||||
@@ -394,10 +478,16 @@ def test_autoload_with_constraints(self, modulefile_content, module_configuratio
|
||||
|
||||
# Test the mpileaks that should have the autoloaded dependencies
|
||||
content = modulefile_content("mpileaks ^mpich2")
|
||||
assert len([x for x in content if "depends-on " in x]) == 2
|
||||
assert len([x for x in content if "module load " in x]) == 2
|
||||
|
||||
# Test the mpileaks that should NOT have the autoloaded dependencies
|
||||
content = modulefile_content("mpileaks ^mpich")
|
||||
assert (
|
||||
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
|
||||
== 0
|
||||
)
|
||||
assert len([x for x in content if "depends-on " in x]) == 0
|
||||
assert len([x for x in content if "module load " in x]) == 0
|
||||
|
||||
def test_modules_no_arch(self, factory, module_configuration):
|
||||
|
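Note: the common thread in the updated assertions above is a Tcl quoting change — generated modulefiles now wrap values in curly braces ({foo}) rather than double quotes ("foo"), because braces suppress Tcl substitution and pass characters like $, [ and " through verbatim. A hypothetical helper (illustration only, not spack's template code) shows the target shape:

    # illustration: brace-quoting keeps the value literal when Tcl evaluates it
    def tcl_setenv(name: str, value: str) -> str:
        return "setenv %s {%s}" % (name, value)

    assert tcl_setenv("FOO", "foo") == "setenv FOO {foo}"
    assert tcl_setenv("OMPI_MCA_mpi_leave_pinned", "1") == "setenv OMPI_MCA_mpi_leave_pinned {1}"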
@@ -173,6 +173,14 @@ def test_ensure_binary_is_relocatable(source_file, is_relocatable):
    assert relocatable == is_relocatable


@pytest.mark.requires_executables("patchelf", "strings", "file")
@skip_unless_linux
def test_patchelf_is_relocatable():
    patchelf = os.path.realpath(spack.relocate._patchelf())
    assert llnl.util.filesystem.is_exe(patchelf)
    spack.relocate.ensure_binary_is_relocatable(patchelf)


@skip_unless_linux
def test_ensure_binary_is_relocatable_errors(tmpdir):
    # The file passed in as argument must exist...
@@ -233,6 +241,30 @@ def test_normalize_relative_paths(start_path, relative_paths, expected):
    assert normalized == expected


def test_set_elf_rpaths(mock_patchelf):
    # Try to relocate a mock version of patchelf and check
    # the call made to patchelf itself
    patchelf = mock_patchelf("echo $@")
    rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
    output = spack.relocate._set_elf_rpaths(str(patchelf), rpaths)

    # Assert that the arguments of the call to patchelf are as expected
    assert "--force-rpath" in output
    assert "--set-rpath " + ":".join(rpaths) in output
    assert str(patchelf) in output


@skip_unless_linux
def test_set_elf_rpaths_warning(mock_patchelf):
    # Mock a failing patchelf command and ensure it warns users
    patchelf = mock_patchelf("exit 1")
    rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
    # To avoid using capfd in order to check if the warning was triggered
    # here we just check that output is not set
    output = spack.relocate._set_elf_rpaths(str(patchelf), rpaths)
    assert output is None


@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
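Note: since mock_patchelf("echo $@") merely echoes its arguments, the assertions in test_set_elf_rpaths pin down the patchelf invocation. Roughly (argument order beyond the asserted flags, and the concrete paths, are assumptions):

    # sketch of the command the asserts imply _set_elf_rpaths builds
    patchelf = "/path/to/patchelf"  # placeholder for the mocked executable
    rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
    command = [patchelf, "--force-rpath", "--set-rpath", ":".join(rpaths), "/path/to/binary"]
    print(" ".join(command))
    # -> /path/to/patchelf --force-rpath --set-rpath /usr/lib:/usr/lib64:/opt/local/lib /path/to/binary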
@@ -660,7 +660,6 @@ def test_spec_formatting(self, default_mock_concretization):
            ("{architecture.os}", "", "os", lambda spec: spec.architecture),
            ("{architecture.target}", "", "target", lambda spec: spec.architecture),
            ("{prefix}", "", "prefix", lambda spec: spec),
            ("{external}", "", "external", lambda spec: spec),  # test we print "False"
        ]

        hash_segments = [
@@ -119,10 +119,7 @@ def test_dump_environment(prepare_environment_for_tests, tmpdir):
    dumpfile_path = str(tmpdir.join("envdump.txt"))
    envutil.dump_environment(dumpfile_path)
    with open(dumpfile_path, "r") as dumpfile:
        if sys.platform == "win32":
            assert 'set "TEST_ENV_VAR={}"\n'.format(test_paths) in list(dumpfile)
        else:
            assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
        assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)


def test_reverse_environment_modifications(working_env):
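Note: with the platform branch gone, dump_environment now writes the same sh-style, source-able line on every platform. A minimal sketch of what the test expects (file name and variable are illustrative):

    import os
    import spack.util.environment as envutil

    os.environ["TEST_ENV_VAR"] = "/a:/b"
    envutil.dump_environment("envdump.txt")
    with open("envdump.txt") as f:
        # each variable is dumped as: NAME=value; export NAME
        assert "TEST_ENV_VAR=/a:/b; export TEST_ENV_VAR\n" in f.readlines()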
@@ -244,6 +244,7 @@ def check_ast_roundtrip(code1, filename="internal", mode="exec"):
    assert ast.dump(ast1) == ast.dump(ast2), error_msg


@pytest.mark.xfail(reason="https://github.com/spack/spack/pull/38424")
def test_core_lib_files():
    """Roundtrip source files from the Python core libs."""
    test_directories = [
@@ -17,6 +17,7 @@
import spack.package_base
import spack.spec
from spack.version import (
    SEMVER_REGEX,
    GitVersion,
    StandardVersion,
    Version,
@@ -935,7 +936,7 @@ def test_inclusion_upperbound():

@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_git_version_repo_attached_after_serialization(
    mock_git_version_info, mock_packages, monkeypatch
    mock_git_version_info, mock_packages, config, monkeypatch
):
    """Test that a GitVersion instance can be serialized and deserialized
    without losing its repository reference.
@@ -954,7 +955,9 @@ def test_git_version_repo_attached_after_serialization(


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_resolved_git_version_is_shown_in_str(mock_git_version_info, mock_packages, monkeypatch):
def test_resolved_git_version_is_shown_in_str(
    mock_git_version_info, mock_packages, config, monkeypatch
):
    """Test that a GitVersion from a commit without a user supplied version is printed
    as <hash>=<version>, and not just <hash>."""
    repo_path, _, commits = mock_git_version_info
@@ -968,9 +971,31 @@ def test_resolved_git_version_is_shown_in_str(mock_git_version_info, mock_packages, monkeypatch):
    assert str(spec.version) == f"{commit}=1.0-git.1"


def test_unresolvable_git_versions_error(mock_packages):
def test_unresolvable_git_versions_error(config, mock_packages):
    """Test that VersionLookupError is raised when a git prop is not set on a package."""
    with pytest.raises(VersionLookupError):
        # The package exists, but does not have a git property set. When dereferencing
        # the version, we should get VersionLookupError, not a generic AttributeError.
        spack.spec.Spec(f"git-test-commit@{'a' * 40}").version.ref_version


@pytest.mark.parametrize(
    "tag,expected",
    [
        ("v100.2.3", "100.2.3"),
        ("v1.2.3", "1.2.3"),
        ("v1.2.3-pre.release+build.1", "1.2.3-pre.release+build.1"),
        ("v1.2.3+build.1", "1.2.3+build.1"),
        ("v1.2.3+build_1", None),
        ("v1.2.3-pre.release", "1.2.3-pre.release"),
        ("v1.2.3-pre_release", None),
        ("1.2.3", "1.2.3"),
        ("1.2.3.", None),
    ],
)
def test_semver_regex(tag, expected):
    result = SEMVER_REGEX.search(tag)
    if expected is None:
        assert result is None
    else:
        assert result.group() == expected
@@ -552,3 +552,8 @@ def traverse_tree(specs, cover="nodes", deptype="all", key=id, depth_first=True):
        return traverse_breadth_first_tree_nodes(None, edges)

    return traverse_edges(specs, order="pre", cover=cover, deptype=deptype, key=key, depth=True)


def by_dag_hash(s: "spack.spec.Spec") -> str:
    """Used very often as a key function for traversals."""
    return s.dag_hash()
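Note: a sketch of how by_dag_hash is meant to be used, assuming spack.traverse.traverse_nodes and a concretized root spec (both assumptions here, not shown in the hunk):

    import spack.spec
    import spack.traverse as traverse

    roots = [spack.spec.Spec("zlib").concretized()]  # illustrative root spec
    # Keying on the DAG hash deduplicates nodes that compare equal but are
    # distinct Python objects, e.g. specs rebuilt from JSON.
    names = [s.name for s in traverse.traverse_nodes(roots, key=traverse.by_dag_hash)]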
@@ -171,11 +171,7 @@ def path_put_first(var_name: str, directories: List[Path]):
BASH_FUNCTION_FINDER = re.compile(r"BASH_FUNC_(.*?)\(\)")


def _win_env_var_to_set_line(var: str, val: str) -> str:
    return f'set "{var}={val}"'


def _nix_env_var_to_source_line(var: str, val: str) -> str:
def _env_var_to_source_line(var: str, val: str) -> str:
    if var.startswith("BASH_FUNC"):
        source_line = "function {fname}{decl}; export -f {fname}".format(
            fname=BASH_FUNCTION_FINDER.sub(r"\1", var), decl=val
@@ -185,13 +181,6 @@ def _nix_env_var_to_source_line(var: str, val: str) -> str:
    return source_line


def _env_var_to_source_line(var: str, val: str) -> str:
    if sys.platform == "win32":
        return _win_env_var_to_set_line(var, val)
    else:
        return _nix_env_var_to_source_line(var, val)


@system_path_filter(arg_slice=slice(1))
def dump_environment(path: Path, environment: Optional[MutableMapping[str, str]] = None):
    """Dump an environment dictionary to a source-able file.
@@ -351,13 +340,20 @@ def execute(self, env: MutableMapping[str, str]):


class SetEnv(NameValueModifier):
    __slots__ = ("force",)
    __slots__ = ("force", "raw")

    def __init__(
        self, name: str, value: str, *, trace: Optional[Trace] = None, force: bool = False
        self,
        name: str,
        value: str,
        *,
        trace: Optional[Trace] = None,
        force: bool = False,
        raw: bool = False,
    ):
        super().__init__(name, value, trace=trace)
        self.force = force
        self.raw = raw

    def execute(self, env: MutableMapping[str, str]):
        tty.debug(f"SetEnv: {self.name}={str(self.value)}", level=3)
@@ -501,15 +497,16 @@ def _trace(self) -> Optional[Trace]:
        return Trace(filename=filename, lineno=lineno, context=current_context)

    @system_env_normalize
    def set(self, name: str, value: str, *, force: bool = False):
    def set(self, name: str, value: str, *, force: bool = False, raw: bool = False):
        """Stores a request to set an environment variable.

        Args:
            name: name of the environment variable
            value: value of the environment variable
            force: if True, audit will not consider this modification a warning
            raw: if True, format of value string is skipped
        """
        item = SetEnv(name, value, trace=self._trace(), force=force)
        item = SetEnv(name, value, trace=self._trace(), force=force, raw=raw)
        self.env_modifications.append(item)

    @system_env_normalize
@@ -768,16 +765,21 @@ def from_sourcing_file(
            "PS1",
            "PS2",
            "ENV",
            # Environment modules v4
            # Environment Modules or Lmod
            "LOADEDMODULES",
            "_LMFILES_",
            "BASH_FUNC_module()",
            "MODULEPATH",
            "MODULES_(.*)",
            r"(\w*)_mod(quar|share)",
            # Lmod configuration
            r"LMOD_(.*)",
            "MODULERCFILE",
            "BASH_FUNC_ml()",
            "BASH_FUNC_module()",
            # Environment Modules-specific configuration
            "MODULESHOME",
            "BASH_FUNC__module_raw()",
            r"MODULES_(.*)",
            r"__MODULES_(.*)",
            r"(\w*)_mod(quar|share)",
            # Lmod-specific configuration
            r"LMOD_(.*)",
        ]
    )
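Note: a short sketch of the new raw flag on EnvironmentModifications.set (the class holding env_modifications above; apply_modifications is its usual entry point):

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications()
    env.set("FOO", "plain-value")                              # value may be formatted
    env.set("BAR", "{{{name}}, {name}, {{}}, {}}", raw=True)   # stored verbatim
    env.apply_modifications()  # BAR keeps its literal braces, as test_setenv_raw_value checks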
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import math
import os
import shutil

@@ -151,18 +152,17 @@ def __exit__(cm, type, value, traceback):

        return WriteTransaction(self._get_lock(key), acquire=WriteContextManager)

    def mtime(self, key):
        """Return modification time of cache file, or 0 if it does not exist.
    def mtime(self, key) -> float:
        """Return modification time of cache file, or -inf if it does not exist.

        Time is in units returned by os.stat in the mtime field, which is
        platform-dependent.

        """
        if not self.init_entry(key):
            return 0
            return -math.inf
        else:
            sinfo = os.stat(self.cache_path(key))
            return sinfo.st_mtime
        return os.stat(self.cache_path(key)).st_mtime

    def remove(self, key):
        file = self.cache_path(key)
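Note: -math.inf is a strictly safer "missing" sentinel than 0 here: 0 is itself a valid mtime (the epoch), and pre-1970 files have negative mtimes, so a 0 sentinel could make a missing cache entry look newer than a real file. A self-contained illustration:

    import math

    def is_cache_stale(cache_mtime: float, source_mtime: float) -> bool:
        # A missing cache entry reported as -inf is older than any real mtime.
        return cache_mtime < source_mtime

    assert is_cache_stale(-math.inf, 0.0)    # missing entry: always stale
    assert not is_cache_stale(100.0, 50.0)   # cache newer than source: fresh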
@@ -454,6 +454,10 @@ def visit_ClassDef(self, node):
            self.fill("@")
            self.dispatch(deco)
        self.fill("class " + node.name)
        if getattr(node, "type_params", False):
            self.write("[")
            interleave(lambda: self.write(", "), self.dispatch, node.type_params)
            self.write("]")
        with self.delimit_if("(", ")", condition=node.bases or node.keywords):
            comma = False
            for e in node.bases:
@@ -499,6 +503,10 @@ def __FunctionDef_helper(self, node, fill_suffix):
            self.dispatch(deco)
        def_str = fill_suffix + " " + node.name
        self.fill(def_str)
        if getattr(node, "type_params", False):
            self.write("[")
            interleave(lambda: self.write(", "), self.dispatch, node.type_params)
            self.write("]")
        with self.delimit("(", ")"):
            self.dispatch(node.args)
        if getattr(node, "returns", False):
@@ -1249,3 +1257,27 @@ def visit_MatchOr(self, node):
        with self.require_parens(_Precedence.BOR, node):
            self.set_precedence(pnext(_Precedence.BOR), *node.patterns)
            interleave(lambda: self.write(" | "), self.dispatch, node.patterns)

    def visit_TypeAlias(self, node):
        self.fill("type ")
        self.dispatch(node.name)
        if node.type_params:
            self.write("[")
            interleave(lambda: self.write(", "), self.dispatch, node.type_params)
            self.write("]")
        self.write(" = ")
        self.dispatch(node.value)

    def visit_TypeVar(self, node):
        self.write(node.name)
        if node.bound:
            self.write(": ")
            self.dispatch(node.bound)

    def visit_TypeVarTuple(self, node):
        self.write("*")
        self.write(node.name)

    def visit_ParamSpec(self, node):
        self.write("**")
        self.write(node.name)
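Note: the four visitors added here cover the PEP 695 syntax from Python 3.12. The statement below exercises all of them; on a 3.12 interpreter the stdlib roundtrip mirrors what this unparser is expected to emit (a sketch, not the project's test code):

    import ast

    src = "type Alias[T: int, *Ts, **P] = list[T]"  # parses only on Python >= 3.12
    assert ast.unparse(ast.parse(src)) == src  # TypeAlias/TypeVar/TypeVarTuple/ParamSpec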
@@ -40,11 +40,16 @@
SEGMENT_REGEX = re.compile(r"(?:(?P<num>[0-9]+)|(?P<str>[a-zA-Z]+))(?P<sep>[_.-]*)")

# regular expression for semantic versioning
SEMVER_REGEX = re.compile(
    ".+(?P<semver>([0-9]+)[.]([0-9]+)[.]([0-9]+)"
    "(?:-([0-9A-Za-z-]+(?:[.][0-9A-Za-z-]+)*))?"
    "(?:[+][0-9A-Za-z-]+)?)"
)
_VERSION_CORE = r"\d+\.\d+\.\d+"
_IDENT = r"[0-9A-Za-z-]+"
_SEPARATED_IDENT = rf"{_IDENT}(?:\.{_IDENT})*"
_PRERELEASE = rf"\-{_SEPARATED_IDENT}"
_BUILD = rf"\+{_SEPARATED_IDENT}"
_SEMVER = rf"{_VERSION_CORE}(?:{_PRERELEASE})?(?:{_BUILD})?"

# clamp on the end, so versions like v1.2.3-rc1 will match
# without the leading 'v'.
SEMVER_REGEX = re.compile(rf"{_SEMVER}$")

# Infinity-like versions. The order in the list implies the comparison rules
infinity_versions = ["stable", "trunk", "head", "master", "main", "develop"]
@@ -1319,11 +1324,10 @@ def lookup_ref(self, ref) -> Tuple[Optional[str], int]:
                    commit_to_version[tag_commit] = v
                    break
            else:
                # try to parse tag to copare versions spack does not know
                match = SEMVER_REGEX.match(tag)
                # try to parse tag to compare versions spack does not know
                match = SEMVER_REGEX.search(tag)
                if match:
                    semver = match.groupdict()["semver"]
                    commit_to_version[tag_commit] = semver
                    commit_to_version[tag_commit] = match.group()

        ancestor_commits = []
        for tag_commit in commit_to_version:
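Note: switching from match to search, combined with the $ anchor, is what skips a leading "v" (or any prefix) while still rejecting trailing junk; the behavior matches the test_semver_regex table above:

    import re

    _VERSION_CORE = r"\d+\.\d+\.\d+"
    _IDENT = r"[0-9A-Za-z-]+"
    _SEPARATED_IDENT = rf"{_IDENT}(?:\.{_IDENT})*"
    _SEMVER = rf"{_VERSION_CORE}(?:\-{_SEPARATED_IDENT})?(?:\+{_SEPARATED_IDENT})?"
    SEMVER_REGEX = re.compile(rf"{_SEMVER}$")

    assert SEMVER_REGEX.search("v1.2.3-rc1").group() == "1.2.3-rc1"  # prefix skipped
    assert SEMVER_REGEX.search("v1.2.3-pre_release") is None         # '_' not an identifier char
    assert SEMVER_REGEX.search("1.2.3.") is None                     # trailing '.' rejected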
@@ -141,12 +141,29 @@ ignore_missing_imports = true
ignore_errors = true
ignore_missing_imports = true

# pytest (which we depend on) optionally imports numpy, which requires Python 3.8 in
# recent versions. mypy still imports its .pyi file, which has positional-only
# arguments, which don't work in 3.7, which causes mypy to bail out early if you have
# numpy installed.
# Spack imports a number of external packages, and they *may* require Python 3.8 or
# higher in recent versions. This can cause mypy to fail because we check for 3.7
# compatibility. We could restrict mypy to run for the oldest supported version (3.7),
# but that means most developers won't be able to run mypy, which means it'll fail
# more in CI. Instead, we exclude these imported packages from mypy checking.
[[tool.mypy.overrides]]
module = 'numpy'
module = [
    'IPython',
    'altgraph',
    'attr',
    'boto3',
    'botocore',
    'distro',
    'jinja2',
    'jsonschema',
    'macholib',
    'markupsafe',
    'numpy',
    'pyristent',
    'pytest',
    'ruamel.yaml',
    'six',
]
follow_imports = 'skip'
follow_imports_for_stubs = true
@@ -14,26 +14,6 @@ default:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "x86_64_v3"

.linux_skylake:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "skylake_avx512"

.linux_icelake:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "icelake"

.linux_neoverse_n1:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "neoverse_n1"

.linux_neoverse_v1:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "neoverse_v1"

.linux_aarch64:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
@@ -128,38 +108,6 @@ default:
  extends: [ ".base-job", ".generate-base" ]
  tags: ["spack", "public", "medium", "x86_64"]

.darwin-generate-base:
  stage: generate
  script:
    - export SPACK_DISABLE_LOCAL_CONFIG=1
    - export SPACK_USER_CACHE_PATH=$(pwd)/_user_cache
    - uname -a || true
    - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
    - nproc || true
    - . "./share/spack/setup-env.sh"
    - spack --version
    - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
    - spack env activate --without-view .
    - spack -d ci generate --check-index-only
      --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
      --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
      --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
  after_script:
    - cat /proc/loadavg || true
  artifacts:
    paths:
      - "${CI_PROJECT_DIR}/jobs_scratch_dir"
  interruptible: true
  timeout: 60 minutes
  retry:
    max: 2
    when:
      - always

.darwin-generate:
  extends: [ ".base-job", ".darwin-generate-base" ]


.generate-deprecated:
  extends: [ ".base-job" ]
  stage: generate
@@ -268,6 +216,124 @@ protected-publish:
#     - artifacts: True
#       job: my-super-cool-stack-generate

########################################
# E4S Mac Stack
#
# With no near-future plans to have
# protected aws runners running mac
# builds, it seems best to decouple
# them from the rest of the stacks for
# the time being. This way they can
# still run on UO runners and be signed
# using the previous approach.
########################################
# .e4s-mac:
#   variables:
#     SPACK_CI_STACK_NAME: e4s-mac
#   allow_failure: True

# .mac-pr:
#   only:
#     - /^pr[\d]+_.*$/
#     - /^github\/pr[\d]+_.*$/
#   variables:
#     SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}"
#     SPACK_PRUNE_UNTOUCHED: "True"

# .mac-protected:
#   only:
#     - /^develop$/
#     - /^releases\/v.*/
#     - /^v.*/
#     - /^github\/develop$/
#   variables:
#     SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"

# .mac-pr-build:
#   extends: [ ".mac-pr", ".build" ]
#   variables:
#     AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
#     AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
# .mac-protected-build:
#   extends: [ ".mac-protected", ".build" ]
#   variables:
#     AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
#     AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
#     SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY}

# e4s-mac-pr-generate:
#   extends: [".e4s-mac", ".mac-pr"]
#   stage: generate
#   script:
#     - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
#     - . "./share/spack/setup-env.sh"
#     - spack --version
#     - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
#     - spack env activate --without-view .
#     - spack ci generate --check-index-only
#       --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
#       --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
#       --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
#   artifacts:
#     paths:
#       - "${CI_PROJECT_DIR}/jobs_scratch_dir"
#   tags:
#     - lambda
#   interruptible: true
#   retry:
#     max: 2
#     when:
#       - runner_system_failure
#       - stuck_or_timeout_failure
#   timeout: 60 minutes

# e4s-mac-protected-generate:
#   extends: [".e4s-mac", ".mac-protected"]
#   stage: generate
#   script:
#     - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
#     - . "./share/spack/setup-env.sh"
#     - spack --version
#     - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
#     - spack env activate --without-view .
#     - spack ci generate --check-index-only
#       --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
#       --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
#   artifacts:
#     paths:
#       - "${CI_PROJECT_DIR}/jobs_scratch_dir"
#   tags:
#     - omicron
#   interruptible: true
#   retry:
#     max: 2
#     when:
#       - runner_system_failure
#       - stuck_or_timeout_failure
#   timeout: 60 minutes

# e4s-mac-pr-build:
#   extends: [ ".e4s-mac", ".mac-pr-build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: e4s-mac-pr-generate
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: e4s-mac-pr-generate

# e4s-mac-protected-build:
#   extends: [ ".e4s-mac", ".mac-protected-build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: e4s-mac-protected-generate
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: e4s-mac-protected-generate

########################################
# E4S pipeline
########################################
@@ -676,28 +742,6 @@ ml-linux-x86_64-rocm-build:
    - artifacts: True
      job: ml-linux-x86_64-rocm-generate

########################################
# Machine Learning - Darwin aarch64 (MPS)
########################################
.ml-darwin-aarch64-mps:
  variables:
    SPACK_CI_STACK_NAME: ml-darwin-aarch64-mps

ml-darwin-aarch64-mps-generate:
  tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ]
  extends: [ ".ml-darwin-aarch64-mps", ".darwin-generate"]

ml-darwin-aarch64-mps-build:
  extends: [ ".ml-darwin-aarch64-mps", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: ml-darwin-aarch64-mps-generate
    strategy: depend
  needs:
    - artifacts: True
      job: ml-darwin-aarch64-mps-generate

########################################
# Deprecated CI testing
########################################
@@ -718,100 +762,3 @@ deprecated-ci-build:
  needs:
    - artifacts: True
      job: deprecated-ci-generate

########################################
# AWS PCLUSTER
########################################

.aws-pcluster-generate-image:
  image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }

.aws-pcluster-generate:
  before_script:
    # Use gcc from local container buildcache
    - - . "./share/spack/setup-env.sh"
      - . /etc/profile.d/modules.sh
      - spack mirror add local-cache /bootstrap/local-cache
      - spack gpg trust /bootstrap/public-key
      - cd "${CI_PROJECT_DIR}" && curl -sOL https://raw.githubusercontent.com/spack/spack-configs/main/AWS/parallelcluster/postinstall.sh
      - sed -i -e "s/spack arch -t/echo ${SPACK_TARGET_ARCH}/g" postinstall.sh
      - /bin/bash postinstall.sh -fg
      - spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
  after_script:
    - - mv "${CI_PROJECT_DIR}/postinstall.sh" "${CI_PROJECT_DIR}/jobs_scratch_dir/"

# Icelake (one pipeline per target)
.aws-pcluster-icelake:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-icelake

aws-pcluster-generate-icelake:
  extends: [ ".linux_icelake", ".aws-pcluster-icelake", ".generate", ".tags-x86_64_v4", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]

aws-pcluster-build-icelake:
  extends: [ ".linux_icelake", ".aws-pcluster-icelake", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: aws-pcluster-generate-icelake
    strategy: depend
  needs:
    - artifacts: True
      job: aws-pcluster-generate-icelake

# Skylake_avx512 (one pipeline per target)
.aws-pcluster-skylake:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-skylake

aws-pcluster-generate-skylake:
  extends: [ ".linux_skylake", ".aws-pcluster-skylake", ".generate", ".tags-x86_64_v4", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]

aws-pcluster-build-skylake:
  extends: [ ".linux_skylake", ".aws-pcluster-skylake", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: aws-pcluster-generate-skylake
    strategy: depend
  needs:
    - artifacts: True
      job: aws-pcluster-generate-skylake

# Neoverse_n1 (one pipeline per target)
.aws-pcluster-neoverse_n1:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-neoverse_n1

aws-pcluster-generate-neoverse_n1:
  extends: [ ".linux_neoverse_n1", ".aws-pcluster-neoverse_n1", ".generate-aarch64", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]

aws-pcluster-build-neoverse_n1:
  extends: [ ".linux_neoverse_n1", ".aws-pcluster-neoverse_n1", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: aws-pcluster-generate-neoverse_n1
    strategy: depend
  needs:
    - artifacts: True
      job: aws-pcluster-generate-neoverse_n1

# Neoverse_v1 (one pipeline per target)
.aws-pcluster-neoverse_v1:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-neoverse_v1

aws-pcluster-generate-neoverse_v1:
  extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".generate-aarch64", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]

aws-pcluster-build-neoverse_v1:
  extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: aws-pcluster-generate-neoverse_v1
    strategy: depend
  needs:
    - artifacts: True
      job: aws-pcluster-generate-neoverse_v1

@@ -1,27 +0,0 @@
compilers:
- compiler:
    spec: apple-clang@14.0.0
    paths:
      cc: /usr/bin/clang
      cxx: /usr/bin/clang++
      f77: /opt/homebrew/bin/gfortran
      fc: /opt/homebrew/bin/gfortran
    flags: {}
    operating_system: ventura
    target: aarch64
    modules: []
    environment: {}
    extra_rpaths: []
- compiler:
    spec: gcc@12.2.0
    paths:
      cc: /opt/homebrew/bin/gcc-12
      cxx: /opt/homebrew/bin/g++-12
      f77: /opt/homebrew/bin/gfortran-12
      fc: /opt/homebrew/bin/gfortran-12
    flags: {}
    operating_system: ventura
    target: aarch64
    modules: []
    environment: {}
    extra_rpaths: []
@@ -1,3 +0,0 @@
config:
  install_tree:
    root: $spack/opt/spack

14  share/spack/gitlab/cloud_pipelines/configs/darwin/ci.yaml  Normal file
@@ -0,0 +1,14 @@
ci:
  pipeline-gen:
  - build-job:
      script: |
        - tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
        - . "./share/spack/setup-env.sh"
        - spack --version
        - spack arch
        - cd ${SPACK_CONCRETE_ENV_DIR}
        - spack env activate --without-view .
        - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
        - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
        - spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
      tags: ["lambda"]
@@ -1,11 +0,0 @@
ci:
  pipeline-gen:
  - any-job:
      variables:
        SPACK_TARGET_ARCH: icelake
  - build-job:
      before_script:
      - - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz" -o gmake.tar.gz
        - printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz" | sha256sum --check --strict --quiet
        - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
      tags: ["x86_64_v4"]
@@ -1,7 +0,0 @@
ci:
  pipeline-gen:
  - any-job:
      variables:
        SPACK_TARGET_ARCH: neoverse_n1
  - build-job:
      tags: ["aarch64", "graviton2"]
@@ -1,7 +0,0 @@
ci:
  pipeline-gen:
  - any-job:
      variables:
        SPACK_TARGET_ARCH: neoverse_v1
  - build-job:
      tags: ["aarch64", "graviton3"]
@@ -1,11 +0,0 @@
ci:
  pipeline-gen:
  - any-job:
      variables:
        SPACK_TARGET_ARCH: skylake_avx512
  - build-job:
      before_script:
      - - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz" -o gmake.tar.gz
        - printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz" | sha256sum --check --strict --quiet
        - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
      tags: ["x86_64_v4"]
@@ -225,7 +225,7 @@ spack:
      - - $compiler
      - - $target

  mirrors: { "mirror": "s3://spack-binaries/develop/aws-ahug-aarch64" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/aws-ahug-aarch64" }

  ci:
    pipeline-gen:
@@ -222,7 +222,7 @@ spack:
      - - $compiler
      - - $target

  mirrors: { "mirror": "s3://spack-binaries/develop/aws-ahug" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/aws-ahug" }

  ci:
    pipeline-gen:
@@ -132,7 +132,7 @@ spack:
      - - $target


  mirrors: { "mirror": "s3://spack-binaries/develop/aws-isc-aarch64" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/aws-isc-aarch64" }

  ci:
    pipeline-gen:
@@ -143,7 +143,7 @@ spack:
      - - $target


  mirrors: { "mirror": "s3://spack-binaries/develop/aws-isc" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/aws-isc" }

  ci:
    pipeline-gen:
@@ -1,55 +0,0 @@
spack:
  view: false

  definitions:
  - compiler_specs:
    - gcc
    - gettext

  - compiler_target:
    - '%gcc@7.3.1 target=x86_64_v3'

  - optimized_configs:
    # - gromacs
    - lammps
    # - mpas-model
    - openfoam
    # - palace
    # - py-devito
    # - quantum-espresso
    # - wrf

  - optimized_libs:
    - mpich
    - openmpi

  specs:
  - matrix:
    - - $compiler_specs
    - - $compiler_target
  - $optimized_configs
  # - $optimized_libs

  mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-icelake" }

  ci:
    pipeline-gen:
    - build-job:
        image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
        before_script:
        - - . "./share/spack/setup-env.sh"
          - . /etc/profile.d/modules.sh
          - spack --version
          - spack arch
          # Use gcc from local container buildcache
        - - spack mirror add local-cache /bootstrap/local-cache
          - spack gpg trust /bootstrap/public-key
        - - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
          - spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
    - signing-job:
        before_script:
        # Do not distribute Intel & ARM binaries
        - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
          - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
  cdash:
    build-group: AWS Packages
@@ -1,58 +0,0 @@
spack:
  view: false

  definitions:
  - compiler_specs:
    - gcc
    - gettext

  - compiler_target:
    - '%gcc@7.3.1 target=aarch64'

  - optimized_configs:
    - gromacs
    # - lammps
    # - mpas-model
    - openfoam
    - palace
    # - py-devito
    # - quantum-espresso
    # - wrf

  - optimized_libs:
    - mpich
    - openmpi

  specs:
  - matrix:
    - - $compiler_specs
    - - $compiler_target
  - $optimized_configs
  - $optimized_libs


  mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_n1" }

  ci:
    pipeline-gen:
    - build-job:
        image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
        tags: ["aarch64"]
        before_script:
        - - . "./share/spack/setup-env.sh"
          - . /etc/profile.d/modules.sh
          - spack --version
          - spack arch
          # Use gcc from local container buildcache
        - - spack mirror add local-cache /bootstrap/local-cache
          - spack gpg trust /bootstrap/public-key
        - - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
          - spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
    - signing-job:
        before_script:
        # Do not distribute Intel & ARM binaries
        - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
          - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done

  cdash:
    build-group: AWS Packages
@@ -1,58 +0,0 @@
spack:
  view: false

  definitions:
  - compiler_specs:
    - gcc
    - gettext

  - compiler_target:
    - '%gcc@7.3.1 target=aarch64'

  - optimized_configs:
    - gromacs
    # - lammps
    # - mpas-model
    - openfoam
    - palace
    # - py-devito
    # - quantum-espresso
    # - wrf

  - optimized_libs:
    - mpich
    - openmpi

  specs:
  - matrix:
    - - $compiler_specs
    - - $compiler_target
  - $optimized_configs
  - $optimized_libs


  mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_v1" }

  ci:
    pipeline-gen:
    - build-job:
        image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
        tags: ["aarch64"]
        before_script:
        - - . "./share/spack/setup-env.sh"
          - . /etc/profile.d/modules.sh
          - spack --version
          - spack arch
          # Use gcc from local container buildcache
        - - spack mirror add local-cache /bootstrap/local-cache
          - spack gpg trust /bootstrap/public-key
        - - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
          - spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
    - signing-job:
        before_script:
        # Do not distribute Intel & ARM binaries
        - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
          - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done

  cdash:
    build-group: AWS Packages
@@ -1,55 +0,0 @@
spack:
  view: false

  definitions:
  - compiler_specs:
    - gcc
    - gettext

  - compiler_target:
    - '%gcc@7.3.1 target=x86_64_v3'

  - optimized_configs:
    # - gromacs
    - lammps
    # - mpas-model
    - openfoam
    # - palace
    # - py-devito
    # - quantum-espresso
    # - wrf

  - optimized_libs:
    - mpich
    - openmpi

  specs:
  - matrix:
    - - $compiler_specs
    - - $compiler_target
  - $optimized_configs
  # - $optimized_libs

  mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-skylake" }

  ci:
    pipeline-gen:
    - build-job:
        image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }
        before_script:
        - - . "./share/spack/setup-env.sh"
          - . /etc/profile.d/modules.sh
          - spack --version
          - spack arch
          # Use gcc from local container buildcache
        - - spack mirror add local-cache /bootstrap/local-cache
          - spack gpg trust /bootstrap/public-key
        - - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
          - spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
    - signing-job:
        before_script:
        # Do not distribute Intel & ARM binaries
        - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
          - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
  cdash:
    build-group: AWS Packages
@@ -21,7 +21,7 @@ spack:
    - - $default_specs
    - - $arch

  mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/build_systems" }

  cdash:
    build-group: Build Systems

@@ -64,7 +64,7 @@ spack:
      - [$sdk_base_spec]
      - [$^visit_specs]

  mirrors: { "mirror": "s3://spack-binaries/develop/data-vis-sdk" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/data-vis-sdk" }

  ci:
    pipeline-gen:
@@ -21,7 +21,7 @@ spack:
    - readline

  mirrors:
    mirror: s3://spack-binaries/develop/deprecated
    mirror: s3://spack-binaries/releases/v0.20/deprecated
  gitlab-ci:
    broken-tests-packages:
    - gptune

40  share/spack/gitlab/cloud_pipelines/stacks/e4s-mac/spack.yaml  Normal file
@@ -0,0 +1,40 @@
spack:
  view: false
  packages:
    all:
      compiler: [apple-clang@13.1.6]
      target: [m1]

  definitions:
  - easy_specs:
    - berkeley-db
    - ncurses
    - gcc
    - py-jupyterlab
    - py-scipy
    - py-matplotlib
    - py-pandas

  - arch:
    - '%apple-clang@13.1.6 target=m1'

  specs:

  - matrix:
    - - $easy_specs
    - - $arch

  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/e4s-mac" }

  ci:
    pipeline-gen:
    - cleanup-job:
        before_script: |
          - export SPACK_USER_CACHE_PATH=$(pwd)/.spack-user-cache
          - export SPACK_USER_CONFIG_PATH=$(pwd)/.spack-user-config
          - . "./share/spack/setup-env.sh"
          - spack --version
        tags: [lambda]

  cdash:
    build-group: E4S Mac
@@ -263,7 +263,7 @@ spack:
  # SKIPPED
  # - flecsi # dependency pfunit marks oneapi as an unsupported compiler

  mirrors: { "mirror": "s3://spack-binaries/develop/e4s-oneapi" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/e4s-oneapi" }

  ci:
    pipeline-gen:
@@ -207,7 +207,7 @@ spack:
  # bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope


  mirrors: { "mirror": "s3://spack-binaries/develop/e4s-power" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/e4s-power" }

  ci:
    pipeline-gen:
@@ -70,9 +70,8 @@ spack:
  - charliecloud
  - conduit
  - datatransferkit
  - dealii
  - dyninst
  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14
  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp
  - exaworks
  - flecsi
  - flit
@@ -166,7 +165,7 @@ spack:
  - chai ~benchmarks ~tests +cuda ^umpire ~shared
  - cusz +cuda
  - dealii +cuda
  - ecp-data-vis-sdk +cuda ~ascent +adios2 +hdf5 +paraview +sz +vtkm +zfp ^hdf5@1.14 # Removing ascent because RAJA build failure
  - ecp-data-vis-sdk +cuda +adios2 +hdf5 +paraview +vtkm +zfp # Removing ascent because Dray is hung in CI. +ascent
  - flecsi +cuda
  - flux-core +cuda
  - ginkgo +cuda
@@ -200,7 +199,7 @@ spack:
  - cabana +rocm
  - caliper +rocm
  - chai ~benchmarks +rocm
  - ecp-data-vis-sdk +adios2 +hdf5 +paraview +pnetcdf +sz +vtkm +zfp +rocm ^hdf5@1.14 # Excludes ascent for now due to C++ standard issues
  - ecp-data-vis-sdk +paraview +vtkm +rocm
  - gasnet +rocm
  - ginkgo +rocm
  - heffte +rocm
@@ -233,7 +232,7 @@ spack:
  # CUDA failures
  #- parsec +cuda # parsec/mca/device/cuda/transfer.c:168: multiple definition of `parsec_CUDA_d2h_max_flows';

  mirrors: { "mirror": "s3://spack-binaries/develop/e4s" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/e4s" }

  ci:
    pipeline-gen:
@@ -51,7 +51,7 @@ spack:
  # FAILURES
  # - kokkos +wrapper +cuda cuda_arch=80 ^cuda@12.0.0 # https://github.com/spack/spack/issues/35378

  mirrors: { "mirror": "s3://spack-binaries/develop/gpu-tests" }
  mirrors: { "mirror": "s3://spack-binaries/releases/v0.20/gpu-tests" }

  ci:
    pipeline-gen:
Some files were not shown because too many files have changed in this diff.