Compare commits: bugfix/com... → my_branch
39 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | f70d2c7ccb |  |
|  | 62d18d9af7 |  |
|  | 8824040eda |  |
|  | d744b83584 |  |
|  | 14601b59ef |  |
|  | 8d7360106f |  |
|  | 881f184eab |  |
|  | 63bec85a24 |  |
|  | 159975a561 |  |
|  | 25ba7d071a |  |
|  | 54b79d5661 |  |
|  | 235f93c241 |  |
|  | a73c5ffb0b |  |
|  | ce53ce284b |  |
|  | ae68318475 |  |
|  | bd63c19b94 |  |
|  | 3f3bcacd16 |  |
|  | d349d4ab0b |  |
|  | e61b3c96f6 |  |
|  | bcbb0a3b85 |  |
|  | 3c556ab318 |  |
|  | 563ae5188e |  |
|  | f4b3561f71 |  |
|  | 0748a1b290 |  |
|  | 0bfece0c5e |  |
|  | 6e9b16279a |  |
|  | cf71baff30 |  |
|  | b9831acb44 |  |
|  | 5f2edb1860 |  |
|  | 8b6809cf66 |  |
|  | a93a4274aa |  |
|  | 59bb42ee7f |  |
|  | 11d382bff1 |  |
|  | 57889ec446 |  |
|  | 66bda49c44 |  |
|  | 94a2ab5359 |  |
|  | 6d0044f703 |  |
|  | 3fa447a84a |  |
|  | d501ce0c7e |  |
.github/workflows/bootstrap.yml (23 changes, vendored)

@@ -12,7 +12,6 @@ on:
       # built-in repository or documentation
       - 'var/spack/repos/builtin/**'
       - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
       - '!var/spack/repos/builtin/packages/clingo/**'
       - '!var/spack/repos/builtin/packages/python/**'
       - '!var/spack/repos/builtin/packages/re2c/**'
       - 'lib/spack/docs/**'
@@ -20,16 +19,11 @@ on:
     # nightly at 2:16 AM
     - cron: '16 2 * * *'

-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 jobs:

   fedora-clingo-sources:
     runs-on: ubuntu-latest
     container: "fedora:latest"
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -63,7 +57,6 @@ jobs:
   ubuntu-clingo-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -100,7 +93,6 @@ jobs:
   ubuntu-clingo-binaries-and-patchelf:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -134,7 +126,6 @@ jobs:
   opensuse-clingo-sources:
     runs-on: ubuntu-latest
     container: "opensuse/leap:latest"
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -163,7 +154,6 @@ jobs:

   macos-clingo-sources:
     runs-on: macos-latest
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -180,19 +170,17 @@ jobs:
           tree ~/.spack/bootstrap/store/

   macos-clingo-binaries:
-    runs-on: ${{ matrix.macos-version }}
+    runs-on: macos-latest
     strategy:
       matrix:
         python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
-        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
           brew install tree
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: ${{ matrix.python-version }}
       - name: Bootstrap clingo
@@ -207,11 +195,10 @@ jobs:
     strategy:
       matrix:
         python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
-    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: ${{ matrix.python-version }}
       - name: Setup repo
@@ -229,7 +216,6 @@ jobs:
   ubuntu-gnupg-binaries:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -264,7 +250,6 @@ jobs:
   ubuntu-gnupg-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -300,7 +285,6 @@ jobs:

   macos-gnupg-binaries:
     runs-on: macos-latest
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -318,7 +302,6 @@ jobs:

   macos-gnupg-sources:
     runs-on: macos-latest
-    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
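All of these workflows share the concurrency block being removed above: one group per workflow and pull request, so a newer push cancels still-running jobs for the same PR. A minimal standalone sketch of the pattern (hypothetical workflow, not part of this diff):

    name: example
    on: pull_request
    concurrency:
      group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
      cancel-in-progress: true
    jobs:
      build:
        runs-on: ubuntu-latest
        steps:
          - run: echo "only the newest run in this group survives"

`github.run_number` is the fallback key for non-PR events, so pushes to branches each get their own group and never cancel one another.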
.github/workflows/build-containers.yml (9 changes, vendored)

@@ -19,10 +19,6 @@ on:
   release:
     types: [published]

-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 jobs:
   deploy-images:
     runs-on: ubuntu-latest
@@ -47,7 +43,6 @@ jobs:
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
     name: Build ${{ matrix.dockerfile[0] }}
-    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
@@ -80,7 +75,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+        uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
        with:
          name: dockerfiles
          path: dockerfiles
@@ -99,7 +94,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Log in to DockerHub
-        if: github.event_name != 'pull_request'
+        if: ${{ github.event_name != 'pull_request' }}
         uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
.github/workflows/macos_python.yml (13 changes, vendored)

@@ -16,21 +16,16 @@ on:
       - '.github/workflows/macos_python.yml'
     # TODO: run if we touch any of the recipes involved in this

-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 # GitHub Action Limits
 # https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions

 jobs:
   install_gcc:
     name: gcc with clang
-    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: 3.9
       - name: spack install
@@ -41,12 +36,11 @@ jobs:

   install_jupyter_clang:
     name: jupyter
-    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     timeout-minutes: 700
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: 3.9
       - name: spack install
@@ -56,11 +50,10 @@ jobs:

   install_scipy_clang:
     name: scipy, mpl, pd
-    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: 3.9
       - name: spack install
.github/workflows/setup_git.ps1 (1 change, vendored)

@@ -4,7 +4,6 @@ Set-Location spack

 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
-git config --global core.longpaths true

 if ($(git branch --show-current) -ne "develop")
 {
.github/workflows/unit_tests.yaml (21 changes, vendored)

@@ -9,11 +9,6 @@ on:
     branches:
       - develop
       - releases/**

-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 jobs:
   # Validate that the code can be run on all the Python versions
   # supported by Spack
@@ -21,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: '3.10'
       - name: Install Python Packages
@@ -39,7 +34,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: '3.10'
       - name: Install Python packages
@@ -114,7 +109,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: ${{ matrix.python-version }}
       - name: Install System packages
@@ -179,7 +174,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: '3.10'
       - name: Install System packages
@@ -245,7 +240,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: '3.10'
       - name: Install System packages
@@ -294,7 +289,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: ${{ matrix.python-version }}
       - name: Install Python packages
@@ -337,7 +332,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
        with:
          python-version: '3.10'
       - name: Install Python packages
@@ -350,7 +345,7 @@ jobs:
           coverage run $(which spack) audit packages
           coverage combine
           coverage xml
-      - name: Package audits (without coverage)
+      - name: Package audits (wwithout coverage)
         if: ${{ needs.changes.outputs.with_coverage == 'false' }}
         run: |
           . share/spack/setup-env.sh
.github/workflows/windows_python.yml (23 changes, vendored)

@@ -9,11 +9,6 @@ on:
     branches:
       - develop
       - releases/**

-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 defaults:
   run:
     shell:
@@ -23,7 +18,7 @@ jobs:
     runs-on: windows-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python Packages
@@ -41,7 +36,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -63,7 +58,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -83,7 +78,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -103,7 +98,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -128,7 +123,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -144,11 +139,11 @@ jobs:
           echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
         env:
           ProgressPreference: SilentlyContinue
-      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+      - uses: actions/upload-artifact@v3
        with:
          name: Windows Spack Installer Bundle
          path: ${{ env.installer_root }}\pkg\Spack.exe
-      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+      - uses: actions/upload-artifact@v3
        with:
          name: Windows Spack Installer
          path: ${{ env.installer_root}}\pkg\Spack.msi
@@ -159,7 +154,7 @@ jobs:
     run:
       shell: pwsh
     steps:
-      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
        with:
          python-version: 3.9
       - name: Install Python packages
CHANGELOG.md (204 changes)

@@ -1,205 +1,3 @@
# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

## Major features in this release

1. **Concretizer now reuses by default**

   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
   is now the default concretization mode. Spack will try hard to
   resolve dependencies using installed packages or binaries (#30396).

   To avoid reuse and to use the latest package configurations (the
   old default), you can use `spack install --fresh`, or add
   configuration like this to your environment or `concretizer.yaml`:

   ```yaml
   concretizer:
     reuse: false
   ```

2. **Finer-grained hashes**

   Spack hashes now include `link`, `run`, *and* `build` dependencies,
   as well as a canonical hash of package recipes. Previously, hashes
   only included `link` and `run` dependencies (though `build`
   dependencies were stored by environments). We coarsened the hash to
   reduce churn in user installations, but the new default concretizer
   behavior mitigates this concern and gets us reuse *and* provenance.
   You will be able to see the build dependencies of new installations
   with `spack find`. Old installations will not change and their
   hashes will not be affected. (#28156, #28504, #30717, #30861)

3. **Improved error messages**

   Error handling with the new concretizer is now done with
   optimization criteria rather than with unsatisfiable cores, and
   Spack reports many more details about conflicting constraints.
   (#30669)

4. **Unify environments when possible**

   Environments have thus far supported `concretization: together` or
   `concretization: separately`. These have been replaced by a new
   preference in `concretizer.yaml`:

   ```yaml
   concretizer:
     unify: [true|false|when_possible]
   ```

   `concretizer:unify:when_possible` will *try* to resolve a fully
   unified environment, but if it cannot, it will create multiple
   configurations of some packages where it has to. For large
   environments that previously had to be concretized separately, this
   can result in a huge speedup (40-50x). (#28941)

5. **Automatically find externals on Cray machines**

   Spack can now automatically discover installed packages in the Cray
   Programming Environment by running `spack external find` (or `spack
   external read-cray-manifest` to *only* query the PE). Packages from
   the PE (e.g., `cray-mpich`) are added to the database with full
   dependency information, and compilers from the PE are added to
   `compilers.yaml`. Available with the June 2022 release of the Cray
   Programming Environment. (#24894, #30428)

6. **New binary format and hardened signing**

   Spack now has an updated binary format, with improvements for
   security. The new format has a detached signature file, and Spack
   verifies the signature before untarring or decompressing the binary
   package. The previous format embedded the signature in a `tar`
   file, which required the client to run `tar` *before* verifying
   (#30750). Spack can still install from build caches using the old
   format, but we encourage users to switch to the new format going
   forward.

   Production GitLab pipelines have been hardened to securely sign
   binaries. There is now a separate signing stage so that signing
   keys are never exposed to build system code, and signing keys are
   ephemeral and only live as long as the signing pipeline stage.
   (#30753)

7. **Bootstrap mirror generation**

   The `spack bootstrap mirror` command can automatically create a
   mirror for bootstrapping the concretizer and other needed
   dependencies in an air-gapped environment. (#28556)

8. **Nascent Windows support**

   Spack now has initial support for Windows. Spack core has been
   refactored to run in the Windows environment, and a small number of
   packages can now build for Windows. More details are
   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
   (#27021, #28385, many more)

9. **Makefile generation**

   `spack env depfile` can be used to generate a `Makefile` from an
   environment, which can be used to build the packages in the
   environment in parallel on a single node, e.g.:

   ```console
   spack -e myenv env depfile > Makefile
   make
   ```

   Spack propagates `gmake` jobserver information to builds so that
   their jobs can share cores. (#30039, #30254, #30302, #30526)

10. **New variant features**

    In addition to being conditional themselves, variants can now have
    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
    that are only possible for certain configurations of a package. (#29530)

    Variants can be
    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
    which prevents them from being enabled or disabled by the
    concretizer. Sticky variants must be set explicitly by users
    on the command line or in `packages.yaml`. (#28630)

    * Allow conditional possible values in variants
    * Add a "sticky" property to variants
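A minimal sketch of how these two variant features look in a hypothetical `package.py` (all names are illustrative; the syntax follows the packaging guide linked above):

```python
class Foo(Package):
    # Conditional possible value (#29530): "17" is only a valid cxxstd
    # when the package is at version 5 or newer.
    variant(
        "cxxstd",
        default="11",
        values=("11", "14", conditional("17", when="@5:")),
        multi=False,
        description="C++ standard",
    )

    # Sticky variant (#28630): the concretizer may never flip this value;
    # only an explicit user request on the CLI or in packages.yaml can.
    variant(
        "allow-unsupported-compilers",
        default=False,
        sticky=True,
        description="Allow compilers not officially supported",
    )
```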
|
## Other new features of note

* Environment views can optionally link only `run` dependencies
  with `link:run` (#29336)
* `spack external find --all` finds library-only packages in
  addition to build dependencies (#28005)
* Customizable `config:license_dir` option (#30135)
* `spack external find --path PATH` takes a custom search path (#30479)
* `spack spec` has a new `--format` argument like `spack find` (#27908)
* `spack concretize --quiet` skips printing concretized specs (#30272)
* `spack info` now has cleaner output and displays test info (#22097)
* Package-level submodule option for git commit versions (#30085, #30037)
* Using `/hash` syntax to refer to concrete specs in an environment
  now works even if `/hash` is not installed. (#30276)

## Major internal refactors

* full hash (see above)
* new develop versioning scheme `0.19.0-dev0`
* Allow for multiple dependencies/dependents from the same package (#28673)
* Splice differing virtual packages (#27919)

## Performance Improvements

* Concretization of large environments with `unify: when_possible` is
  much faster than concretizing separately (#28941, see above)
* Single-pass view generation algorithm is 2.6x faster (#29443)

## Archspec improvements

* `oneapi` and `dpcpp` flag support (#30783)
* better support for `M1` and `a64fx` (#30683)

## Removals and Deprecations

* Spack no longer supports Python `2.6` (#27256)
* Removed deprecated `--run-tests` option of `spack install`;
  use `spack test` (#30461)
* Removed deprecated `spack flake8`; use `spack style` (#27290)

* Deprecate `spack:concretization` config option; use
  `concretizer:unify` (#30038)
* Deprecate top-level module configuration; use module sets (#28659)
* `spack activate` and `spack deactivate` are deprecated in favor of
  environments; will be removed in `0.19.0` (#29430; see also `link:run`
  in #29336 above)

## Notable Bugfixes

* Fix bug that broke locks with many parallel builds (#27846)
* Many bugfixes and consistency improvements for the new concretizer
  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)

## Packages

* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
* Refactored `lua` support: `lua-lang` virtual supports both
  `lua` and `luajit` via new `LuaPackage` build system (#28854)
* PythonPackage: now installs packages with `pip` (#27798)
* Python: improve site_packages_dir handling (#28346)
* Extends: support spec, not just package name (#27754)
* `find_libraries`: search for both .so and .dylib on macOS (#28924)
* Use stable URLs and `?full_index=1` for all github patches (#29239)

## Spack community stats

* 6,416 total packages, 458 new since `v0.17.0`
* 219 new Python packages
* 60 new R packages
* 377 people contributed to this release
  * 337 committers to packages
  * 85 committers to core

# v0.17.2 (2022-04-13)

### Spack bugfixes
@@ -213,7 +11,7 @@
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)
@@ -6,15 +6,34 @@ bootstrap:
   # by Spack is installed in a "store" subfolder of this root directory
   root: $user_cache_path/bootstrap
   # Methods that can be used to bootstrap software. Each method may or
-  # may not be able to bootstrap all the software that Spack needs,
+  # may not be able to bootstrap all of the software that Spack needs,
   # depending on its type.
   sources:
   - name: 'github-actions-v0.2'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
+    type: buildcache
+    description: |
+      Buildcache generated from a public workflow using Github Actions.
+      The sha256 checksum of binaries is checked before installation.
+    info:
+      url: https://mirror.spack.io/bootstrap/github-actions/v0.2
+      homepage: https://github.com/spack/spack-bootstrap-mirrors
+      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
   - name: 'github-actions-v0.1'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
-  - name: 'spack-install'
-    metadata: $spack/share/spack/bootstrap/spack-install
+    type: buildcache
+    description: |
+      Buildcache generated from a public workflow using Github Actions.
+      The sha256 checksum of binaries is checked before installation.
+    info:
+      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
+      homepage: https://github.com/spack/spack-bootstrap-mirrors
+      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
+  # This method is just Spack bootstrapping the software it needs from sources.
+  # It has been added here so that users can selectively disable bootstrapping
+  # from sources by "untrusting" it.
+  - name: spack-install
+    type: install
+    description: |
+      Specs built from sources by Spack. May take a long time.
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
@@ -28,9 +28,3 @@ concretizer:
   # instance concretize with target "icelake" while running on "haswell").
   # If "true" only allow targets that are compatible with the host.
   host_compatible: true
-  # When "true" concretize root specs of environments together, so that each unique
-  # package in an environment corresponds to one concrete spec. This ensures
-  # environments can always be activated. When "false" perform concretization separately
-  # on each root spec, allowing different versions and variants of the same package in
-  # an environment.
-  unify: false
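On the newer side, this `unify` knob is the setting that replaced the per-environment `concretization` option (see the CHANGELOG section above). A minimal sketch of a user-scope override, e.g. in a hypothetical `~/.spack/concretizer.yaml`:

    concretizer:
      unify: when_possible   # also accepts true or false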
@@ -33,9 +33,6 @@ config:
   template_dirs:
     - $spack/share/spack/templates

-  # Directory where licenses should be located
-  license_dir: $spack/etc/spack/licenses
-
   # Temporary locations Spack can try to use for builds.
   #
   # Recommended options are given below.
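The `license_dir` entry being diffed here is the customizable `config:license_dir` option from v0.18 (#30135). A hypothetical site-level override would look like:

    config:
      license_dir: /shared/site/licenses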
@@ -50,13 +50,6 @@ build cache files for the "ninja" spec:

Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.

-.. warning::
-
-   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
-   will be able to verify and install binary caches both in the new and in the old format.
-   Support for using the old format is expected to end in v0.19, so we advise users to
-   recreate relevant buildcaches using Spack v0.18 or higher.
-
---------------------------------------
Finding or installing build cache files
---------------------------------------
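Registering and consuming such a cache looks roughly like this (a sketch with a hypothetical path; ``spack buildcache keys`` imports the signing keys used for verification):

.. code-block:: console

   $ spack mirror add local-cache file:///opt/buildcache
   $ spack buildcache keys --install --trust
   $ spack buildcache list
   $ spack install ninja   # prefers the cached binary when it matches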
@@ -1,160 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily, i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
     [B] MISSING "gpg2": required to sign/verify buildcaches

Spack will take care of bootstrapping any missing dependency marked as [B].
Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above, Spack detected that both ``clingo`` and ``gnupg``
are missing, and it gives detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretizes a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command, by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack  libassuan@2.5.5  libgpg-error@1.42  libksba@1.5.1  pinentry@1.1.1  zlib@1.2.11
   gnupg@2.3.1             libgcrypt@1.9.3  libiconv@1.16      npth@1.6       python@3.8

In case it is needed, you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default, Spack is
configured to try bootstrapping from pre-built binaries first and to
fall back to bootstrapping from sources if that fails.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back, you can run:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes, though, Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations, Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping:

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used, run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access, and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
@@ -39,7 +39,6 @@ on these ideas for each distinct build system that Spack supports:

   build_systems/autotoolspackage
   build_systems/cmakepackage
-   build_systems/cachedcmakepackage
   build_systems/mesonpackage
   build_systems/qmakepackage
   build_systems/sippackage
@@ -62,12 +61,11 @@ on these ideas for each distinct build system that Spack supports:

   build_systems/bundlepackage
   build_systems/cudapackage
-   build_systems/custompackage
   build_systems/inteloneapipackage
   build_systems/intelpackage
-   build_systems/multiplepackage
   build_systems/rocmpackage
-   build_systems/sourceforgepackage
+   build_systems/custompackage
+   build_systems/multiplepackage

For reference, the :py:mod:`Build System API docs <spack.build_systems>`
provide a list of build systems and methods/attributes that can be
@@ -1,123 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   EOF
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, and accepts all of the same options and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
you may need to add a few arguments yourself, and the
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible cmake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.

A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
       return entries

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
       return entries
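For orientation, these helpers render ordinary CMake initial-cache ``set()`` lines. The rendered strings below are an approximation for illustration, not copied from Spack's source:

.. code-block:: python

   cmake_cache_option('FOO_MPI', True, 'enable mpi')
   # -> 'set(FOO_MPI ON CACHE BOOL "enable mpi")'
   cmake_cache_string('FOO_BLAS', 'baz', 'Use baz')
   # -> 'set(FOO_BLAS baz CACHE STRING "Use baz")'
   cmake_cache_path('BAZ_PREFIX', '/path/to/baz')
   # -> 'set(BAZ_PREFIX /path/to/baz CACHE PATH "")'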
^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
@@ -84,8 +84,8 @@ build ``hdf5`` with Intel oneAPI MPI do::

   spack install hdf5 +mpi ^intel-oneapi-mpi

-Using Externally Installed oneAPI Tools
-=======================================
+Using an Externally Installed oneAPI
+====================================

Spack can also use oneAPI tools that are manually installed with
`Intel Installers`_. The procedures for configuring Spack to use
@@ -110,7 +110,7 @@ Another option is to manually add the configuration to

Libraries
---------

-If you want Spack to use oneMKL that you have installed without Spack in
+If you want Spack to use MKL that you have installed without Spack in
the default location, then add the following to
``~/.spack/packages.yaml``, adjusting the version as appropriate::
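A typical entry of this kind follows Spack's standard external-package syntax; a hypothetical example (adjust the version and prefix to your installation):

.. code-block:: yaml

   packages:
     intel-oneapi-mkl:
       externals:
       - spec: intel-oneapi-mkl@2022.1.0
         prefix: /opt/intel/oneapi
       buildable: false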
@@ -139,7 +139,7 @@ You can also use Spack-installed libraries. For example::

   spack load intel-oneapi-mkl

Will update your environment CPATH, LIBRARY_PATH, and other
-environment variables for building an application with oneMKL.
+environment variables for building an application with MKL.

More information
================
@@ -15,9 +15,6 @@ IntelPackage

Intel packages in Spack
^^^^^^^^^^^^^^^^^^^^^^^^

-This is an earlier version of Intel software development tools and has
-now been replaced by Intel oneAPI Toolkits.
-
Spack can install and use several software development products offered by Intel.
Some of these are available under no-cost terms, others require a paid license.
All share the same basic steps for configuration, installation, and, where
@@ -48,9 +48,8 @@ important to understand.

**build backend**
   Libraries used to define how to build a wheel. Examples
   include `setuptools <https://setuptools.pypa.io/>`__,
-   `flit <https://flit.readthedocs.io/>`_,
-   `poetry <https://python-poetry.org/>`_, and
-   `hatchling <https://hatch.pypa.io/latest/>`_.
+   `flit <https://flit.readthedocs.io/>`_, and
+   `poetry <https://python-poetry.org/>`_.

^^^^^^^^^^^
Downloading
^^^^^^^^^^^

@@ -327,33 +326,6 @@ for specifying the version requirements. Note that ``~=`` works
differently in poetry than in setuptools and flit for versions that
start with a zero.

-"""""""""
-hatchling
-"""""""""
-
-If the ``pyproject.toml`` lists ``hatchling.build`` as the
-``build-backend``, it uses the hatchling build system. Look for
-dependencies under the following keys:
-
-* ``requires-python``
-
-  This specifies the version of Python that is required.
-
-* ``project.dependencies``
-
-  These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
-
-* ``project.optional-dependencies``
-
-  This section includes keys with lists of optional dependencies
-  needed to enable those features. You should add a variant that
-  optionally adds these dependencies. This variant should be ``False``
-  by default.
-
-See https://hatch.pypa.io/latest/config/dependency/ for more
-information.
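Mapped into a recipe, those ``pyproject.toml`` keys translate to ``depends_on`` calls; a hypothetical hatchling-based package (all names illustrative) might contain:

.. code-block:: python

   class PyExample(PythonPackage):
       """Hypothetical package built with hatchling."""

       # [build-system] requires -> build dependency
       depends_on("py-hatchling", type="build")

       # requires-python -> constraint on the interpreter
       depends_on("python@3.8:", type=("build", "run"))

       # project.dependencies -> regular dependencies
       depends_on("py-requests", type=("build", "run"))

       # project.optional-dependencies -> an off-by-default variant
       variant("docs", default=False, description="Enable docs extras")
       depends_on("py-sphinx", type=("build", "run"), when="+docs")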
|
""""""
wheels
""""""

@@ -694,4 +666,3 @@ For more information on build backend tools, see:

* setuptools: https://setuptools.pypa.io/
* flit: https://flit.readthedocs.io/
* poetry: https://python-poetry.org/
-* hatchling: https://hatch.pypa.io/latest/
@@ -1,55 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _sourceforgepackage:

------------------
SourceforgePackage
------------------

``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.

^^^^^^^
Methods
^^^^^^^

This package provides a method for populating mirror URLs.

**urls**

This method returns a list of possible URLs for package source.
It is decorated with `property` so its results are treated as
a package attribute.

Refer to
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
for information on how Spack uses the `urls` attribute during
fetching.

^^^^^
Usage
^^^^^

This helper package can be added to your package by adding it as a base
class of your package and defining the relative location of an archive
file for one version of your software.

.. code-block:: python
   :emphasize-lines: 1,3

   class MyPackage(AutotoolsPackage, SourceforgePackage):
       ...
       sourceforge_mirror_path = "my-package/mypackage.1.0.0.tar.gz"
       ...

Over 40 packages use the ``SourceforgePackage`` mixin as of
July 2022, so there are plenty of existing packages to look at if you
want to see a real example.
@@ -59,8 +59,7 @@ other techniques to minimize the size of the final image:

   && echo "  specs:" \
   && echo "  - gromacs+mpi" \
   && echo "  - mpich" \
-  && echo "  concretizer:" \
-  && echo "    unify: true" \
+  && echo "  concretization: together" \
   && echo "  config:" \
   && echo "    install_tree: /opt/software" \
   && echo "  view: /opt/view") > /opt/spack-environment/spack.yaml

@@ -109,10 +108,9 @@ Spack Images on Docker Hub
--------------------------

Docker images with Spack preinstalled and ready to be used are
-built when a release is tagged, or nightly on ``develop``. The images
-are then pushed both to `Docker Hub <https://hub.docker.com/u/spack>`_
-and to `GitHub Container Registry <https://github.com/orgs/spack/packages?repo_name=spack>`_.
-The OS that are currently supported are summarized in the table below:
+built on `Docker Hub <https://hub.docker.com/u/spack>`_
+at every push to ``develop`` or to a release branch. The OS that
+are currently supported are summarized in the table below:

.. _containers-supported-os:

@@ -122,31 +120,22 @@ The OS that are currently supported are summarized in the table below:

   * - Operating System
     - Base Image
     - Spack Image
   * - Ubuntu 16.04
     - ``ubuntu:16.04``
     - ``spack/ubuntu-xenial``
   * - Ubuntu 18.04
     - ``ubuntu:18.04``
     - ``spack/ubuntu-bionic``
-   * - Ubuntu 20.04
-     - ``ubuntu:20.04``
-     - ``spack/ubuntu-focal``
-   * - Ubuntu 22.04
-     - ``ubuntu:22.04``
-     - ``spack/ubuntu-jammy``
   * - CentOS 7
     - ``centos:7``
     - ``spack/centos7``
-   * - CentOS Stream
-     - ``quay.io/centos/centos:stream``
-     - ``spack/centos-stream``
   * - openSUSE Leap
     - ``opensuse/leap``
     - ``spack/leap15``
   * - Amazon Linux 2
     - ``amazonlinux:2``
     - ``spack/amazon-linux``

All the images are tagged with the corresponding release of Spack:

-.. image:: images/ghcr_spack.png
+.. image:: dockerhub_spack.png

with the exception of the ``latest`` tag that points to the HEAD
of the ``develop`` branch. These images are available for anyone

@@ -256,8 +245,7 @@ software is respectively built and installed:

   && echo "  specs:" \
   && echo "  - gromacs+mpi" \
   && echo "  - mpich" \
-  && echo "  concretizer:" \
-  && echo "    unify: true" \
+  && echo "  concretization: together" \
   && echo "  config:" \
   && echo "    install_tree: /opt/software" \
   && echo "  view: /opt/view") > /opt/spack-environment/spack.yaml

@@ -378,8 +366,7 @@ produces, for instance, the following ``Dockerfile``:

   && echo "    externals:" \
   && echo "    - spec: cuda%gcc" \
   && echo "      prefix: /usr/local/cuda" \
-  && echo "  concretizer:" \
-  && echo "    unify: true" \
+  && echo "  concretization: together" \
   && echo "  config:" \
   && echo "    install_tree: /opt/software" \
   && echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
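Written out, the ``spack.yaml`` produced by an echo pipeline like the ones above is simply the following (reconstructed from the echoed lines, using the newer ``concretizer:unify`` syntax):

.. code-block:: yaml

   spack:
     specs:
     - gromacs+mpi
     - mpich
     concretizer:
       unify: true
     config:
       install_tree: /opt/software
     view: /opt/view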
@@ -107,6 +107,7 @@ with a high level view of Spack's directory structure:
|
||||
llnl/ <- some general-use libraries
|
||||
|
||||
spack/ <- spack module; contains Python code
|
||||
analyzers/ <- modules to run analysis on installed packages
|
||||
build_systems/ <- modules for different build systems
|
||||
cmd/ <- each file in here is a spack subcommand
|
||||
compilers/ <- compiler description files
|
||||
@@ -150,7 +151,7 @@ Package-related modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:mod:`spack.package`
|
||||
Contains the :class:`~spack.package_base.Package` class, which
|
||||
Contains the :class:`~spack.package.Package` class, which
|
||||
is the superclass for all packages in Spack. Methods on ``Package``
|
||||
implement all phases of the :ref:`package lifecycle
|
||||
<package-lifecycle>` and manage the build process.
|
||||
@@ -241,6 +242,22 @@ Unit tests
|
||||
Implements Spack's test suite. Add a module and put its name in
|
||||
the test suite in ``__init__.py`` to add more unit tests.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Research and Monitoring Modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:mod:`spack.monitor`
|
||||
Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
|
||||
the ``spack install`` and ``spack analyze`` commands to send build and
|
||||
package metadata up to a `Spack Monitor
|
||||
<https://github.com/spack/spack-monitor>`_ server.
|
||||
|
||||
|
||||
:mod:`spack.analyzers`
|
||||
A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
|
||||
that provides base functions to run, save, and (optionally) upload analysis
|
||||
results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
Other Modules
|
||||
@@ -284,6 +301,240 @@ Most spack commands look something like this:
|
||||
The information in Package files is used at all stages in this
|
||||
process.
|
||||
|
||||
Conceptually, packages are overloaded. They contain:
|
||||
|
||||
-------------
|
||||
Stage objects
|
||||
-------------
|
||||
|
||||
|
||||
.. _writing-analyzers:
|
||||
|
||||
-----------------
|
||||
Writing analyzers
|
||||
-----------------
|
||||
|
||||
To write an analyzer, you should add a new python file to the
|
||||
analyzers module directory at ``lib/spack/spack/analyzers`` .
|
||||
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
|
||||
to add an analyzer class ``Myanalyzer`` you would write to
|
||||
``spack/analyzers/myanalyzer.py`` and import and
|
||||
use the base as follows:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from .analyzer_base import AnalyzerBase
|
||||
|
||||
class Myanalyzer(AnalyzerBase):
|
||||
|
||||
|
||||
Note that the class name is your module file name, all lowercase
|
||||
except for the first capital letter. You can look at other analyzers in
|
||||
that analyzer directory for examples. The guide here will tell you about the basic functions needed.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Analyzer Output Directory
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
By default, when you run ``spack analyze run`` an analyzer output directory will
|
||||
be created in your spack user directory in your ``$HOME``. The reason we output here
|
||||
is because the install directory might not always be writable.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
~/.spack/
|
||||
analyzers
|
||||
|
||||
Result files will be written here, organized in subfolders in the same structure
|
||||
as the package, with each analyzer owning it's own subfolder. for example:
|
||||
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tree ~/.spack/analyzers/
|
||||
/home/spackuser/.spack/analyzers/
|
||||
└── linux-ubuntu20.04-skylake
|
||||
└── gcc-9.3.0
|
||||
└── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
|
||||
├── environment_variables
|
||||
│ └── spack-analyzer-environment-variables.json
|
||||
├── install_files
|
||||
│ └── spack-analyzer-install-files.json
|
||||
└── libabigail
|
||||
└── lib
|
||||
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
|
||||
|
||||
|
||||
Notice that for the libabigail analyzer, since results are generated per object,
|
||||
we honor the object's folder in case there are equivalently named files in
|
||||
different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.


^^^^^^^^^^^^^^^^^
Analyzer Metadata
^^^^^^^^^^^^^^^^^

Your analyzer is required to have the class attributes ``name``, ``outfile``,
and ``description``. These are printed to the user when they use the subcommand
``spack analyze list-analyzers``. Here is an example.
As mentioned above, this analyzer would live in a module named
``libabigail.py`` in the analyzers folder so that the class can be discovered.


.. code-block:: python

    class Libabigail(AnalyzerBase):

        name = "libabigail"
        outfile = "spack-analyzer-libabigail.json"
        description = "Application Binary Interface (ABI) features for objects"


The name and output file should be unique for your analyzer.
Note that "all" cannot be the name of an analyzer, as this key is used to indicate
that the user wants to run all analyzers.
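
For reference, a listing from ``spack analyze list-analyzers`` might look
something like the sketch below. The names are the analyzers shown in the
example tree earlier; the exact formatting of the real command may differ:

.. code-block:: console

    $ spack analyze list-analyzers
    environment_variables: environment variables parsed from spack-build-env.txt
    install_files        : install file listing read from install_manifest.json
    libabigail           : Application Binary Interface (ABI) features for objects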

.. _analyzer_run_function:


^^^^^^^^^^^^^^^^^^^^^^^^
An analyzer run Function
^^^^^^^^^^^^^^^^^^^^^^^^

The core of an analyzer is its ``run()`` function, which should accept no
arguments. You can assume your analyzer has the package spec of interest at ``self.spec``,
and it's up to the run function to generate whatever analysis data you need
and then return an object keyed by the analyzer name. The result data
should be a list of objects, each with a ``name``, ``analyzer_name``, ``install_file``,
and one of ``value`` or ``binary_value``. The install file should be a relative
path, not an absolute path. For example, let's say we extract a metric called
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
We might have data that looks like this:

.. code-block:: python

    result = {"name": "metric", "analyzer_name": "thebest-analyzer", "value": "1", "install_file": "bin/wget"}


We'd then return it as follows - note that the key is the analyzer name at ``self.name``.

.. code-block:: python

    return {self.name: result}
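
Putting the pieces together, a minimal ``run()`` sketch for the hypothetical
``thebest-analyzer`` above might look like this (the metric value is a
placeholder; a real analyzer would compute it from files under
``self.spec.prefix``):

.. code-block:: python

    def run(self):
        """A minimal sketch of a run() implementation."""
        result = {
            "name": "metric",
            "analyzer_name": self.name,
            "install_file": "bin/wget",  # relative to the package prefix
            "value": "1",
        }
        # Results are keyed by the analyzer name, as a list of result objects
        return {self.name: [result]}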

This will save the complete result to the analyzer metadata folder, as described
previously. If you want support for adding a different kind of metadata (e.g.,
not associated with an install file) then the monitor server would need to be updated
to support this first.


^^^^^^^^^^^^^^^^^^^^^^^^^
An analyzer init Function
^^^^^^^^^^^^^^^^^^^^^^^^^

If you don't need any extra dependencies or checks, you can skip defining an analyzer
init function, as the base class will handle it. Typically, it will accept
a spec and an optional output directory (if the user does not want the default
metadata folder for analyzer results). The analyzer init function should call
its parent init, and then do any extra checks or validation that are required to
work. For example:

.. code-block:: python

    def __init__(self, spec, dirname=None):
        super(Myanalyzer, self).__init__(spec, dirname)

        # install extra dependencies, do extra preparation and checks here


At the end of the init, you will have available to you:

- **self.spec**: the spec object
- **self.dirname**: an optional directory name the user has provided at init to save
- **self.output_dir**: the analyzer metadata directory, where we save by default
- **self.meta_dir**: the path to the package metadata directory (.spack) if you need it

And can proceed to write your analyzer.
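
As a concrete (hypothetical) illustration, an init that validates an extra
requirement might look like the following; ``mytool`` is a made-up dependency,
and ``which`` is Spack's executable lookup helper:

.. code-block:: python

    from spack.util.executable import which

    from .analyzer_base import AnalyzerBase


    class Myanalyzer(AnalyzerBase):

        def __init__(self, spec, dirname=None):
            super(Myanalyzer, self).__init__(spec, dirname)

            # Hypothetical extra check: require mytool on the PATH
            if not which("mytool"):
                raise ValueError("mytool is required to run this analyzer.")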


^^^^^^^^^^^^^^^^^^^^^^^
Saving Analyzer Results
^^^^^^^^^^^^^^^^^^^^^^^

The analyzer will have ``save_result`` called with the result object generated
by ``run()`` to save it to the filesystem, and, if the user has added the ``--monitor``
flag, to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join
with your output file relative path of choice:

.. code-block:: python

    outfile = os.path.join(self.output_dir, "my-output-file.txt")


The directory will be provided by the ``output_dir`` property but it won't exist,
so you should create it:


.. code-block:: python

    # Create the output directory
    if not os.path.exists(self._output_dir):
        os.makedirs(self._output_dir)


If you are generating results that match to specific files in the package
install directory, you should try to maintain those paths in case
there are equivalently named files in different directories that would
overwrite one another. As an example of an analyzer with a custom save,
the Libabigail analyzer saves ``*.xml`` files to the analyzer metadata
folder in ``run()``, as they are either binaries or, as XML (text), would
usually be too big to pass in one request. For this reason, the files
are saved during ``run()`` and the filenames added to the result object,
and then when the result object is passed back into ``save_result()``,
we skip saving to the filesystem, and instead read the file and send
each one (separately) to the monitor:


.. code-block:: python

    def save_result(self, result, monitor=None, overwrite=False):
        """ABI results are saved to individual files, so each one needs to be
        read and uploaded. Result here should be the lookup generated in run(),
        the key is the analyzer name, and each value is the result file.
        We currently upload the entire xml as text because libabigail can't
        easily read gzipped xml, but this will be updated when it can.
        """
        if not monitor:
            return

        name = self.spec.package.name

        for obj, filename in result.get(self.name, {}).items():

            # Don't include the prefix
            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

            # We've already saved the results to file during run
            content = spack.monitor.read_file(filename)

            # A result needs an analyzer, value or binary_value, and name
            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
            monitor.send_analyze_metadata(self.spec.package, {"libabigail": [data]})


Notice that this function, if you define it, requires a result object (generated by
``run()``), a monitor (if you want to send), and a boolean ``overwrite`` used
to check if a result exists first, and to skip writing if the result exists and
overwrite is False. Also notice that since we already saved these files to the
analyzer metadata folder, we return early if a monitor isn't defined, because this
function serves to send results to the monitor. If you haven't saved anything to
the analyzer metadata folder yet, you might want to do that here. You should also
use ``tty.info`` to give the user a message of "Writing result to $DIRNAME."
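
For contrast with the custom Libabigail save above, a sketch of a simple
``save_result`` that only writes JSON locally could look like this (assuming
the ``outfile`` class attribute and ``output_dir`` property described earlier):

.. code-block:: python

    import json
    import os

    def save_result(self, result, monitor=None, overwrite=False):
        if not os.path.exists(self.output_dir):
            os.makedirs(self.output_dir)

        outfile = os.path.join(self.output_dir, self.outfile)
        if os.path.exists(outfile) and not overwrite:
            return

        tty.info("Writing result to %s." % outfile)
        with open(outfile, "w") as fd:
            fd.write(json.dumps(result))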

.. _writing-commands:

@@ -448,6 +699,23 @@ with a hook, and this is the purpose of this particular hook. Akin to
``on_phase_success`` we require the same variables - the package that failed,
the name of the phase, and the log file where we might find errors.

"""""""""""""""""""""""""""""""""
``on_analyzer_save(pkg, result)``
"""""""""""""""""""""""""""""""""

After an analyzer has saved some result for a package, this hook is called,
and it provides the package that we just ran the analysis for, along with
the loaded result. Typically, a result is structured to have the name
of the analyzer as key, and the result object that is defined in detail in
:ref:`analyzer_run_function`.

.. code-block:: python

    def on_analyzer_save(pkg, result):
        """Given a package and a result..."""
        print('Do something extra with a package analysis result here')


^^^^^^^^^^^^^^^^^^^^^^
Adding a New Hook Type
^^^^^^^^^^^^^^^^^^^^^^
BIN lib/spack/docs/dockerhub_spack.png (new file)
Binary file not shown. After: 88 KiB
@@ -273,9 +273,19 @@ or

Concretizing
^^^^^^^^^^^^

Once some user specs have been added to an environment, they can be concretized.
There are at the moment three different modes of operation to concretize an environment,
which are explained in detail in :ref:`environments_concretization_config`.
Once some user specs have been added to an environment, they can be
concretized. *By default specs are concretized separately*, one after
the other. This mode of operation permits one to deploy a full
software stack where multiple configurations of the same package
need to be installed alongside each other. Central installations done
at HPC centers by system administrators or user support groups
are a common case that fits this behavior.
Environments *can also be configured to concretize all
the root specs in a self-consistent way* to ensure that
each package in the environment comes with a single configuration. This
mode of operation is usually what is required by software developers that
want to deploy their development environment.

Regardless of which mode of operation has been chosen, the following
command will ensure all the root specs are concretized according to the
constraints that are prescribed in the configuration:
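
A minimal console sketch of that command (``spack concretize`` is the
standard entry point; add ``-f`` to force re-concretization):

.. code-block:: console

    $ spack concretize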

@@ -483,76 +493,32 @@ Appending to this list in the yaml is identical to using the ``spack
add`` command from the command line. However, there is more power
available from the yaml file.

.. _environments_concretization_config:

^^^^^^^^^^^^^^^^^^^
Spec concretization
^^^^^^^^^^^^^^^^^^^
An environment can be concretized in three different modes and the behavior active under any environment
is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:

Specs can be concretized separately or together, as already
explained in :ref:`environments_concretization`. The behavior active
under any environment is determined by the ``concretization`` property:

.. code-block:: yaml

    spack:
      specs:
      - hdf5~mpi
      - hdf5+mpi
      - zlib@1.2.8
      concretizer:
        unify: false
      - ncview
      - netcdf
      - nco
      - py-sphinx
      concretization: together

This mode of operation permits one to deploy a full software stack where multiple configurations of the same package
need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
then it will be used for both ``hdf5`` installations.

If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
i.e. trying to maximize reuse of dependencies across different specs:

.. code-block:: yaml

    spack:
      specs:
      - hdf5~mpi
      - hdf5+mpi
      - zlib@1.2.8
      concretizer:
        unify: when_possible

Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
Central installations done at HPC centers by system administrators or user support groups are a common case
that fits either of these two modes.

Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
ensure that each package in the environment comes with a single configuration:

.. code-block:: yaml

    spack:
      specs:
      - hdf5+mpi
      - zlib@1.2.8
      concretizer:
        unify: true

This mode of operation is usually what is required by software developers that want to deploy their development
environment and have a single view of it in the filesystem.

.. note::

    The ``concretizer:unify`` config option was introduced in Spack 0.18 to
    replace the ``concretization`` property. For reference,
    ``concretization: together`` is replaced by ``concretizer:unify:true``,
    and ``concretization: separately`` is replaced by ``concretizer:unify:false``.
which can currently take either one of the two allowed values ``together`` or ``separately``
(the default).

.. admonition:: Re-concretization of user specs

    When concretizing specs *together* or *together where possible* the entire set of specs will be
    When concretizing specs together the entire set of specs will be
    re-concretized after any addition of new user specs, to ensure that
    the environment remains consistent / minimal. When instead the specs are concretized
    the environment remains consistent. When instead the specs are concretized
    separately only the new specs will be re-concretized after any addition.
^^^^^^^^^^^^^

@@ -799,7 +765,7 @@ directories.

      select: [^mpi]
      exclude: ['%pgi@18.5']
      projections:
        all: '{name}/{version}-{compiler.name}'
        all: {name}/{version}-{compiler.name}
      link: all
      link_type: symlink

@@ -1013,7 +979,7 @@ The following advanced example shows how generated targets can be used in a

    SPACK ?= spack

    .PHONY: all clean env
    .PHONY: all clean fetch env

    all: env

@@ -1022,6 +988,9 @@ The following advanced example shows how generated targets can be used in a

    env.mk: spack.lock
            $(SPACK) -e . env depfile -o $@ --make-target-prefix spack

    fetch: spack/fetch
            $(info Environment fetched!)

    env: spack/env
            $(info Environment installed!)

@@ -1034,10 +1003,10 @@ The following advanced example shows how generated targets can be used in a

    endif

When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
from its rule, which triggers concretization. When done, the generated target
``spack/env`` is available. In the above example, the ``env`` target uses this generated
target as a prerequisite, meaning that it can make use of the installed packages in
its commands.
from its rule, which triggers concretization. When done, the generated targets
``spack/fetch`` and ``spack/env`` are available. In the above
example, the ``env`` target uses the latter as a prerequisite, meaning
that it can make use of the installed packages in its commands.

As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
the include is conditional.

@@ -1045,6 +1014,7 @@ the include is conditional.

.. note::

    When including generated ``Makefile``\s, it is important to use
    the ``--make-target-prefix`` flag and use the non-phony target
    ``<target-prefix>/env`` as prerequisite, instead of the phony target
    ``<target-prefix>/all``.
    the ``--make-target-prefix`` flag and use the non-phony targets
    ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
    prerequisites, instead of the phony targets ``<target-prefix>/all``
    and ``<target-prefix>/fetch-all`` respectively.
Binary file not shown. Before: 70 KiB

@@ -63,7 +63,6 @@ or refer to the full manual below.

    configuration
    config_yaml
    bootstrapping
    build_settings
    environments
    containers
@@ -1070,32 +1070,13 @@ Commits

Submodules
    You can supply ``submodules=True`` to cause Spack to fetch submodules
    recursively along with the repository at fetch time.
    recursively along with the repository at fetch time. For more information
    about git submodules see the manpage of git: ``man git-submodule``.

    .. code-block:: python

        version('1.0.1', tag='v1.0.1', submodules=True)

    If a package needs more fine-grained control over submodules, define
    ``submodules`` to be a callable function that takes the package instance as
    its only argument. The function should return a list of submodules to be fetched.

    .. code-block:: python

        def submodules(package):
            submodules = []
            if "+variant-1" in package.spec:
                submodules.append("submodule_for_variant_1")
            if "+variant-2" in package.spec:
                submodules.append("submodule_for_variant_2")
            return submodules


        class MyPackage(Package):
            version("0.1.0", submodules=submodules)

    For more information about git submodules see the manpage of git: ``man
    git-submodule``.

.. _github-fetch:
@@ -2412,9 +2393,9 @@ Influence how dependents are built or run

Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
:meth:`setup_dependent_run_environment <spack.package.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
:meth:`setup_dependent_build_environment <spack.package.PackageBase.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:

@@ -2436,7 +2417,7 @@ will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
:meth:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`
:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
@@ -2794,256 +2775,6 @@ Suppose a user invokes ``spack install`` like this:

Spack will fail with a constraint violation, because the version of
MPICH requested is too low for the ``mpi`` requirement in ``foo``.

.. _custom-attributes:

-----------------
Custom attributes
-----------------

Often a package will need to provide attributes for dependents to query
various details about what it provides. While any number of custom defined
attributes can be implemented by a package, the four specific attributes
described below are always available on every package with default
implementations and the ability to customize with alternate implementations
in the case of virtual packages provided:

=========== =========================================== =============================
Attribute   Purpose                                     Default
=========== =========================================== =============================
``home``    The installation path for the package      ``spec.prefix``
``command`` An executable command for the package      | ``spec.name`` found in
                                                       | ``.home.bin``
``headers`` A list of headers provided by the package  | All headers searched
                                                       | recursively in
                                                       | ``.home.include``
``libs``    A list of libraries provided by the        | ``lib{spec.name}`` searched
            package                                    | recursively in ``.home``
                                                       | starting with ``lib``,
                                                       | ``lib64``, then the rest
                                                       | of ``.home``
=========== =========================================== =============================

Each of these can be customized by implementing the relevant attribute
as a ``@property`` in the package's class:

.. code-block:: python
    :linenos:

    class Foo(Package):
        ...
        @property
        def libs(self):
            # The library provided by Foo is libMyFoo.so
            return find_libraries('libMyFoo', root=self.home, recursive=True)

A package may also provide a custom implementation of each attribute
for the virtual packages it provides by implementing the
``virtualpackagename_attributename`` property in the package's class.
The implementation used is the first one found from:

#. Specialized virtual: ``Package.virtualpackagename_attributename``
#. Generic package: ``Package.attributename``
#. Default

The use of customized attributes is demonstrated in the next example.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Example: Customized attributes for virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Consider a package ``foo`` that can optionally provide two virtual
packages ``bar`` and ``baz``. When both are enabled the installation tree
appears as follows:

.. code-block:: console

    include/foo.h
    include/bar/bar.h
    lib64/libFoo.so
    lib64/libFooBar.so
    baz/include/baz/baz.h
    baz/lib/libFooBaz.so

The install tree shows that ``foo`` is providing the header ``include/foo.h``
and library ``lib64/libFoo.so`` in its install prefix. The virtual
package ``bar`` is providing ``include/bar/bar.h`` and library
``lib64/libFooBar.so``, also in ``foo``'s install prefix. The ``baz``
package, however, is provided in the ``baz`` subdirectory of ``foo``'s
prefix with the ``include/baz/baz.h`` header and ``lib/libFooBaz.so``
library. Such a package could implement the optional attributes as
follows:

.. code-block:: python
    :linenos:

    class Foo(Package):
        ...
        variant('bar', default=False, description='Enable the Foo implementation of bar')
        variant('baz', default=False, description='Enable the Foo implementation of baz')
        ...
        provides('bar', when='+bar')
        provides('baz', when='+baz')
        ...

        # Just the foo headers
        @property
        def headers(self):
            return find_headers('foo', root=self.home.include, recursive=False)

        # Just the foo libraries
        @property
        def libs(self):
            return find_libraries('libFoo', root=self.home, recursive=True)

        # The header provided by the bar virtual package
        @property
        def bar_headers(self):
            return find_headers('bar/bar', root=self.home.include, recursive=False)

        # The library provided by the bar virtual package
        @property
        def bar_libs(self):
            return find_libraries('libFooBar', root=self.home, recursive=True)

        # The baz virtual package home
        @property
        def baz_home(self):
            return self.prefix.baz

        # The header provided by the baz virtual package
        @property
        def baz_headers(self):
            return find_headers('baz/baz', root=self.baz_home.include, recursive=False)

        # The library provided by the baz virtual package
        @property
        def baz_libs(self):
            return find_libraries('libFooBaz', root=self.baz_home, recursive=True)

Now consider another package, ``foo-app``, depending on all three:

.. code-block:: python
    :linenos:

    class FooApp(CMakePackage):
        ...
        depends_on('foo')
        depends_on('bar')
        depends_on('baz')

The resulting spec objects for its dependencies show the result of
the above attribute implementations:

.. code-block:: python

    # The core headers and libraries of the foo package

    >>> spec['foo']
    foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
    >>> spec['foo'].prefix
    '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

    # home defaults to the package install prefix without an explicit implementation
    >>> spec['foo'].home
    '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

    # foo headers from the foo prefix
    >>> spec['foo'].headers
    HeaderList([
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/foo.h',
    ])

    # foo include directories from the foo prefix
    >>> spec['foo'].headers.directories
    ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']

    # foo libraries from the foo prefix
    >>> spec['foo'].libs
    LibraryList([
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFoo.so',
    ])

    # foo library directories from the foo prefix
    >>> spec['foo'].libs.directories
    ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']

.. code-block:: python

    # The virtual bar package in the same prefix as foo

    # bar resolves to the foo package
    >>> spec['bar']
    foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
    >>> spec['bar'].prefix
    '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

    # home defaults to the foo prefix without either a Foo.bar_home
    # or Foo.home implementation
    >>> spec['bar'].home
    '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

    # bar header in the foo prefix
    >>> spec['bar'].headers
    HeaderList([
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/bar/bar.h'
    ])

    # bar include dirs from the foo prefix
    >>> spec['bar'].headers.directories
    ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']

    # bar library from the foo prefix
    >>> spec['bar'].libs
    LibraryList([
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFooBar.so'
    ])

    # bar library directories from the foo prefix
    >>> spec['bar'].libs.directories
    ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']

.. code-block:: python

    # The virtual baz package in a subdirectory of foo's prefix

    # baz resolves to the foo package
    >>> spec['baz']
    foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
    >>> spec['baz'].prefix
    '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

    # baz_home implementation provides the subdirectory inside the foo prefix
    >>> spec['baz'].home
    '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz'

    # baz headers in the baz subdirectory of the foo prefix
    >>> spec['baz'].headers
    HeaderList([
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include/baz/baz.h'
    ])

    # baz include directories in the baz subdirectory of the foo prefix
    >>> spec['baz'].headers.directories
    [
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include'
    ]

    # baz libraries in the baz subdirectory of the foo prefix
    >>> spec['baz'].libs
    LibraryList([
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so'
    ])

    # baz library directories in the baz subdirectory of the foo prefix
    >>> spec['baz'].libs.directories
    [
        '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib'
    ]

.. _abstract-and-concrete:

-------------------------
@@ -3291,7 +3022,7 @@ The classes that are currently provided by Spack are:

    +----------------------------------------------------------+----------------------------------+
    |  **Base Class**                                          |           **Purpose**            |
    +==========================================================+==================================+
    | :class:`~spack.package_base.Package`                     | General base class not           |
    | :class:`~spack.package.Package`                          | General base class not           |
    |                                                          | specialized for any build system |
    +----------------------------------------------------------+----------------------------------+
    | :class:`~spack.build_systems.makefile.MakefilePackage`   | Specialized class for packages   |

@@ -3422,7 +3153,7 @@ for the install phase is:

    For those not used to Python instance methods, this is the
    package itself. In this case it's an instance of ``Foo``, which
    extends ``Package``. For API docs on Package objects, see
    :py:class:`Package <spack.package_base.Package>`.
    :py:class:`Package <spack.package.Package>`.

``spec``
    This is the concrete spec object created by Spack from an
@@ -5745,24 +5476,6 @@ Version Lists

Spack packages should list supported versions with the newest first.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using ``home`` vs ``prefix``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

``home`` and ``prefix`` are both attributes that can be queried on a
package's dependencies, often when passing configure arguments pointing to the
location of a dependency. The difference is that while ``prefix`` is the
location on disk where a concrete package resides, ``home`` is the `logical`
location that a package resides, which may be different from ``prefix`` in
the case of virtual packages or other special circumstances. For most use
cases inside a package, its dependency locations can be accessed via either
``self.spec['foo'].home`` or ``self.spec['foo'].prefix``. Specific packages
that should be consumed by dependents via ``.home`` instead of ``.prefix``
should be noted in their respective documentation.

See :ref:`custom-attributes` for more details and an example implementing
a custom ``home`` attribute.
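
As a quick sketch of the difference, a hypothetical Autotools-style package
might point configure at a dependency like this (``foo`` and the flag names
are made up):

.. code-block:: python

    def configure_args(self):
        return [
            # home is the logical root: correct even when 'foo' is a
            # virtual package provided from a subdirectory of the provider
            "--with-foo=%s" % self.spec['foo'].home,
            # prefix is always the concrete on-disk installation directory
            "--with-foo-libdir=%s" % self.spec['foo'].prefix.lib,
        ]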

---------------------------
Packaging workflow commands
---------------------------

@@ -115,8 +115,7 @@ And here's the spack environment built by the pipeline represented as a

    spack:
      view: false
      concretizer:
        unify: false
      concretization: separately

      definitions:
      - pkgs:

@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
ever want to add more packages, you can either use ``spack add`` or manually
edit this file.

We also need to change the ``concretizer:unify`` option. By default, Spack
We also need to change the ``concretization:`` option. By default, Spack
concretizes each spec *separately*, allowing multiple versions of the same
package to coexist. Since we want a single consistent environment, we want to
concretize all of the specs *together*.

@@ -78,8 +78,7 @@ Here is what your ``spack.yaml`` looks like with this new setting:

    # add package specs to the `specs` list
    specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
    view: true
    concretizer:
      unify: true
    concretization: together

^^^^^^^^^^^^^^^^
Symlink location
^^^^^^^^^^^^^^^^

@@ -25,5 +25,4 @@ spack:
    - subversion
    # Plotting
    - graphviz
    concretizer:
      unify: true
    concretization: together

@@ -7,7 +7,7 @@ bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives
bzip2, , , Compress/Decompress archives
bzip, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
@@ -15,4 +15,4 @@ gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories
hg, , Optional, Manage Software Repositories
Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
6 lib/spack/env/cc (vendored)
@@ -1,4 +1,4 @@

#!/bin/sh -f
#!/bin/sh
# shellcheck disable=SC2034  # evals in this script fool shellcheck
#
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other

@@ -768,9 +768,7 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    IFS="$lsep"
    echo "[$mode] "$full_command_list >> "$output_log"
    unset IFS
    echo "[$mode] ${full_command_list}" >> "$output_log"
fi

# Execute the full command, preserving spaces with IFS set

2 lib/spack/external/__init__.py (vendored)
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)

argparse
--------
75 lib/spack/external/archspec/cpu/detect.py (vendored)
@@ -61,7 +61,7 @@ def proc_cpuinfo():
        ``/proc/cpuinfo``
    """
    info = {}
    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
    with open("/proc/cpuinfo") as file:
        for line in file:
            key, separator, value = line.partition(":")

@@ -80,46 +80,26 @@ def proc_cpuinfo():


def _check_output(args, env):
    output = subprocess.Popen(  # pylint: disable=consider-using-with
        args, stdout=subprocess.PIPE, env=env
    ).communicate()[0]
    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
    return six.text_type(output.decode("utf-8"))


def _machine():
    """Return the machine architecture we are on"""
    operating_system = platform.system()

    # If we are not on Darwin, trust what Python tells us
    if operating_system != "Darwin":
        return platform.machine()

    # On Darwin it might happen that we are on M1, but using an interpreter
    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
    # need to fix that.
    #
    # See: https://bugs.python.org/issue42704
    output = _check_output(
        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
    ).strip()

    if "Apple" in output:
        # Note that a native Python interpreter on Apple M1 would return
        # "arm64" instead of "aarch64". Here we normalize to the latter.
        return "aarch64"

    return "x86_64"


@info_dict(operating_system="Darwin")
def sysctl_info_dict():
    """Returns a raw info dictionary parsing the output of sysctl."""
    child_environment = _ensure_bin_usrbin_in_path()
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)

    def sysctl(*args):
        return _check_output(["sysctl"] + list(args), env=child_environment).strip()

    if _machine() == "x86_64":
    if platform.machine() == "x86_64":
        flags = (
            sysctl("-n", "machdep.cpu.features").lower()
            + " "

@@ -145,18 +125,6 @@ def sysctl(*args):
    return info


def _ensure_bin_usrbin_in_path():
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)
    return child_environment


def adjust_raw_flags(info):
    """Adjust the flags detected on the system to homogenize
    slightly different representations.

@@ -216,7 +184,12 @@ def compatible_microarchitectures(info):
    Args:
        info (dict): dictionary containing information on the host cpu
    """
    architecture_family = _machine()
    architecture_family = platform.machine()
    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
    # so we should normalize the name here
    if architecture_family == "arm64":
        architecture_family = "aarch64"

    # If a tester is not registered, be conservative and assume no known
    # target is compatible with the host
    tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)

@@ -271,7 +244,12 @@ def compatibility_check(architecture_family):
        architecture_family = (architecture_family,)

    def decorator(func):
        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
        # pylint: disable=fixme
        # TODO: on removal of Python 2.6 support this can be re-written as
        # TODO: an update + a dict comprehension
        for arch_family in architecture_family:
            COMPATIBILITY_CHECKS[arch_family] = func

        return func

    return decorator

@@ -310,7 +288,7 @@ def compatibility_check_for_x86_64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )

@@ -325,9 +303,8 @@ def compatibility_check_for_aarch64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        # On macOS it seems impossible to get all the CPU features with sysctl info
        and (target.features.issubset(features) or platform.system() == "Darwin")
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )

4 lib/spack/external/archspec/cpu/schema.py (vendored)
@@ -11,7 +11,7 @@

try:
    from collections.abc import MutableMapping  # novm
except ImportError:
    from collections import MutableMapping  # pylint: disable=deprecated-class
    from collections import MutableMapping


class LazyDictionary(MutableMapping):

@@ -56,7 +56,7 @@ def _load_json_file(json_file):

    def _factory():
        filename = os.path.join(json_dir, json_file)
        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
        with open(filename, "r") as file:
            return json.load(file)

    return _factory

@@ -85,21 +85,7 @@

        "intel": [
          {
            "versions": ":",
            "name": "x86-64",
            "flags": "-march={name} -mtune=generic"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "name": "x86-64",
            "flags": "-march={name} -mtune=generic"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "name": "x86-64",
            "name": "pentium4",
            "flags": "-march={name} -mtune=generic"
          }
        ]

@@ -143,20 +129,6 @@

            "name": "x86-64",
            "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
          }
        ],
        "oneapi": [
          {
            "versions": "2021.2.0:",
            "name": "x86-64-v2",
            "flags": "-march={name} -mtune=generic"
          }
        ],
        "dpcpp": [
          {
            "versions": "2021.2.0:",
            "name": "x86-64-v2",
            "flags": "-march={name} -mtune=generic"
          }
        ]
      }
    },

@@ -214,20 +186,6 @@

            "name": "x86-64",
            "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
          }
        ],
        "oneapi": [
          {
            "versions": "2021.2.0:",
            "name": "x86-64-v3",
            "flags": "-march={name} -mtune=generic"
          }
        ],
        "dpcpp": [
          {
            "versions": "2021.2.0:",
            "name": "x86-64-v3",
            "flags": "-march={name} -mtune=generic"
          }
        ]
      }
    },

@@ -290,20 +248,6 @@

            "name": "x86-64",
            "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
          }
        ],
        "oneapi": [
          {
            "versions": "2021.2.0:",
            "name": "x86-64-v4",
            "flags": "-march={name} -mtune=generic"
          }
        ],
        "dpcpp": [
          {
            "versions": "2021.2.0:",
            "name": "x86-64-v4",
            "flags": "-march={name} -mtune=generic"
          }
        ]
      }
    },
@@ -344,19 +288,8 @@

        "intel": [
          {
            "versions": "16.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
            "name": "pentium4",
            "flags": "-march={name} -mtune=generic"
          }
        ]
      }

@@ -400,18 +333,6 @@

            "versions": "16.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -463,20 +384,6 @@

            "name": "corei7",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "name": "corei7",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "name": "corei7",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -525,20 +432,6 @@

            "name": "corei7",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "name": "corei7",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "name": "corei7",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -597,18 +490,6 @@

            "versions": "18.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -669,18 +550,6 @@

            "versions": "18.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -746,18 +615,6 @@

            "versions": "18.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -815,18 +672,6 @@

            "versions": "18.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -887,18 +732,6 @@

            "versions": "18.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },
@@ -965,20 +798,6 @@

            "name": "knl",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "name": "knl",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "name": "knl",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1049,20 +868,6 @@

            "name": "skylake-avx512",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "name": "skylake-avx512",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "name": "skylake-avx512",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1132,18 +937,6 @@

            "versions": "18.0:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1211,18 +1004,6 @@

            "versions": "19.0.1:",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1317,20 +1098,6 @@

            "name": "icelake-client",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "name": "icelake-client",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "name": "icelake-client",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1375,20 +1142,6 @@

            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse2"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse2"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse2"
          }
        ]
      }
    },

@@ -1439,20 +1192,6 @@

            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse3"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse3"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse3"
          }
        ]
      }
    },

@@ -1507,20 +1246,6 @@

            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse3"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse3"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse3"
          }
        ]
      }
    },

@@ -1576,20 +1301,6 @@

            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse4.2"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse4.2"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "flags": "-msse4.2"
          }
        ]
      }
    },

@@ -1649,22 +1360,6 @@

            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1727,22 +1422,6 @@

            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1806,22 +1485,6 @@

            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },

@@ -1880,30 +1543,6 @@

            "name": "znver3",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "intel": [
          {
            "versions": "16.0:",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "oneapi": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ],
        "dpcpp": [
          {
            "versions": ":",
            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
            "name": "core-avx2",
            "flags": "-march={name} -mtune={name}"
          }
        ]
      }
    },
@@ -2149,6 +1788,7 @@
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
@@ -2181,26 +1821,18 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16"
},
{
"versions": "8:10.2",
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
},
{
"versions": "10.3:",
"flags": "-mcpu=a64fx -msve-vector-bits=512"
"versions": "8:",
"flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
}
],
"clang": [
{
"versions": "3.9:4.9",
"flags": "-march=armv8.2-a+crc+sha2+fp16"
"flags": "-march=armv8.2-a+crc+crypto+fp16"
},
{
"versions": "5:10",
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
},
{
"versions": "11:",
"flags": "-mcpu=a64fx"
"versions": "5:",
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
],
"arm": [
@@ -2322,40 +1954,7 @@
"m1": {
"from": ["aarch64"],
"vendor": "Apple",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"flagm2",
"frint"
],
"features": [],
"compilers": {
"gcc": [
{
@@ -2365,22 +1964,14 @@
],
"clang" : [
{
"versions": "9.0:12.0",
"versions": "9.0:",
"flags" : "-march=armv8.4-a"
},
{
"versions": "13.0:",
"flags" : "-mcpu=apple-m1"
}
],
"apple-clang": [
{
"versions": "11.0:12.5",
"versions": "11.0:",
"flags" : "-march=armv8.4-a"
},
{
"versions": "13.0:",
"flags" : "-mcpu=apple-m1"
}
]
}

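The compiler entries above pair a "versions" range such as "9.0:12.0" or "13.0:" with the flags to emit. A hedged sketch of how such a range could be matched against a concrete compiler version; Spack's real logic lives in its version classes, this is only an illustration:

# Hypothetical range matcher for strings like "9.0:12.0" or "13.0:".
def version_in_range(version, rng):
    def as_tuple(v):
        return tuple(int(x) for x in v.split("."))
    lo, _, hi = rng.partition(":")
    if lo and as_tuple(version) < as_tuple(lo):
        return False
    if hi and as_tuple(version) > as_tuple(hi):
        return False
    return True

entries = [{"versions": "9.0:12.0", "flags": "-march=armv8.4-a"},
           {"versions": "13.0:", "flags": "-mcpu=apple-m1"}]
print(next(e["flags"] for e in entries
           if version_in_range("13.0", e["versions"])))  # -mcpu=apple-m1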
@@ -308,68 +308,6 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
        filter_file(double_quoted, '"%s"' % repl, f)


@contextmanager
def exploding_archive_catch(stage):
    # Check for an exploding tarball, i.e. one that doesn't expand to
    # a single directory. If the tarball *didn't* explode, move its
    # contents to the staging source directory & remove the container
    # directory. If the tarball did explode, just rename the tarball
    # directory to the staging source directory.
    #
    # NOTE: The tar program on Mac OS X will encode HFS metadata in
    # hidden files, which can end up *alongside* a single top-level
    # directory. We initially ignore presence of hidden files to
    # accommodate these "semi-exploding" tarballs but ensure the files
    # are copied to the source directory.

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    orig_dir = os.getcwd()
    os.chdir(tarball_container)
    try:
        yield
        # catch an exploding archive on successful extraction
        os.chdir(orig_dir)
        exploding_archive_handler(tarball_container, stage)
    except Exception as e:
        # return current directory context to previous on failure
        os.chdir(orig_dir)
        raise e

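A usage sketch for the context manager above; `stage` and the extraction call are stand-ins for Spack's Stage object and fetcher, not runnable on their own:

# Hypothetical caller: extraction happens inside the managed directory,
# then exploding_archive_handler() normalizes the layout on success.
with exploding_archive_catch(stage):
    tarball.extractall()  # cwd is stage.path/spack-expanded-archive here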
@system_path_filter
def exploding_archive_handler(tarball_container, stage):
    """
    Args:
        tarball_container: where the archive was expanded to
        stage: Stage object referencing filesystem location
            where archive is being expanded
    """
    files = os.listdir(tarball_container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        src = os.path.join(tarball_container, non_hidden[0])
        if os.path.isdir(src):
            stage.srcdir = non_hidden[0]
            shutil.move(src, stage.source_path)
            if len(files) > 1:
                files.remove(non_hidden[0])
                for f in files:
                    src = os.path.join(tarball_container, f)
                    dest = os.path.join(stage.path, f)
                    shutil.move(src, dest)
            os.rmdir(tarball_container)
        else:
            # This is a non-directory entry (e.g., a patch file) so simply
            # rename the tarball container to be the source path.
            shutil.move(tarball_container, stage.source_path)
    else:
        shutil.move(tarball_container, stage.source_path)

@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):
@@ -429,7 +367,7 @@ def group_ids(uid=None):


@system_path_filter(arg_slice=slice(1))
def chgrp(path, group, follow_symlinks=True):
def chgrp(path, group):
    """Implement the bash chgrp function on a single path"""
    if is_windows:
        raise OSError("Function 'chgrp' is not supported on Windows")
@@ -438,10 +376,7 @@ def chgrp(path, group, follow_symlinks=True):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    if follow_symlinks:
        os.chown(path, -1, gid)
    else:
        os.lchown(path, -1, gid)
    os.chown(path, -1, gid)


@system_path_filter(arg_slice=slice(1))

@@ -11,9 +11,7 @@
import os
import re
import sys
import traceback
from datetime import datetime, timedelta
from typing import List, Tuple

import six
from six import string_types
@@ -1011,76 +1009,3 @@ def __repr__(self):

    def __str__(self):
        return str(self.data)


class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]

    def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context):
        # type: (str) -> GroupedExceptionForwarder
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self)

    def _receive_forwarded(self, context, exc, tb):
        # type: (str, Exception, List[str]) -> None
        self.exceptions.append((context, exc, tb))

    def grouped_message(self, with_tracebacks=True):
        # type: (bool) -> str
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            '{0} raised {1}: {2}{3}'.format(
                context,
                exc.__class__.__name__,
                exc,
                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
            )
            for context, exc, tb in self.exceptions
        ]
        return 'due to the following failures:\n{0}'.format(
            '\n'.join(each_exception_message)
        )


class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context, handler):
        # type: (str, GroupedExceptionHandler) -> None
        self._context = context
        self._handler = handler

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            self._handler._receive_forwarded(
                self._context,
                exc_value,
                traceback.format_tb(tb),
            )

        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True

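A minimal usage sketch for the handler/forwarder pair above; the step names are made up:

handler = GroupedExceptionHandler()
for step in ('fetch', 'install'):
    with handler.forward(step):
        raise RuntimeError('%s failed' % step)  # swallowed and recorded

if handler:  # True because exceptions were forwarded
    print('operation failed ' + handler.grouped_message(with_tracebacks=False))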
class classproperty(object):
    """Non-data descriptor to evaluate a class-level property. The function that performs
    the evaluation is injected at creation time and takes an instance (could be None) and
    an owner (i.e. the class that originated the instance)
    """
    def __init__(self, callback):
        self.callback = callback

    def __get__(self, instance, owner):
        return self.callback(owner)

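Usage sketch for the descriptor above: the decorated function receives the owning class, so it reads like a property on the class itself. The Example class is illustrative:

class Example(object):
    base = 21

    @classproperty
    def doubled(cls):
        return cls.base * 2

print(Example.doubled)  # 42, evaluated via classproperty.__get__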
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 19, 0, 'dev0')
spack_version_info = (0, 18, 0, 'dev0')

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = '.'.join(str(s) for s in spack_version_info)

42
lib/spack/spack/analyzers/__init__.py
Normal file
@@ -0,0 +1,42 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This package contains code for creating analyzers to extract Application
Binary Interface (ABI) information, along with simple analyses that just load
existing metadata.
"""

from __future__ import absolute_import

import llnl.util.tty as tty

import spack.paths
import spack.util.classes

mod_path = spack.paths.analyzers_path
analyzers = spack.util.classes.list_classes("spack.analyzers", mod_path)

# The base analyzer does not have a name, and cannot do dict comprehension
analyzer_types = {}
for a in analyzers:
    if not hasattr(a, "name"):
        continue
    analyzer_types[a.name] = a


def list_all():
    """A helper function to list all analyzers and their descriptions
    """
    for name, analyzer in analyzer_types.items():
        print("%-25s: %-35s" % (name, analyzer.description))


def get_analyzer(name):
    """Courtesy function to retrieve an analyzer, and exit on error if it
    does not exist.
    """
    if name in analyzer_types:
        return analyzer_types[name]
    tty.die("Analyzer %s does not exist" % name)
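Hypothetical use of this registry from Python; the `spack analyze` command is the real entry point:

import spack.analyzers as analyzers

analyzers.list_all()                     # one "name: description" line each
config_args = analyzers.get_analyzer("config_args")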
116
lib/spack/spack/analyzers/analyzer_base.py
Normal file
@@ -0,0 +1,116 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An analyzer base provides basic functions to run the analysis, save results,
and (optionally) interact with a Spack Monitor
"""

import os

import llnl.util.tty as tty

import spack.config
import spack.hooks
import spack.monitor
import spack.util.path


def get_analyzer_dir(spec, analyzer_dir=None):
    """
    Given a spec, return the directory to save analyzer results.

    We create the directory if it does not exist. We also check that the
    spec has an associated package. An analyzer cannot be run if the spec isn't
    associated with a package. If the user provides a custom analyzer_dir,
    we use it over checking the config and the default at ~/.spack/analyzers
    """
    # An analyzer cannot be run if the spec isn't associated with a package
    if not hasattr(spec, "package") or not spec.package:
        tty.die("A spec can only be analyzed with an associated package.")

    # The top level directory is in the user home, or a custom location
    if not analyzer_dir:
        analyzer_dir = spack.util.path.canonicalize_path(
            spack.config.get('config:analyzers_dir', '~/.spack/analyzers'))

    # We follow the same convention as the spec install (this could be better)
    package_prefix = os.sep.join(spec.package.prefix.split('/')[-3:])
    meta_dir = os.path.join(analyzer_dir, package_prefix)
    return meta_dir


class AnalyzerBase(object):

    def __init__(self, spec, dirname=None):
        """
        Verify that the analyzer has correct metadata.

        An Analyzer is intended to run on one spec install, so the spec
        with its associated package is required on init. The child analyzer
        class should define an init function that super's the init here, and
        also check that the analyzer has all dependencies that it
        needs. If an analyzer subclass does not have dependencies, it does not
        need to define an init. An Analyzer should not be allowed to proceed
        if one or more dependencies are missing. The dirname, if defined,
        is an optional directory name to save to (instead of the default meta
        spack directory).
        """
        self.spec = spec
        self.dirname = dirname
        self.meta_dir = os.path.dirname(spec.package.install_log_path)

        for required in ["name", "outfile", "description"]:
            if not hasattr(self, required):
                tty.die("Please add a %s attribute on the analyzer." % required)

    def run(self):
        """
        Given a spec with an installed package, run the analyzer on it.
        """
        raise NotImplementedError

    @property
    def output_dir(self):
        """
        The full path to the output directory.

        This includes the nested analyzer directory structure. This function
        does not create anything.
        """
        if not hasattr(self, "_output_dir"):
            output_dir = get_analyzer_dir(self.spec, self.dirname)
            self._output_dir = os.path.join(output_dir, self.name)

        return self._output_dir

    def save_result(self, result, overwrite=False):
        """
        Save a result to the associated spack monitor, if defined.

        This function is on the level of the analyzer because it might be
        the case that the result is large (appropriate for a single request)
        or that the data is organized differently (e.g., more than one
        request per result). If an analyzer subclass needs to over-write
        this function with a custom save, that is appropriate to do (see abi).
        """
        # We maintain the structure in json with the analyzer as key so
        # that in the future, we could upload to a monitor server
        if result[self.name]:

            outfile = os.path.join(self.output_dir, self.outfile)

            # Only try to create the results directory if we have a result
            if not os.path.exists(self._output_dir):
                os.makedirs(self._output_dir)

            # Don't overwrite an existing result if overwrite is False
            if os.path.exists(outfile) and not overwrite:
                tty.info("%s exists and overwrite is False, skipping." % outfile)
            else:
                tty.info("Writing result to %s" % outfile)
                spack.monitor.write_json(result[self.name], outfile)

        # This hook runs after a save result
        spack.hooks.on_analyzer_save(self.spec.package, result)
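A hedged sketch of the subclass contract described in __init__ above: a child only needs name/outfile/description attributes and a run() returning a dict keyed by the analyzer name. This NullAnalyzer is illustrative, not a shipped analyzer:

from spack.analyzers.analyzer_base import AnalyzerBase


class NullAnalyzer(AnalyzerBase):

    name = "null"
    outfile = "spack-analyzer-null.json"
    description = "example analyzer that records nothing"

    def run(self):
        # Result is keyed by the analyzer name, as save_result() expects.
        return {self.name: {}}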
33
lib/spack/spack/analyzers/config_args.py
Normal file
@@ -0,0 +1,33 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""A configargs analyzer is a class of analyzer that typically just uploads
already existing metadata about config args from a package spec install
directory."""


import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class ConfigArgs(AnalyzerBase):

    name = "config_args"
    outfile = "spack-analyzer-config-args.json"
    description = "config args loaded from spack-configure-args.txt"

    def run(self):
        """
        Load the configure-args.txt and save in json.

        The run function will find the spack-configure-args.txt file in the
        package install directory, and read it into a json structure that has
        the name of the analyzer as the key.
        """
        config_file = os.path.join(self.meta_dir, "spack-configure-args.txt")
        return {self.name: spack.monitor.read_file(config_file)}
54
lib/spack/spack/analyzers/environment_variables.py
Normal file
@@ -0,0 +1,54 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An environment analyzer will read and parse the environment variables
file in the installed package directory, generating a json file that has
an index of key, value pairs for environment variables."""


import os

import llnl.util.tty as tty

from spack.util.environment import EnvironmentModifications

from .analyzer_base import AnalyzerBase


class EnvironmentVariables(AnalyzerBase):

    name = "environment_variables"
    outfile = "spack-analyzer-environment-variables.json"
    description = "environment variables parsed from spack-build-env.txt"

    def run(self):
        """
        Load, parse, and save spack-build-env.txt to analyzers.

        Read in the spack-build-env.txt file from the package install
        directory and parse the environment variables into key value pairs.
        The result should have the key for the analyzer, the name.
        """
        env_file = os.path.join(self.meta_dir, "spack-build-env.txt")
        return {self.name: self._read_environment_file(env_file)}

    def _read_environment_file(self, filename):
        """
        Read and parse the environment file.

        Given an environment file, we want to read it, split by semicolons
        and new lines, and then parse down to the subset of SPACK_* variables.
        We assume that all spack prefix variables are not secrets, and unlike
        the install_manifest.json, we don't (at least to start) parse the values
        to remove path prefixes specific to user systems.
        """
        if not os.path.exists(filename):
            tty.warn("No environment file available")
            return

        mods = EnvironmentModifications.from_sourcing_file(filename)
        env = {}
        mods.apply_modifications(env)
        return env
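Illustrative call sequence for the analyzer above, assuming `spec` is a concrete, installed spec:

analyzer = EnvironmentVariables(spec)
result = analyzer.run()       # {"environment_variables": {...parsed env...}}
analyzer.save_result(result)  # writes the analyzer's outfile under output_dir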
31
lib/spack/spack/analyzers/install_files.py
Normal file
@@ -0,0 +1,31 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""The install files json file (install_manifest.json) already exists in
the package install folder, so this analyzer simply moves it to the user
analyzer folder for further processing."""


import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class InstallFiles(AnalyzerBase):

    name = "install_files"
    outfile = "spack-analyzer-install-files.json"
    description = "install file listing read from install_manifest.json"

    def run(self):
        """
        Load in the install_manifest.json and save to analyzers.

        We write it out to the analyzers folder, with key as the analyzer name.
        """
        manifest_file = os.path.join(self.meta_dir, "install_manifest.json")
        return {self.name: spack.monitor.read_json(manifest_file)}
114
lib/spack/spack/analyzers/libabigail.py
Normal file
@@ -0,0 +1,114 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.tty as tty

import spack
import spack.binary_distribution
import spack.bootstrap
import spack.error
import spack.hooks
import spack.monitor
import spack.package
import spack.repo
import spack.util.executable

from .analyzer_base import AnalyzerBase


class Libabigail(AnalyzerBase):

    name = "libabigail"
    outfile = "spack-analyzer-libabigail.json"
    description = "Application Binary Interface (ABI) features for objects"

    def __init__(self, spec, dirname=None):
        """
        init for an analyzer ensures we have all needed dependencies.

        For the libabigail analyzer, this means Libabigail.
        Since the output for libabigail is one file per object, we communicate
        with the monitor multiple times.
        """
        super(Libabigail, self).__init__(spec, dirname)

        # This doesn't seem to work to import on the module level
        tty.debug("Preparing to use Libabigail, will install if missing.")

        with spack.bootstrap.ensure_bootstrap_configuration():
            # libabigail won't install lib/bin/share without docs
            spec = spack.spec.Spec("libabigail+docs")
            spack.bootstrap.ensure_executables_in_path_or_raise(
                ["abidw"], abstract_spec=spec
            )
            self.abidw = spack.util.executable.which('abidw')

    def run(self):
        """
        Run libabigail, and save results to filename.

        This run function differs in that we write as we generate and then
        return a dict with the analyzer name as the key, and the value of a
        dict of results, where the key is the object name, and the value is
        the output file written to.
        """
        manifest = spack.binary_distribution.get_buildfile_manifest(self.spec)

        # This result will store a path to each file
        result = {}

        # Generate an output file for each binary or object
        for obj in manifest.get("binary_to_relocate_fullpath", []):

            # We want to preserve the path in the install directory in case
            # a library has an equivalently named lib or executable, for example
            outdir = os.path.dirname(obj.replace(self.spec.package.prefix,
                                                 '').strip(os.path.sep))
            outfile = "spack-analyzer-libabigail-%s.xml" % os.path.basename(obj)
            outfile = os.path.join(self.output_dir, outdir, outfile)
            outdir = os.path.dirname(outfile)

            # Create the output directory
            if not os.path.exists(outdir):
                os.makedirs(outdir)

            # Sometimes libabigail segfaults and dumps
            try:
                self.abidw(obj, "--out-file", outfile)
                result[obj] = outfile
                tty.info("Writing result to %s" % outfile)
            except spack.error.SpackError:
                tty.warn("Issue running abidw for %s" % obj)

        return {self.name: result}

    def save_result(self, result, overwrite=False):
        """
        Read saved ABI results and upload to monitor server.

        ABI results are saved to individual files, so each one needs to be
        read and uploaded. Result here should be the lookup generated in run(),
        the key is the analyzer name, and each value is the result file.
        We currently upload the entire xml as text because libabigail can't
        easily read gzipped xml, but this will be updated when it can.
        """
        if not spack.monitor.cli:
            return

        name = self.spec.package.name

        for obj, filename in result.get(self.name, {}).items():

            # Don't include the prefix
            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

            # We've already saved the results to file during run
            content = spack.monitor.read_file(filename)

            # A result needs an analyzer, value or binary_value, and name
            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
            spack.hooks.on_analyzer_save(self.spec.package, {"libabigail": [data]})
@@ -281,15 +281,15 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        test_callbacks = pkg_cls.build_time_test_callbacks
        pkg = spack.repo.get(pkg_name)
        test_callbacks = pkg.build_time_test_callbacks

        if test_callbacks and 'test' in test_callbacks:
            msg = ('{0} package contains "test" method in '
                   'build_time_test_callbacks')
            instr = ('Remove "test" from: [{0}]'
                     .format(', '.join(test_callbacks)))
            errors.append(error_cls(msg.format(pkg_name), [instr]))
            errors.append(error_cls(msg.format(pkg.name), [instr]))

    return errors

@@ -298,14 +298,13 @@ def _check_build_test_callbacks(pkgs, error_cls):
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        for condition, patches in pkg_cls.patches.items():
        pkg = spack.repo.get(pkg_name)
        for condition, patches in pkg.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
                    continue
@@ -317,7 +316,7 @@ def _check_patch_urls(pkgs, error_cls):
                if not patch.url.endswith(full_index_arg):
                    errors.append(error_cls(
                        "patch URL in package {0} must end with {1}".format(
                            pkg_cls.name, full_index_arg,
                            pkg.name, full_index_arg,
                        ),
                        [patch.url],
                    ))
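For reference, a small self-check of the simpler regex above (the full_index_arg suffix it must end with is defined outside this hunk, so it is omitted here):

import re

github_patch_url_re = (
    r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
)
url = "https://github.com/spack/spack/pull/12345.patch"
assert re.match(github_patch_url_re, url)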
@@ -331,21 +330,21 @@ def _linting_package_file(pkgs, error_cls):
    """
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = spack.repo.get(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg_cls.homepage.startswith('http://'):
            https = re.sub("http", "https", pkg_cls.homepage, 1)
        if pkg.homepage.startswith('http://'):
            https = re.sub("http", "https", pkg.homepage, 1)
            try:
                response = urlopen(https)
            except Exception as e:
                msg = 'Error with attempting https for "{0}": '
                errors.append(error_cls(msg.format(pkg_cls.name), [str(e)]))
                errors.append(error_cls(msg.format(pkg.name), [str(e)]))
                continue

            if response.getcode() == 200:
                msg = 'Package "{0}" uses http but has a valid https endpoint.'
                errors.append(msg.format(pkg_cls.name))
                errors.append(msg.format(pkg.name))

    return llnl.util.lang.dedupe(errors)

@@ -355,10 +354,10 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = spack.repo.get(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg_cls.conflicts.items():
        for conflict, triggers in pkg.conflicts.items():
            for trigger, _ in triggers:
                vrn = spack.spec.Spec(conflict)
                try:
@@ -371,34 +370,34 @@ def _unknown_variants_in_directives(pkgs, error_cls):
                    # When os and target constraints can be created independently of
                    # the platform, TODO change this back to add an error.
                    errors.extend(_analyze_variants_in_directive(
                        pkg_cls, spack.spec.Spec(trigger),
                        pkg, spack.spec.Spec(trigger),
                        directive='conflicts', error_cls=error_cls
                    ))
                errors.extend(_analyze_variants_in_directive(
                    pkg_cls, vrn, directive='conflicts', error_cls=error_cls
                    pkg, vrn, directive='conflicts', error_cls=error_cls
                ))

        # Check "depends_on" directive
        for _, triggers in pkg_cls.dependencies.items():
        for _, triggers in pkg.dependencies.items():
            triggers = list(triggers)
            for trigger in list(triggers):
                vrn = spack.spec.Spec(trigger)
                errors.extend(_analyze_variants_in_directive(
                    pkg_cls, vrn, directive='depends_on', error_cls=error_cls
                    pkg, vrn, directive='depends_on', error_cls=error_cls
                ))

        # Check "patch" directive
        for _, triggers in pkg_cls.provided.items():
        for _, triggers in pkg.provided.items():
            triggers = [spack.spec.Spec(x) for x in triggers]
            for vrn in triggers:
                errors.extend(_analyze_variants_in_directive(
                    pkg_cls, vrn, directive='patch', error_cls=error_cls
                    pkg, vrn, directive='patch', error_cls=error_cls
                ))

        # Check "resource" directive
        for vrn in pkg_cls.resources:
        for vrn in pkg.resources:
            errors.extend(_analyze_variants_in_directive(
                pkg_cls, vrn, directive='resource', error_cls=error_cls
                pkg, vrn, directive='resource', error_cls=error_cls
            ))

    return llnl.util.lang.dedupe(errors)
@@ -409,15 +408,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = spack.repo.get(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
        for dependency_name, dependency_data in pkg.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.path.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
                dependency_pkg = spack.repo.get(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -433,8 +432,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
                dependency_variants = dependency_edge.spec.variants
                for name, value in dependency_variants.items():
                    try:
                        v, _ = dependency_pkg_cls.variants[name]
                        v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
                        v, _ = dependency_pkg.variants[name]
                        v.validate_or_raise(value, pkg=dependency_pkg)
                    except Exception as e:
                        summary = (pkg_name + ": wrong variant used for a "
                                   "dependency in a 'depends_on' directive")
@@ -456,10 +455,10 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = spack.repo.get(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
        for dependency_name, dependency_data in pkg.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.path.is_virtual(dependency_name):
@@ -470,19 +469,19 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
            )

        for s in dependencies_to_check:
            dependency_pkg_cls = None
            dependency_pkg = None
            try:
                dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
                dependency_pkg = spack.repo.get(s.name)
                assert any(
                    v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions)
                    v.satisfies(s.versions) for v in list(dependency_pkg.versions)
                )
            except Exception:
                summary = ("{0}: dependency on {1} cannot be satisfied "
                           "by known versions of {1.name}").format(pkg_name, s)
                details = ['happening in ' + filename]
                if dependency_pkg_cls is not None:
                if dependency_pkg is not None:
                    details.append('known versions of {0.name} are {1}'.format(
                        s, ', '.join([str(x) for x in dependency_pkg_cls.versions])
                        s, ', '.join([str(x) for x in dependency_pkg.versions])
                    ))
                errors.append(error_cls(summary=summary, details=details))

@@ -500,7 +499,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    for name, v in constraint.variants.items():
        try:
            variant, _ = pkg.variants[name]
            variant.validate_or_raise(v, pkg_cls=pkg)
            variant.validate_or_raise(v, pkg=pkg)
        except variant_exceptions as e:
            summary = pkg.name + ': wrong variant in "{0}" directive'
            summary = summary.format(directive)

@@ -210,7 +210,7 @@ def get_all_built_specs(self):

        return spec_list

    def find_built_spec(self, spec, mirrors_to_check=None):
    def find_built_spec(self, spec):
        """Look in our cache for the built spec corresponding to ``spec``.

        If the spec can be found among the configured binary mirrors, a
@@ -225,8 +225,6 @@ def find_built_spec(self, spec, mirrors_to_check=None):

        Args:
            spec (spack.spec.Spec): Concrete spec to find
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.

        Returns:
            A list of objects containing the found specs and mirror url where
@@ -242,23 +240,17 @@ def find_built_spec(self, spec, mirrors_to_check=None):
            ]
        """
        self.regenerate_spec_cache()
        return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)
        return self.find_by_hash(spec.dag_hash())

    def find_by_hash(self, find_hash, mirrors_to_check=None):
    def find_by_hash(self, find_hash):
        """Same as find_built_spec but uses the hash of a spec.

        Args:
            find_hash (str): hash of the spec to search
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.
        """
        if find_hash not in self._mirrors_for_spec:
            return None
        results = self._mirrors_for_spec[find_hash]
        if not mirrors_to_check:
            return results
        mirror_urls = mirrors_to_check.values()
        return [r for r in results if r['mirror_url'] in mirror_urls]
        return self._mirrors_for_spec[find_hash]

    def update_spec(self, spec, found_list):
        """
@@ -571,13 +563,6 @@ def __init__(self, msg):
        super(NewLayoutException, self).__init__(msg)


class UnsignedPackageException(spack.error.SpackError):
    """
    Raised if installation of unsigned package is attempted without
    the use of ``--no-check-signature``.
    """


def compute_hash(data):
    return hashlib.sha256(data.encode('utf-8')).hexdigest()

@@ -766,16 +751,15 @@ def select_signing_key(key=None):
    return key


def sign_specfile(key, force, specfile_path):
    signed_specfile_path = '%s.sig' % specfile_path
    if os.path.exists(signed_specfile_path):
def sign_tarball(key, force, specfile_path):
    if os.path.exists('%s.asc' % specfile_path):
        if force:
            os.remove(signed_specfile_path)
            os.remove('%s.asc' % specfile_path)
        else:
            raise NoOverwriteException(signed_specfile_path)
            raise NoOverwriteException('%s.asc' % specfile_path)

    key = select_signing_key(key)
    spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
    spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)


def _fetch_spec_from_mirror(spec_url):
@@ -784,10 +768,7 @@ def _fetch_spec_from_mirror(spec_url):
    _, _, spec_file = web_util.read_from_url(spec_url)
    spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
    # Need full spec.json name or this gets confused with index.json.
    if spec_url.endswith('.json.sig'):
        specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
        s = Spec.from_dict(specfile_json)
    elif spec_url.endswith('.json'):
    if spec_url.endswith('.json'):
        s = Spec.from_json(spec_file_contents)
    elif spec_url.endswith('.yaml'):
        s = Spec.from_yaml(spec_file_contents)
@@ -848,9 +829,7 @@ def generate_package_index(cache_prefix):
        file_list = (
            entry
            for entry in web_util.list_url(cache_prefix)
            if entry.endswith('.yaml') or
            entry.endswith('spec.json') or
            entry.endswith('spec.json.sig'))
            if entry.endswith('.yaml') or entry.endswith('spec.json'))
    except KeyError as inst:
        msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
        tty.warn(msg)
@@ -965,7 +944,7 @@ def _build_tarball(
    tmpdir = tempfile.mkdtemp()
    cache_prefix = build_cache_prefix(tmpdir)

    tarfile_name = tarball_name(spec, '.spack')
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    spackfile_path = os.path.join(
@@ -988,12 +967,10 @@ def _build_tarball(
    spec_file = spack.store.layout.spec_file_path(spec)
    specfile_name = tarball_name(spec, '.spec.json')
    specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
    signed_specfile_path = '{0}.sig'.format(specfile_path)
    deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')

    remote_specfile_path = url_util.join(
        outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
    remote_signed_specfile_path = '{0}.sig'.format(remote_specfile_path)
    remote_specfile_path_deprecated = url_util.join(
        outdir, os.path.relpath(deprecated_specfile_path,
                                os.path.realpath(tmpdir)))
@@ -1002,12 +979,9 @@ def _build_tarball(
    if force:
        if web_util.url_exists(remote_specfile_path):
            web_util.remove_url(remote_specfile_path)
        if web_util.url_exists(remote_signed_specfile_path):
            web_util.remove_url(remote_signed_specfile_path)
        if web_util.url_exists(remote_specfile_path_deprecated):
            web_util.remove_url(remote_specfile_path_deprecated)
    elif (web_util.url_exists(remote_specfile_path) or
          web_util.url_exists(remote_signed_specfile_path) or
          web_util.url_exists(remote_specfile_path_deprecated)):
        raise NoOverwriteException(url_util.format(remote_specfile_path))

@@ -1069,7 +1043,6 @@ def _build_tarball(
        raise ValueError(
            '{0} not a valid spec file type (json or yaml)'.format(
                spec_file))
    spec_dict['buildcache_layout_version'] = 1
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
@@ -1088,15 +1061,25 @@ def _build_tarball(
    # sign the tarball and spec file with gpg
    if not unsigned:
        key = select_signing_key(key)
        sign_specfile(key, force, specfile_path)
        sign_tarball(key, force, specfile_path)

    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name=specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    # push tarball and signed spec json to remote mirror
    web_util.push_to_url(
        spackfile_path, remote_spackfile_path, keep_original=False)
    web_util.push_to_url(
        signed_specfile_path if not unsigned else specfile_path,
        remote_signed_specfile_path if not unsigned else remote_specfile_path,
        keep_original=False)
        specfile_path, remote_specfile_path, keep_original=False)

    tty.debug('Buildcache for "{0}" written to \n {1}'
              .format(spec, remote_spackfile_path))
@@ -1179,174 +1162,48 @@ def push(specs, push_url, specs_kwargs=None, **kwargs):
            warnings.warn(str(e))


def try_verify(specfile_path):
    """Utility function to attempt to verify a local file. Assumes the
    file is a clearsigned signature file.

    Args:
        specfile_path (str): Path to file to be verified.

    Returns:
        ``True`` if the signature could be verified, ``False`` otherwise.
    """
    suppress = config.get('config:suppress_gpg_warnings', False)

    try:
        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
    except Exception:
        return False

    return True


def try_fetch(url_to_fetch):
    """Utility function to try and fetch a file from a url, stage it
    locally, and return the path to the staged file.

    Args:
        url_to_fetch (str): Url pointing to remote resource to fetch

    Returns:
        Path to locally staged resource or ``None`` if it could not be fetched.
    """
    stage = Stage(url_to_fetch, keep=True)
    stage.create()

    try:
        stage.fetch()
    except fs.FetchError:
        stage.destroy()
        return None

    return stage


def _delete_staged_downloads(download_result):
    """Clean up stages used to download tarball and specfile"""
    download_result['tarball_stage'].destroy()
    download_result['specfile_stage'].destroy()


def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
def download_tarball(spec, preferred_mirrors=None):
    """
    Download binary tarball for given package into stage area, returning
    path to downloaded tarball if successful, None otherwise.

    Args:
        spec (spack.spec.Spec): Concrete spec
        unsigned (bool): Whether or not to require signed binaries
        mirrors_for_spec (list): Optional list of concrete specs and mirrors
            obtained by calling binary_distribution.get_mirrors_for_spec().
            These will be checked in order first before looking in other
            configured mirrors.
        preferred_mirrors (list): If provided, this is a list of preferred
            mirror urls. Other configured mirrors will only be used if the
            tarball can't be retrieved from one of these.

    Returns:
        ``None`` if the tarball could not be downloaded (maybe also verified,
        depending on whether new-style signed binary packages were found).
        Otherwise, return an object indicating the path to the downloaded
        tarball, the path to the downloaded specfile (in the case of new-style
        buildcache), and whether or not the tarball is already verified.

        .. code-block:: JSON

           {
               "tarball_path": "path-to-locally-saved-tarfile",
               "specfile_path": "none-or-path-to-locally-saved-specfile",
               "signature_verified": "true-if-binary-pkg-was-already-verified"
           }
        Path to the downloaded tarball, or ``None`` if the tarball could not
        be downloaded from any configured mirrors.
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")

    tarball = tarball_path_name(spec, '.spack')
    specfile_prefix = tarball_name(spec, '.spec')

    mirrors_to_try = []
    urls_to_try = []

    # Note on try_first and try_next:
    # mirrors_for_spec most likely came from spack caching remote
    # mirror indices locally and adding their specs to a local data
    # structure supporting quick lookup of concrete specs. Those
    # mirrors are likely a subset of all configured mirrors, and
    # we'll probably find what we need in one of them. But we'll
    # look in all configured mirrors if needed, as maybe the spec
    # we need was in an un-indexed mirror. No need to check any
    # mirror for the spec twice though.
    try_first = [i['mirror_url'] for i in mirrors_for_spec] if mirrors_for_spec else []
    try_next = [
        i.fetch_url for i in spack.mirror.MirrorCollection().values()
        if i.fetch_url not in try_first
    ]
    if preferred_mirrors:
        for preferred_url in preferred_mirrors:
            urls_to_try.append(url_util.join(
                preferred_url, _build_cache_relative_path, tarball))

    for url in try_first + try_next:
        mirrors_to_try.append({
            'specfile': url_util.join(url,
                _build_cache_relative_path, specfile_prefix),
            'spackfile': url_util.join(url,
                _build_cache_relative_path, tarball)
        })
    for mirror in spack.mirror.MirrorCollection().values():
        if not preferred_mirrors or mirror.fetch_url not in preferred_mirrors:
            urls_to_try.append(url_util.join(
                mirror.fetch_url, _build_cache_relative_path, tarball))

    tried_to_verify_sigs = []

    # Assumes we care more about finding a spec file by preferred ext
    # than by mirror priority. This can be made less complicated as
    # we remove support for deprecated spec formats and buildcache layouts.
    for ext in ['json.sig', 'json', 'yaml']:
        for mirror_to_try in mirrors_to_try:
            specfile_url = '{0}.{1}'.format(mirror_to_try['specfile'], ext)
            spackfile_url = mirror_to_try['spackfile']
            local_specfile_stage = try_fetch(specfile_url)
            if local_specfile_stage:
                local_specfile_path = local_specfile_stage.save_filename
                signature_verified = False

                if ext.endswith('.sig') and not unsigned:
                    # If we found a signed specfile at the root, try to verify
                    # the signature immediately. We will not download the
                    # tarball if we could not verify the signature.
                    tried_to_verify_sigs.append(specfile_url)
                    signature_verified = try_verify(local_specfile_path)
                    if not signature_verified:
                        tty.warn("Failed to verify: {0}".format(specfile_url))

                if unsigned or signature_verified or not ext.endswith('.sig'):
                    # We will download the tarball in one of three cases:
                    # 1. user asked for --no-check-signature
                    # 2. user didn't ask for --no-check-signature, but we
                    # found a spec.json.sig and verified the signature already
                    # 3. neither of the first two cases are true, but this file
                    # is *not* a signed json (not a spec.json.sig file). That
                    # means we already looked at all the mirrors and either didn't
                    # find any .sig files or couldn't verify any of them. But it
                    # is still possible to find an old style binary package where
                    # the signature is a detached .asc file in the outer archive
                    # of the tarball, and in that case, the only way to know is to
                    # download the tarball. This is a deprecated use case, so if
                    # something goes wrong during the extraction process (can't
                    # verify signature, checksum doesn't match) we will fail at
                    # that point instead of trying to download more tarballs from
                    # the remaining mirrors, looking for one we can use.
                    tarball_stage = try_fetch(spackfile_url)
                    if tarball_stage:
                        return {
                            'tarball_stage': tarball_stage,
                            'specfile_stage': local_specfile_stage,
                            'signature_verified': signature_verified,
                        }

                local_specfile_stage.destroy()

    # Falling through the nested loops means we exhaustively searched
    # for all known kinds of spec files on all mirrors and did not find
    # an acceptable one for which we could download a tarball.

    if tried_to_verify_sigs:
        raise NoVerifyException(("Spack found new style signed binary packages, "
                                 "but was unable to verify any of them. Please "
                                 "obtain and trust the correct public key. If "
                                 "these are public spack binaries, please see the "
                                 "spack docs for locations where keys can be found."))
    for try_url in urls_to_try:
        # stage the tarball into standard place
        stage = Stage(try_url, name="build_cache", keep=True)
        stage.create()
        try:
            stage.fetch()
            return stage.save_filename
        except fs.FetchError:
            continue

    tty.warn("download_tarball() was unable to download " +
             "{0} from any configured mirrors".format(spec))
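A sketch of consuming the dict-shaped return value documented in the docstring above (the first of the two signatures shown); the names follow that docstring and `spec` is assumed to be a concrete spec:

download_result = download_tarball(spec, unsigned=False)
if download_result:
    tarball_path = download_result['tarball_stage'].save_filename
    if not download_result['signature_verified']:
        # Old-style package: verification happens later, during extraction.
        pass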
@@ -1520,55 +1377,7 @@ def is_backup_file(file):
    relocate.relocate_text(text_names, prefix_to_prefix_text)


def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(extract_to, tarfile_name)
    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
    deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
    json_name = tarball_name(spec, '.spec.json')
    json_path = os.path.join(extract_to, json_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(extract_to)
    # some buildcache tarfiles use bzip2 compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.bz2')
        tarfile_path = os.path.join(extract_to, tarfile_name)

    if os.path.exists(json_path):
        specfile_path = json_path
    elif os.path.exists(deprecated_yaml_path):
        specfile_path = deprecated_yaml_path
    else:
        raise ValueError('Cannot find spec file for {0}.'.format(extract_to))

    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            suppress = config.get('config:suppress_gpg_warnings', False)
            try:
                spack.util.gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
            except Exception:
                raise NoVerifyException("Spack was unable to verify package "
                                        "signature, please obtain and trust the "
                                        "correct public key.")
        else:
            raise UnsignedPackageException(
                "To install unsigned packages, use the --no-check-signature option.")
    # get the sha256 checksum of the tarball
    local_checksum = checksum_tarball(tarfile_path)

    # if the checksums don't match don't install
    if local_checksum != remote_checksum['hash']:
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    return tarfile_path


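For context, checksum_tarball() used above is assumed to produce a sha256 hex digest of the file, equivalent to this sketch:

import hashlib


def sha256_digest(path, blocksize=65536):
    """Stream the file so large tarballs don't need to fit in memory."""
    hasher = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            hasher.update(block)
    return hasher.hexdigest()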
def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
|
||||
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
force=False):
|
||||
"""
|
||||
extract binary tarball for given package into install area
|
||||
@@ -1579,56 +1388,66 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
|
||||
else:
|
||||
raise NoOverwriteException(str(spec.prefix))
|
||||
|
||||
specfile_path = download_result['specfile_stage'].save_filename
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
stagepath = os.path.dirname(filename)
|
||||
spackfile_name = tarball_name(spec, '.spack')
|
||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||
tarfile_name = tarball_name(spec, '.tar.gz')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
specfile_is_json = True
|
||||
deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
|
||||
deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
|
||||
json_name = tarball_name(spec, '.spec.json')
|
||||
json_path = os.path.join(tmpdir, json_name)
|
||||
with closing(tarfile.open(spackfile_path, 'r')) as tar:
|
||||
tar.extractall(tmpdir)
|
||||
# some buildcache tarfiles use bzip2 compression
|
||||
if not os.path.exists(tarfile_path):
|
||||
tarfile_name = tarball_name(spec, '.tar.bz2')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
|
||||
if os.path.exists(json_path):
|
||||
specfile_path = json_path
|
||||
elif os.path.exists(deprecated_yaml_path):
|
||||
specfile_is_json = False
|
||||
specfile_path = deprecated_yaml_path
|
||||
else:
|
||||
raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
|
||||
|
||||
if not unsigned:
|
||||
if os.path.exists('%s.asc' % specfile_path):
|
||||
try:
|
||||
suppress = config.get('config:suppress_gpg_warnings', False)
|
||||
spack.util.gpg.verify(
|
||||
'%s.asc' % specfile_path, specfile_path, suppress)
|
||||
except Exception as e:
|
||||
shutil.rmtree(tmpdir)
|
||||
raise e
|
||||
else:
|
||||
shutil.rmtree(tmpdir)
|
||||
raise NoVerifyException(
|
||||
"Package spec file failed signature verification.\n"
|
||||
"Use spack buildcache keys to download "
|
||||
"and install a key for verification from the mirror.")
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
# get the sha256 checksum recorded at creation
|
||||
spec_dict = {}
|
||||
with open(specfile_path, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
if specfile_path.endswith('.json.sig'):
|
||||
spec_dict = Spec.extract_json_from_clearsig(content)
|
||||
elif specfile_path.endswith('.json'):
|
||||
if specfile_is_json:
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
spec_dict = syaml.load(content)
|
||||
|
||||
bchecksum = spec_dict['binary_cache_checksum']
|
||||
filename = download_result['tarball_stage'].save_filename
|
||||
signature_verified = download_result['signature_verified']
|
||||
tmpdir = None
|
||||
|
||||
if ('buildcache_layout_version' not in spec_dict or
|
||||
int(spec_dict['buildcache_layout_version']) < 1):
|
||||
# Handle the older buildcache layout where the .spack file
|
||||
# contains a spec json/yaml, maybe an .asc file (signature),
|
||||
# and another tarball containing the actual install tree.
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
try:
|
||||
tarfile_path = _extract_inner_tarball(
|
||||
spec, filename, tmpdir, unsigned, bchecksum)
|
||||
except Exception as e:
|
||||
_delete_staged_downloads(download_result)
|
||||
shutil.rmtree(tmpdir)
|
||||
raise e
|
||||
else:
|
||||
# Newer buildcache layout: the .spack file contains just
|
||||
# in the install tree, the signature, if it exists, is
|
||||
# wrapped around the spec.json at the root. If sig verify
|
||||
# was required, it was already done before downloading
|
||||
# the tarball.
|
||||
tarfile_path = filename
|
||||
|
||||
if not unsigned and not signature_verified:
|
||||
raise UnsignedPackageException(
|
||||
"To install unsigned packages, use the --no-check-signature option.")
|
||||
|
||||
# compute the sha256 checksum of the tarball
|
||||
local_checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
# if the checksums don't match don't install
|
||||
if local_checksum != bchecksum['hash']:
|
||||
_delete_staged_downloads(download_result)
|
||||
raise NoChecksumException(
|
||||
"Package tarball failed checksum verification.\n"
|
||||
"It cannot be installed.")
|
||||
# if the checksums don't match don't install
|
||||
if bchecksum['hash'] != checksum:
|
||||
shutil.rmtree(tmpdir)
|
||||
raise NoChecksumException(
|
||||
"Package tarball failed checksum verification.\n"
|
||||
"It cannot be installed.")
|
||||
|
||||
new_relative_prefix = str(os.path.relpath(spec.prefix,
|
||||
spack.store.layout.root))
|
||||
@@ -1653,13 +1472,11 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
    try:
        tar.extractall(path=extract_tmp)
    except Exception as e:
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
    try:
        shutil.move(extracted_dir, spec.prefix)
    except Exception as e:
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
    os.remove(tarfile_path)
@@ -1678,11 +1495,9 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
        spec_id = spec.format('{name}/{hash:7}')
        tty.warn('No manifest file in tarball for spec %s' % spec_id)
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir)
        shutil.rmtree(tmpdir)
        if os.path.exists(filename):
            os.remove(filename)
        _delete_staged_downloads(download_result)

def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
@@ -1710,23 +1525,21 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
        warnings.warn("Package for spec {0} already installed.".format(spec.format()))
        return

    download_result = download_tarball(spec, unsigned)
    if not download_result:
    tarball = download_tarball(spec)
    if not tarball:
        msg = 'download of binary cache file for spec "{0}" failed'
        raise RuntimeError(msg.format(spec.format()))

    if sha256:
        checker = spack.util.crypto.Checker(sha256)
        msg = 'cannot verify checksum for "{0}" [expected={1}]'
        tarball_path = download_result['tarball_stage'].save_filename
        msg = msg.format(tarball_path, sha256)
        if not checker.check(tarball_path):
            _delete_staged_downloads(download_result)
        msg = msg.format(tarball, sha256)
        if not checker.check(tarball):
            raise spack.binary_distribution.NoChecksumException(msg)
        tty.debug('Verified SHA256 checksum of the build cache')

    tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
    extract_tarball(spec, download_result, allow_root, unsigned, force)
    extract_tarball(spec, tarball, allow_root, unsigned, force)
    spack.hooks.post_install(spec)
    spack.store.db.add(spec, spack.store.layout)

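Note: the spec file variants that recur in these hunks differ only in their wrapping; a `.spec.json.sig` file is an ordinary GPG clearsigned document with the spec JSON as its payload. A rough sketch of pulling the JSON back out (Spack's real helper is `Spec.extract_json_from_clearsig`; the armor markers are the standard RFC 4880 strings, and the parsing below is a simplification that ignores dash-escaping):

import json

def extract_json_from_clearsig(data):
    # The armor header block ends at the first blank line after the
    # BEGIN marker; the payload runs until the signature block starts.
    begin = data.index('-----BEGIN PGP SIGNED MESSAGE-----')
    payload_start = data.index('\n\n', begin) + 2
    payload_end = data.index('-----BEGIN PGP SIGNATURE-----')
    return json.loads(data[payload_start:payload_end])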
@@ -1752,8 +1565,6 @@ def try_direct_fetch(spec, mirrors=None):
    """
    deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_name = tarball_name(spec, '.spec.json')
    signed_specfile_name = tarball_name(spec, '.spec.json.sig')
    specfile_is_signed = False
    specfile_is_json = True
    found_specs = []

@@ -1762,35 +1573,24 @@ def try_direct_fetch(spec, mirrors=None):
            mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
        buildcache_fetch_url_json = url_util.join(
            mirror.fetch_url, _build_cache_relative_path, specfile_name)
        buildcache_fetch_url_signed_json = url_util.join(
            mirror.fetch_url, _build_cache_relative_path, signed_specfile_name)
        try:
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
            specfile_is_signed = True
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
            try:
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
                try:
                    _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
                    specfile_is_json = False
                except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
                    tty.debug('Did not find {0} on {1}'.format(
                        specfile_name, buildcache_fetch_url_signed_json), url_err)
                    tty.debug('Did not find {0} on {1}'.format(
                        specfile_name, buildcache_fetch_url_json), url_err_x)
                    tty.debug('Did not find {0} on {1}'.format(
                        specfile_name, buildcache_fetch_url_yaml), url_err_y)
                    continue
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
                specfile_is_json = False
            except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
                tty.debug('Did not find {0} on {1}'.format(
                    specfile_name, buildcache_fetch_url_json), url_err)
                tty.debug('Did not find {0} on {1}'.format(
                    specfile_name, buildcache_fetch_url_yaml), url_err_y)
                continue
        specfile_contents = codecs.getreader('utf-8')(fs).read()

        # read the spec from the build cache file. All specs in build caches
        # are concrete (as they are built) so we need to mark this spec
        # concrete on read-in.
        if specfile_is_signed:
            specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
            fetched_spec = Spec.from_dict(specfile_json)
        elif specfile_is_json:
        if specfile_is_json:
            fetched_spec = Spec.from_json(specfile_contents)
        else:
            fetched_spec = Spec.from_yaml(specfile_contents)
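Note: the nested try/except blocks above implement a priority chain, newest format first (`.spec.json.sig`), then `.spec.json`, then the deprecated `.spec.yaml`, moving on to the next mirror only when all three miss. A minimal standalone sketch of the same idea, using plain urllib instead of Spack's `web_util` (the URL list is whatever the caller builds):

from urllib.error import URLError
from urllib.request import urlopen

def fetch_first(urls):
    # Return (url, body) for the first URL that answers, else None.
    # HTTPError subclasses URLError, so one except clause covers both.
    for url in urls:
        try:
            with urlopen(url) as response:
                return url, response.read()
        except URLError:
            continue  # fall through to the next, older format
    return None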
@@ -1827,7 +1627,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
        tty.debug("No Spack mirrors are currently configured")
        return {}

    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
    results = binary_index.find_built_spec(spec)

    # Maybe we just didn't have the latest information from the mirror, so
    # try to fetch directly, unless we are only considering the indices.
@@ -2117,8 +1917,7 @@ def download_single_spec(
        'path': local_tarball_path,
        'required': True,
    }, {
        'url': [tarball_name(concrete_spec, '.spec.json.sig'),
                tarball_name(concrete_spec, '.spec.json'),
        'url': [tarball_name(concrete_spec, '.spec.json'),
                tarball_name(concrete_spec, '.spec.yaml')],
        'path': destination,
        'required': True,

@@ -5,7 +5,6 @@
from __future__ import print_function

import contextlib
import copy
import fnmatch
import functools
import json
@@ -22,7 +21,6 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config
@@ -38,11 +36,6 @@
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = 'metadata.yaml'

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}
@@ -80,41 +73,32 @@ def _try_import_from_store(module, query_spec, query_info=None):

    for candidate_spec in installed_specs:
        pkg = candidate_spec['python'].package
        module_paths = [
        module_paths = {
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
        ]  # type: list[str]
        path_before = list(sys.path)

        # NOTE: try module_paths first and last, last allows an existing version in path
        # to be picked up and used, possibly depending on something in the store, first
        # allows the bootstrap version to work when an incompatible version is in
        # sys.path
        orders = [
            module_paths + sys.path,
            sys.path + module_paths,
        ]
        for path in orders:
            sys.path = path
            try:
                _fix_ext_suffix(candidate_spec)
                if _python_import(module):
                    msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
                           'provides the "{0}" Python module').format(
                        module, query_spec, candidate_spec.dag_hash()
                    )
                    tty.debug(msg)
                    if query_info is not None:
                        query_info['spec'] = candidate_spec
                    return True
            except Exception as e:
                msg = ('unexpected error while trying to import module '
                       '"{0}" from spec "{1}" [error="{2}"]')
                tty.warn(msg.format(module, candidate_spec, str(e)))
            else:
                msg = "Spec {0} did not provide module {1}"
                tty.warn(msg.format(candidate_spec, module))
        }
        sys.path.extend(module_paths)

        sys.path = path_before
        try:
            _fix_ext_suffix(candidate_spec)
            if _python_import(module):
                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
                       'provides the "{0}" Python module').format(
                    module, query_spec, candidate_spec.dag_hash()
                )
                tty.debug(msg)
                if query_info is not None:
                    query_info['spec'] = candidate_spec
                return True
        except Exception as e:
            msg = ('unexpected error while trying to import module '
                   '"{0}" from spec "{1}" [error="{2}"]')
            tty.warn(msg.format(module, candidate_spec, str(e)))
        else:
            msg = "Spec {0} did not provide module {1}"
            tty.warn(msg.format(candidate_spec, module))

    sys.path = sys.path[:-3]

    return False

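Note: the `orders` list above is the interesting part. The same import is attempted twice, once with the candidate store paths prepended (so the bootstrapped module wins over an incompatible system copy) and once appended (so a compatible pre-existing copy can still be used). A self-contained sketch of that probe using only the standard library (`_python_import` and `_fix_ext_suffix` are Spack helpers omitted here):

import importlib
import sys

def try_import_with_paths(module_name, extra_paths):
    saved = list(sys.path)
    try:
        for order in (extra_paths + saved, saved + extra_paths):
            sys.path = order
            try:
                importlib.import_module(module_name)
                return True
            except ImportError:
                continue
    finally:
        sys.path = saved  # always restore the original search path
    return False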
@@ -219,43 +203,12 @@ def _executables_in_store(executables, query_spec, query_info=None):
    return False


class _BootstrapperBase(object):
    """Base class to derive types that can bootstrap software for Spack"""
    config_scope_name = ''

@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(object):
    """Install the software needed during bootstrapping from a buildcache."""
    def __init__(self, conf):
        self.name = conf['name']
        self.url = conf['info']['url']

    @property
    def mirror_url(self):
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's an url if we find it
        if '://' in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
        )


@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(_BootstrapperBase):
    """Install the software needed during bootstrapping from a buildcache."""

    config_scope_name = 'bootstrap_buildcache'

    def __init__(self, conf):
        super(_BuildcacheBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.last_search = None

    @staticmethod
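Note: the `mirror_url` property above resolves three spellings of a source location. A sketch of the same decision tree, assuming `spack.util.url.format` canonicalizes a local path into a `file://` URL (the helper name below is hypothetical):

import os

def resolve_mirror_url(url, metadata_dir):
    if os.path.isabs(url):
        return 'file://' + url              # absolute path on disk
    if '://' in url:
        return url                          # already a URL, use verbatim
    # otherwise: a path relative to the bootstrapping metadata directory
    return 'file://' + os.path.abspath(os.path.join(metadata_dir, url))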
@@ -278,8 +231,9 @@ def _spec_and_platform(abstract_spec_str):
    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = '{0}.json'.format(package_name)
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        json_path = os.path.join(
            spack.paths.share_path, 'bootstrap', self.name, json_filename
        )
        with open(json_path) as f:
            data = json.load(f)
        return data
@@ -353,6 +307,12 @@ def _install_and_test(
                    return True
        return False

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
        )

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
@@ -382,13 +342,9 @@ def try_search_path(self, executables, abstract_spec_str):


@_bootstrapper(type='install')
class _SourceBootstrapper(_BootstrapperBase):
class _SourceBootstrapper(object):
    """Install the software needed during bootstrapping from sources."""
    config_scope_name = 'bootstrap_source'

    def __init__(self, conf):
        super(_SourceBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.conf = conf
        self.last_search = None

@@ -421,8 +377,7 @@ def try_import(self, module, abstract_spec_str):
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)
        concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
@@ -435,8 +390,6 @@ def try_search_path(self, executables, abstract_spec_str):
            self.last_search = info
            return True

        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()
@@ -449,8 +402,7 @@ def try_search_path(self, executables, abstract_spec_str):

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
@@ -465,11 +417,11 @@ def _make_bootstrapper(conf):
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
def _source_is_trusted(conf):
    trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
    if not trusted.get(name, False):
        raise ValueError('source is not trusted')
    if name not in trusted:
        return False
    return trusted[name]


def spec_for_current_python():
@@ -534,26 +486,36 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
        return

    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])

    h = GroupedExceptionHandler()
    errors = {}

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            source_is_enabled_or_raise(current_config)
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
                   'not trusted').format(module, current_config['name'])
            tty.debug(msg)
            continue

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_import(module, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))
            errors[current_config['name']] = e

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    msg += ' due to the following failures:\n'
    for method in errors:
        err = errors[method]
        msg += "    '{0}' raised {1}: {2}\n".format(
            method, err.__class__.__name__, str(err))
    msg += ' Please run `spack -d spec zlib` for more verbose error messages'
    raise ImportError(msg)

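Note: the newer side of this hunk replaces the ad-hoc `errors` dict with `GroupedExceptionHandler`. Judging from the usage in this diff, `h.forward(name)` is a context manager that records (and suppresses) whatever the block raises under `name`, `bool(h)` reports whether anything was recorded, and `grouped_message()` renders one combined report. A sketch of the pattern under those assumptions (`named_attempts` and `first_success` are hypothetical):

from llnl.util.lang import GroupedExceptionHandler

def first_success(named_attempts):
    h = GroupedExceptionHandler()
    for name, attempt in named_attempts:
        with h.forward(name):       # record-and-continue on failure
            if attempt():
                return name
    assert h, 'every attempt failed, so at least one exception was recorded'
    raise RuntimeError(h.grouped_message(with_tracebacks=False))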
@@ -576,14 +538,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
        return cmd

    executables_str = ', '.join(executables)
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
                   'not trusted').format(executables_str, current_config['name'])
            tty.debug(msg)
            continue

    h = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            source_is_enabled_or_raise(current_config)

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
@@ -598,16 +562,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
                )
                cmd.add_default_envmod(env_mods)
                return cmd
        except Exception as e:
            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
            tty.debug(msg.format(executables_str, str(e)))

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise RuntimeError(msg)

@@ -652,10 +614,10 @@ def _add_compilers_if_missing():
def _add_externals_if_missing():
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class('cmake'),
        spack.repo.path.get_pkg_class('bison'),
        spack.repo.path.get('cmake'),
        spack.repo.path.get('bison'),
        # GnuPG
        spack.repo.path.get_pkg_class('gawk')
        spack.repo.path.get('gawk')
    ]
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope='bootstrap')
@@ -864,19 +826,6 @@ def ensure_flake8_in_path_or_raise():
    return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)


def all_root_specs(development=False):
    """Return a list of all the root specs that may be used to bootstrap Spack.

    Args:
        development (bool): if True include dev dependencies
    """
    specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
    if development:
        specs += [isort_root_spec(), mypy_root_spec(),
                  black_root_spec(), flake8_root_spec()]
    return specs


def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'
@@ -1014,23 +963,3 @@ def status_message(section):
        msg += '\n'
    msg = msg.format(pass_token if not missing_software else fail_token)
    return msg, missing_software


def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml) as f:
            current.update(spack.util.spack_yaml.load(f))
        list_of_sources.append(current)
    return list_of_sources

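Note: to make the shape of these entries concrete, each configured source carries a `name` and a `metadata` directory, and `bootstrapping_sources()` overlays the `metadata.yaml` found there on top of the config entry. The field names below are inferred from the accessors in this diff (`conf['name']`, `conf['type']`, `conf['info']['url']`, `conf['metadata']`); the values are made up:

# Hypothetical merged entry, as _make_bootstrapper would receive it:
source = {
    'name': 'github-actions',                       # from bootstrap:sources
    'metadata': '$spack/share/spack/bootstrap/github-actions',
    'type': 'buildcache',                           # merged from metadata.yaml
    'info': {'url': 'https://mirror.spack.io/bootstrap/github-actions'},
}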
@@ -55,7 +55,7 @@
import spack.config
import spack.install_test
import spack.main
import spack.package_base
import spack.package
import spack.paths
import spack.platforms
import spack.repo
@@ -722,7 +722,7 @@ def get_std_cmake_args(pkg):
    package were a CMakePackage instance.

    Args:
        pkg (spack.package_base.PackageBase): package under consideration
        pkg (spack.package.PackageBase): package under consideration

    Returns:
        list: arguments for cmake
@@ -738,7 +738,7 @@ def get_std_meson_args(pkg):
    package were a MesonPackage instance.

    Args:
        pkg (spack.package_base.PackageBase): package under consideration
        pkg (spack.package.PackageBase): package under consideration

    Returns:
        list: arguments for meson
@@ -748,12 +748,12 @@ def get_std_meson_args(pkg):

def parent_class_modules(cls):
    """
    Get list of superclass modules that descend from spack.package_base.PackageBase
    Get list of superclass modules that descend from spack.package.PackageBase

    Includes cls.__module__
    """
    if (not issubclass(cls, spack.package_base.PackageBase) or
            issubclass(spack.package_base.PackageBase, cls)):
    if (not issubclass(cls, spack.package.PackageBase) or
            issubclass(spack.package.PackageBase, cls)):
        return []
    result = []
    module = sys.modules.get(cls.__module__)
@@ -771,7 +771,7 @@ def load_external_modules(pkg):
    associated with them.

    Args:
        pkg (spack.package_base.PackageBase): package to load deps for
        pkg (spack.package.PackageBase): package to load deps for
    """
    for dep in list(pkg.spec.traverse()):
        external_modules = dep.external_modules or []
@@ -1109,7 +1109,7 @@ def start_build_process(pkg, function, kwargs):

    Args:

        pkg (spack.package_base.PackageBase): package whose environment we should set up the
        pkg (spack.package.PackageBase): package whose environment we should set up the
            child process for.
        function (typing.Callable): argless function to run in the child
            process.
@@ -1234,7 +1234,7 @@ def make_stack(tb, stack=None):
        if 'self' in frame.f_locals:
            # Find the first proper subclass of PackageBase.
            obj = frame.f_locals['self']
            if isinstance(obj, spack.package_base.PackageBase):
            if isinstance(obj, spack.package.PackageBase):
                break

    # We found obj, the Package implementation we care about.

@@ -9,7 +9,7 @@

from spack.build_systems.autotools import AutotoolsPackage
from spack.directives import extends
from spack.package_base import ExtensionError
from spack.package import ExtensionError
from spack.util.executable import which



@@ -16,7 +16,7 @@
from spack.build_environment import InstallError
from spack.directives import conflicts, depends_on
from spack.operating_systems.mac_os import macos_version
from spack.package_base import PackageBase, run_after, run_before
from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable
from spack.version import Version


@@ -8,7 +8,7 @@
from llnl.util.filesystem import install, mkdirp

from spack.build_systems.cmake import CMakePackage
from spack.package_base import run_after
from spack.package import run_after


def cmake_cache_path(name, value, comment=""):
@@ -210,10 +210,6 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
        ]

    def initconfig_package_entries(self):
        """This method is to be overwritten by the package"""
        return []

    def initconfig(self, spec, prefix):
        cache_entries = (self.std_initconfig_entries() +
                         self.initconfig_compiler_entries() +

@@ -18,7 +18,7 @@

import spack.build_environment
from spack.directives import conflicts, depends_on, variant
from spack.package_base import InstallError, PackageBase, run_after
from spack.package import InstallError, PackageBase, run_after
from spack.util.path import convert_to_posix_path

# Regex to extract the primary generator from the CMake generator

@@ -6,7 +6,7 @@
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
from spack.package_base import PackageBase
from spack.package import PackageBase


class CudaPackage(PackageBase):
@@ -37,7 +37,6 @@ class CudaPackage(PackageBase):
    variant('cuda_arch',
            description='CUDA architecture',
            values=spack.variant.any_combination_of(*cuda_arch_values),
            sticky=True,
            when='+cuda')

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples

@@ -3,16 +3,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from typing import Optional

import spack.package_base
import spack.package
import spack.util.url


class GNUMirrorPackage(spack.package_base.PackageBase):
class GNUMirrorPackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for GNU packages."""
    #: Path of the package in a GNU mirror
    gnu_mirror_path = None  # type: Optional[str]
    gnu_mirror_path = None

    #: List of GNU mirrors used by Spack
    base_mirrors = [

@@ -26,7 +26,7 @@

import spack.error
from spack.build_environment import dso_suffix
from spack.package_base import InstallError, PackageBase, run_after
from spack.package import InstallError, PackageBase, run_after
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.prefix import Prefix
@@ -1115,7 +1115,7 @@ def _setup_dependent_env_callback(
            raise InstallError('compilers_of_client arg required for MPI')

    def setup_dependent_package(self, module, dep_spec):
        # https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
        # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_package
        # Reminder: "module" refers to Python module.
        # Called before the install() method of dependents.

@@ -1259,14 +1259,6 @@ def install(self, spec, prefix):
            for f in glob.glob('%s/intel*log' % tmpdir):
                install(f, dst)

    @run_after('install')
    def validate_install(self):
        # Sometimes the installer exits with an error but doesn't pass a
        # non-zero exit code to spack. Check for the existence of a 'bin'
        # directory to catch this error condition.
        if not os.path.exists(self.prefix.bin):
            raise InstallError('The installer has failed to install anything.')

    @run_after('install')
    def configure_rpath(self):
        if '+rpath' not in self.spec:

@@ -10,7 +10,7 @@

from spack.directives import depends_on, extends
from spack.multimethod import when
from spack.package_base import PackageBase
from spack.package import PackageBase
from spack.util.executable import Executable



@@ -11,7 +11,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import conflicts
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class MakefilePackage(PackageBase):

@@ -7,7 +7,7 @@
from llnl.util.filesystem import install_tree, working_dir

from spack.directives import depends_on
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after
from spack.util.executable import which



@@ -11,7 +11,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on, variant
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class MesonPackage(PackageBase):

@@ -6,7 +6,7 @@
import inspect

from spack.directives import extends
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class OctavePackage(PackageBase):

@@ -14,7 +14,7 @@

from llnl.util.filesystem import find_headers, find_libraries, join_path

from spack.package_base import Package
from spack.package import Package
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

@@ -45,16 +45,18 @@ def component_dir(self):
        raise NotImplementedError

    @property
    def component_prefix(self):
    def component_path(self):
        """Path to component <prefix>/<component>/<version>."""
        return self.prefix.join(join_path(self.component_dir, self.spec.version))
        return join_path(self.prefix, self.component_dir, str(self.spec.version))

    def install(self, spec, prefix):
        self.install_component(basename(self.url_for_version(spec.version)))

    def install_component(self, installer_path):
    def install(self, spec, prefix, installer_path=None):
        """Shared install method for all oneapi packages."""

        # intel-oneapi-compilers overrides the installer_path when
        # installing fortran, which comes from a spack resource
        if installer_path is None:
            installer_path = basename(self.url_for_version(spec.version))

        if platform.system() == 'Linux':
            # Intel installer assumes and enforces that all components
            # are installed into a single prefix. Spack wants to
@@ -75,7 +77,7 @@ def install_component(self, installer_path):
            bash = Executable('bash')

            # Installer writes files in ~/intel; set HOME so they go to the prefix
            bash.add_default_env('HOME', self.prefix)
            bash.add_default_env('HOME', prefix)
            # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
            bash.add_default_env('XDG_RUNTIME_DIR',
                                 join_path(self.stage.path, 'runtime'))
@@ -83,13 +85,13 @@ def install_component(self, installer_path):
            bash(installer_path,
                 '-s', '-a', '-s', '--action', 'install',
                 '--eula', 'accept',
                 '--install-dir', self.prefix)
                 '--install-dir', prefix)

        if getpass.getuser() == 'root':
            shutil.rmtree('/var/intel/installercache', ignore_errors=True)

        # Some installers have a bug and do not return an error code when failing
        if not isdir(join_path(self.prefix, self.component_dir)):
        if not isdir(join_path(prefix, self.component_dir)):
            raise RuntimeError('install failed')

    def setup_run_environment(self, env):
@@ -102,7 +104,7 @@ def setup_run_environment(self, env):
            $ source {prefix}/{component}/{version}/env/vars.sh
        """
        env.extend(EnvironmentModifications.from_sourcing_file(
            join_path(self.component_prefix, 'env', 'vars.sh')))
            join_path(self.component_path, 'env', 'vars.sh')))


class IntelOneApiLibraryPackage(IntelOneApiPackage):
@@ -116,12 +118,12 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):

    @property
    def headers(self):
        include_path = join_path(self.component_prefix, 'include')
        include_path = join_path(self.component_path, 'include')
        return find_headers('*', include_path, recursive=True)

    @property
    def libs(self):
        lib_path = join_path(self.component_prefix, 'lib', 'intel64')
        lib_path = join_path(self.component_path, 'lib', 'intel64')
        lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
        return find_libraries('*', root=lib_path, shared=True, recursive=True)

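Note: a worked example of the path both spellings compute, with a hypothetical package and version. Besides the name, the rename also changes the return type: `component_prefix` built a spack Prefix object via `self.prefix.join(...)`, while `component_path` returns a plain string:

import os

def component_path(prefix, component_dir, version):
    # Same layout either way: <prefix>/<component>/<version>, which is
    # where env/vars.sh, include/ and lib/intel64/ live.
    return os.path.join(prefix, component_dir, str(version))

print(component_path('/opt/spack/intel-oneapi-mkl-2022.1.0', 'mkl', '2022.1.0'))
# -> /opt/spack/intel-oneapi-mkl-2022.1.0/mkl/2022.1.0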
@@ -10,7 +10,7 @@
from llnl.util.filesystem import filter_file

from spack.directives import extends
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after
from spack.util.executable import Executable


@@ -6,30 +6,26 @@
import os
import re
import shutil
from typing import Optional

import llnl.util.tty as tty
from llnl.util.filesystem import (
    filter_file,
    find,
    find_all_headers,
    find_libraries,
    is_nonsymlink_exe_with_shebang,
    path_contains_subdirectory,
    same_path,
    working_dir,
)
from llnl.util.lang import classproperty, match_predicate
from llnl.util.lang import match_predicate

from spack.directives import depends_on, extends
from spack.error import NoHeadersError, NoLibrariesError
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class PythonPackage(PackageBase):
    """Specialized class for packages that are built using pip."""
    #: Package name, version, and extension on PyPI
    pypi = None  # type: Optional[str]
    pypi = None

    maintainers = ['adamjstewart']

@@ -50,7 +46,7 @@ class PythonPackage(PackageBase):
    # package manually
    depends_on('py-wheel', type='build')

    py_namespace = None  # type: Optional[str]
    py_namespace = None

    @staticmethod
    def _std_args(cls):
@@ -77,21 +73,24 @@ def _std_args(cls):
            '--no-index',
        ]

    @classproperty
    def homepage(cls):
        if cls.pypi:
            name = cls.pypi.split('/')[0]
    @property
    def homepage(self):
        if self.pypi:
            name = self.pypi.split('/')[0]
            return 'https://pypi.org/project/' + name + '/'

    @classproperty
    def url(cls):
        if cls.pypi:
            return 'https://files.pythonhosted.org/packages/source/' + cls.pypi[0] + '/' + cls.pypi
    @property
    def url(self):
        if self.pypi:
            return (
                'https://files.pythonhosted.org/packages/source/'
                + self.pypi[0] + '/' + self.pypi
            )

    @classproperty
    def list_url(cls):
        if cls.pypi:
            name = cls.pypi.split('/')[0]
    @property
    def list_url(self):
        if self.pypi:
            name = self.pypi.split('/')[0]
            return 'https://pypi.org/simple/' + name + '/'

    @property
@@ -178,37 +177,6 @@ def install(self, spec, prefix):
        with working_dir(self.build_directory):
            pip(*args)

    @property
    def headers(self):
        """Discover header files in platlib."""

        # Headers may be in either location
        include = inspect.getmodule(self).include
        platlib = inspect.getmodule(self).platlib
        headers = find_all_headers(include) + find_all_headers(platlib)

        if headers:
            return headers

        msg = 'Unable to locate {} headers in {} or {}'
        raise NoHeadersError(msg.format(self.spec.name, include, platlib))

    @property
    def libs(self):
        """Discover libraries in platlib."""

        # Remove py- prefix in package name
        library = 'lib' + self.spec.name[3:].replace('-', '?')
        root = inspect.getmodule(self).platlib

        for shared in [True, False]:
            libs = find_libraries(library, root, shared=shared, recursive=True)
            if libs:
                return libs

        msg = 'Unable to recursively locate {} libraries in {}'
        raise NoLibrariesError(msg.format(self.spec.name, root))

    # Testing

    def test(self):

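Note: the `@classproperty` / `@property` swap in this hunk matters because `pypi` is a class attribute. With a plain property, `PythonPackage.homepage` evaluated on the class object is just the descriptor itself; a classproperty also works without an instance, which is useful when Spack inspects package classes it never instantiates. A minimal sketch of such a descriptor (llnl's version lives in `llnl.util.lang` and presumably looks essentially like this):

class classproperty(object):
    def __init__(self, callback):
        self.callback = callback

    def __get__(self, instance, owner):
        # Always evaluate against the owning class, never the instance.
        return self.callback(owner)


class Example(object):
    pypi = 'flake8/flake8-4.0.1.tar.gz'   # hypothetical value

    @classproperty
    def homepage(cls):
        return 'https://pypi.org/project/' + cls.pypi.split('/')[0] + '/'


print(Example.homepage)  # works on the class itself, no instance needed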
@@ -9,7 +9,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class QMakePackage(PackageBase):

@@ -2,13 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import Optional

import llnl.util.lang as lang

import inspect

from spack.directives import extends
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class RPackage(PackageBase):
@@ -29,10 +28,10 @@ class RPackage(PackageBase):
    # package attributes that can be expanded to set the homepage, url,
    # list_url, and git values
    # For CRAN packages
    cran = None  # type: Optional[str]
    cran = None

    # For Bioconductor packages
    bioc = None  # type: Optional[str]
    bioc = None

    maintainers = ['glennpj']

@@ -42,27 +41,27 @@ class RPackage(PackageBase):

    extends('r')

    @lang.classproperty
    def homepage(cls):
        if cls.cran:
            return 'https://cloud.r-project.org/package=' + cls.cran
        elif cls.bioc:
            return 'https://bioconductor.org/packages/' + cls.bioc
    @property
    def homepage(self):
        if self.cran:
            return 'https://cloud.r-project.org/package=' + self.cran
        elif self.bioc:
            return 'https://bioconductor.org/packages/' + self.bioc

    @lang.classproperty
    def url(cls):
        if cls.cran:
    @property
    def url(self):
        if self.cran:
            return (
                'https://cloud.r-project.org/src/contrib/'
                + cls.cran + '_' + str(list(cls.versions)[0]) + '.tar.gz'
                + self.cran + '_' + str(list(self.versions)[0]) + '.tar.gz'
            )

    @lang.classproperty
    def list_url(cls):
        if cls.cran:
    @property
    def list_url(self):
        if self.cran:
            return (
                'https://cloud.r-project.org/src/contrib/Archive/'
                + cls.cran + '/'
                + self.cran + '/'
            )

    @property

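Note: a worked example of the CRAN expansion above, using a hypothetical `cran = 'ggplot2'` at version 3.3.6:

cran, version = 'ggplot2', '3.3.6'    # hypothetical package and version

homepage = 'https://cloud.r-project.org/package=' + cran
url = ('https://cloud.r-project.org/src/contrib/'
       + cran + '_' + version + '.tar.gz')
list_url = 'https://cloud.r-project.org/src/contrib/Archive/' + cran + '/'

# homepage -> https://cloud.r-project.org/package=ggplot2
# url      -> https://cloud.r-project.org/src/contrib/ggplot2_3.3.6.tar.gz
# list_url -> https://cloud.r-project.org/src/contrib/Archive/ggplot2/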
@@ -3,15 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import Optional

import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends
from spack.package_base import PackageBase
from spack.package import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError

@@ -38,14 +36,14 @@ class RacketPackage(PackageBase):
    extends('racket')

    pkgs = False
    subdirectory = None  # type: Optional[str]
    name = None  # type: Optional[str]
    subdirectory = None
    name = None
    parallel = True

    @lang.classproperty
    def homepage(cls):
        if cls.pkgs:
            return 'https://pkgs.racket-lang.org/package/{0}'.format(cls.name)
    @property
    def homepage(self):
        if self.pkgs:
            return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)

    @property
    def build_directory(self):

@@ -77,7 +77,7 @@

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package_base import PackageBase
from spack.package import PackageBase


class ROCmPackage(PackageBase):
@@ -90,10 +90,9 @@ class ROCmPackage(PackageBase):
    # https://llvm.org/docs/AMDGPUUsage.html
    # Possible architectures
    amdgpu_targets = (
        'gfx701', 'gfx801', 'gfx802', 'gfx803', 'gfx900', 'gfx900:xnack-',
        'gfx906', 'gfx908', 'gfx90a',
        'gfx906:xnack-', 'gfx908:xnack-', 'gfx90a:xnack-', 'gfx90a:xnack+',
        'gfx1010', 'gfx1011', 'gfx1012', 'gfx1030', 'gfx1031',
        'gfx701', 'gfx801', 'gfx802', 'gfx803',
        'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
        'gfx1011', 'gfx1012'
    )

    variant('rocm', default=False, description='Enable ROCm support')

@@ -7,7 +7,7 @@
import inspect

from spack.directives import extends
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class RubyPackage(PackageBase):

@@ -7,7 +7,7 @@
import inspect

from spack.directives import depends_on
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class SConsPackage(PackageBase):

@@ -11,7 +11,7 @@
from llnl.util.filesystem import find, join_path, working_dir

from spack.directives import depends_on, extends
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class SIPPackage(PackageBase):

@@ -3,17 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from typing import Optional

import spack.package_base
import spack.package
import spack.util.url


class SourceforgePackage(spack.package_base.PackageBase):
class SourceforgePackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for Sourceforge
    packages."""
    #: Path of the package in a Sourceforge mirror
    sourceforge_mirror_path = None  # type: Optional[str]
    sourceforge_mirror_path = None

    #: List of Sourceforge mirrors used by Spack
    base_mirrors = [

@@ -2,17 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Optional

import spack.package_base
import spack.package
import spack.util.url


class SourcewarePackage(spack.package_base.PackageBase):
class SourcewarePackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for Sourceware.org
    packages."""
    #: Path of the package in a Sourceware mirror
    sourceware_mirror_path = None  # type: Optional[str]
    sourceware_mirror_path = None

    #: List of Sourceware mirrors used by Spack
    base_mirrors = [

@@ -9,7 +9,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on
from spack.package_base import PackageBase, run_after
from spack.package import PackageBase, run_after


class WafPackage(PackageBase):

@@ -3,17 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from typing import Optional

import spack.package_base
import spack.package
import spack.util.url


class XorgPackage(spack.package_base.PackageBase):
class XorgPackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for x.org
    packages."""
    #: Path of the package in a x.org mirror
    xorg_mirror_path = None  # type: Optional[str]
    xorg_mirror_path = None

    #: List of x.org mirrors used by Spack
    # Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.

@@ -33,6 +33,7 @@
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec
@@ -41,8 +42,10 @@
    'always',
]

SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                           'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

spack_gpg = spack.main.SpackCommand('gpg')
spack_compiler = spack.main.SpackCommand('compiler')
@@ -196,11 +199,6 @@ def _get_cdash_build_name(spec, build_group):
        spec.name, spec.version, spec.compiler, spec.architecture, build_group)


def _remove_reserved_tags(tags):
    """Convenience function to strip reserved tags from jobs"""
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


def _get_spec_string(spec):
    format_elements = [
        '{name}{@version}',
@@ -233,10 +231,8 @@ def _add_dependency(spec_label, dep_label, deps):
    deps[spec_label].add(dep_label)


def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
                           mirrors_to_check=None):
    spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
                                       mirrors_to_check=mirrors_to_check)
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
    spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)

    if spec_deps_obj:
        dependencies = spec_deps_obj['dependencies']
@@ -253,7 +249,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
        _add_dependency(entry['spec'], entry['depends'], deps)


def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
def stage_spec_jobs(specs, check_index_only=False):
    """Take a set of release specs and generate a list of "stages", where the
    jobs in any stage are dependent only on jobs in previous stages. This
    allows us to maximize build parallelism within the gitlab-ci framework.
@@ -265,8 +261,6 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
            are up to date on those mirrors. This flag limits that search to
            the binary cache indices on those mirrors to speed the process up,
            even though there is no guarantee the index is up to date.
        mirrors_to_check: Optional mapping giving mirrors to check instead of
            any configured mirrors.

    Returns: A tuple of information objects describing the specs, dependencies
        and stages:
@@ -303,8 +297,8 @@ def _remove_satisfied_deps(deps, satisfied_list):
    deps = {}
    spec_labels = {}

    _get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
                           mirrors_to_check=mirrors_to_check)
    _get_spec_dependencies(
        specs, deps, spec_labels, check_index_only=check_index_only)

    # Save the original deps, as we need to return them at the end of the
    # function. In the while loop below, the "dependencies" variable is
@@ -346,7 +340,7 @@ def _print_staging_summary(spec_labels, dependencies, stages):
                              _get_spec_string(s)))


def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
def _compute_spec_deps(spec_list, check_index_only=False):
    """
    Computes all the dependencies for the spec(s) and generates a JSON
    object which provides both a list of unique spec names as well as a
@@ -419,7 +413,7 @@ def append_dep(s, d):
            continue

        up_to_date_mirrors = bindist.get_mirrors_for_spec(
            spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
            spec=s, index_only=check_index_only)

        skey = _spec_deps_key(s)
        spec_labels[skey] = {
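Note: the staging algorithm described in the `stage_spec_jobs` docstring is a topological batching, repeatedly peel off every spec whose dependencies are already satisfied, and each batch becomes one pipeline stage whose jobs can build in parallel. A rough standalone sketch of that idea (not Spack's verbatim `_remove_satisfied_deps` loop):

def stage_jobs(deps):
    """deps maps a job label to the set of labels it depends on."""
    remaining = {label: set(d) for label, d in deps.items()}
    stages = []
    while remaining:
        ready = sorted(label for label, d in remaining.items() if not d)
        if not ready:
            raise ValueError('dependency cycle detected')
        stages.append(ready)
        for label in ready:
            del remaining[label]
        for d in remaining.values():
            d.difference_update(ready)   # mark this stage as satisfied
    return stages

print(stage_jobs({'zlib': set(), 'cmake': {'zlib'}, 'hdf5': {'zlib', 'cmake'}}))
# -> [['zlib'], ['cmake'], ['hdf5']]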
@@ -608,8 +602,8 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
                            prune_dag=False, check_index_only=False,
                            run_optimizer=False, use_dependencies=False,
                            artifacts_root=None, remote_mirror_override=None):
    """ Generate a gitlab yaml file to run a dynamic child pipeline from
                            artifacts_root=None):
    """ Generate a gitlab yaml file to run a dynamic child pipeline from
    the spec matrix in the active environment.

    Arguments:
@@ -635,10 +629,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
        remote_mirror_override (str): Typically only needed when one spack.yaml
            is used to populate several mirrors with binaries, based on some
            criteria. Spack protected pipelines populate different mirrors based
            on branch name, facilitated by this option.
    """
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
@@ -688,19 +678,17 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        for s in affected_specs:
            tty.debug('  {0}'.format(s.name))

    # Downstream jobs will "need" (depend on, for both scheduling and
    # artifacts, which include spack.lock file) this pipeline generation
    # job by both name and pipeline id. If those environment variables
    # do not exist, then maybe this is just running in a shell, in which
    # case, there is no expectation gitlab will ever run the generated
    # pipeline and those environment variables do not matter.
    generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
    parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')
    generate_job_name = os.environ.get('CI_JOB_NAME', None)
    parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)

    # Values: "spack_pull_request", "spack_protected_branch", or not set
    spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
    is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

    spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)
    spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
    pr_mirror_url = None
    if spack_pr_branch:
        pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                      spack_pr_branch)

    if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
        tty.die('spack ci generate requires an env containing a mirror')
@@ -755,29 +743,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            'strip-compilers': False,
        })

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
    mirrors_to_check = None
    if remote_mirror_override:
        if spack_pipeline_type == 'spack_protected_branch':
            # Overriding the main mirror in this case might result
            # in skipping jobs on a release pipeline because specs are
            # up to date in develop. Eventually we want to notice and take
            # advantage of this by scheduling a job to copy the spec from
            # develop to the release, but until we have that, this makes
            # sure we schedule a rebuild job if the spec isn't already in
            # override mirror.
            mirrors_to_check = {
                'override': remote_mirror_override
            }

        # If we have a remote override and we want to generate the pipeline
        # using --check-index-only, then the override mirror needs to be added
        # to the configured mirrors when bindist.update() is run, or else we
        # won't fetch its index and include it in our local cache.
    # Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
    # be up to date in one of those and thus not need to be rebuilt.
    if pr_mirror_url:
        spack.mirror.add(
            'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
            'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
        spack.mirror.add('ci_shared_pr_mirror',
                         SPACK_SHARED_PR_MIRROR_URL,
                         cfg.default_modify_scope())

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
@@ -823,7 +796,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the override mirror we may have just added above).
    # (including the per-PR mirror we may have just added above).
    try:
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
@@ -852,11 +825,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                phase_spec.concretize()
            staged_phases[phase_name] = stage_spec_jobs(
                concrete_phase_specs,
                check_index_only=check_index_only,
                mirrors_to_check=mirrors_to_check)
                check_index_only=check_index_only)
    finally:
        # Clean up remote mirror override if enabled
        if remote_mirror_override:
        # Clean up PR mirror if enabled
        if pr_mirror_url:
            spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

    all_job_names = []
@@ -917,14 +889,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

                    tags = [tag for tag in runner_attribs['tags']]

                    if spack_pipeline_type is not None:
                        # For spack pipelines "public" and "protected" are reserved tags
                        tags = _remove_reserved_tags(tags)
                        if spack_pipeline_type == 'spack_protected_branch':
                            tags.extend(['aws', 'protected'])
                        elif spack_pipeline_type == 'spack_pull_request':
                            tags.extend(['public'])

                    variables = {}
                    if 'variables' in runner_attribs:
                        variables.update(runner_attribs['variables'])
@@ -1210,10 +1174,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                             service_job_config,
                             cleanup_job)

        if 'tags' in cleanup_job:
            service_tags = _remove_reserved_tags(cleanup_job['tags'])
            cleanup_job['tags'] = service_tags

        cleanup_job['stage'] = 'cleanup-temp-storage'
        cleanup_job['script'] = [
            'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(
@@ -1221,74 +1181,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
]
|
||||
cleanup_job['when'] = 'always'
|
||||
cleanup_job['retry'] = service_job_retries
|
||||
cleanup_job['interruptible'] = True
|
||||
|
||||
output_object['cleanup'] = cleanup_job
|
||||
|
||||
if ('signing-job-attributes' in gitlab_ci and
|
||||
spack_pipeline_type == 'spack_protected_branch'):
|
||||
# External signing: generate a job to check and sign binary pkgs
|
||||
stage_names.append('stage-sign-pkgs')
|
||||
signing_job_config = gitlab_ci['signing-job-attributes']
|
||||
signing_job = {}
|
||||
|
||||
signing_job_attrs_to_copy = [
|
||||
'image',
|
||||
'tags',
|
||||
'variables',
|
||||
'before_script',
|
||||
'script',
|
||||
'after_script',
|
||||
]
|
||||
|
||||
_copy_attributes(signing_job_attrs_to_copy,
|
||||
signing_job_config,
|
||||
signing_job)
|
||||
|
||||
signing_job_tags = []
|
||||
if 'tags' in signing_job:
|
||||
signing_job_tags = _remove_reserved_tags(signing_job['tags'])
|
||||
|
||||
for tag in ['aws', 'protected', 'notary']:
|
||||
if tag not in signing_job_tags:
|
||||
signing_job_tags.append(tag)
|
||||
signing_job['tags'] = signing_job_tags
|
||||
|
||||
signing_job['stage'] = 'stage-sign-pkgs'
|
||||
signing_job['when'] = 'always'
|
||||
signing_job['retry'] = {
|
||||
'max': 2,
|
||||
'when': ['always']
|
||||
}
|
||||
signing_job['interruptible'] = True
|
||||
|
||||
output_object['sign-pkgs'] = signing_job
|
||||
|
||||
if spack_buildcache_copy:
|
||||
# Generate a job to copy the contents from wherever the builds are getting
|
||||
# pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
|
||||
# variable.
|
||||
src_url = remote_mirror_override or remote_mirror_url
|
||||
dest_url = spack_buildcache_copy
|
||||
|
||||
stage_names.append('stage-copy-buildcache')
|
||||
copy_job = {
|
||||
'stage': 'stage-copy-buildcache',
|
||||
'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
|
||||
'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
|
||||
'when': 'on_success',
|
||||
'interruptible': True,
|
||||
'retry': service_job_retries,
|
||||
'script': [
|
||||
'. ./share/spack/setup-env.sh',
|
||||
'spack --version',
|
||||
'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
|
||||
src_url, dest_url)
|
||||
]
|
||||
}
|
||||
|
||||
output_object['copy-mirror'] = copy_job

if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append('stage-rebuild-index')
@@ -1299,13 +1194,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
final_job)

if 'tags' in final_job:
service_tags = _remove_reserved_tags(final_job['tags'])
final_job['tags'] = service_tags

index_target_mirror = mirror_urls[0]
if remote_mirror_override:
index_target_mirror = remote_mirror_override
if is_pr_pipeline:
index_target_mirror = pr_mirror_url

final_job['stage'] = 'stage-rebuild-index'
final_job['script'] = [
@@ -1314,7 +1205,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries
final_job['interruptible'] = True

output_object['rebuild-index'] = final_job

@@ -1347,9 +1237,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
}

if remote_mirror_override:
(output_object['variables']
['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override
if pr_mirror_url:
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
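For orientation: the generation step above and the `spack ci rebuild` step later in this diff communicate only through such pipeline variables. A minimal sketch of the round trip, assuming the `get_env_var` helper that appears further down in this diff:

    # generate side writes the variable into the pipeline YAML ...
    output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
    # ... and the rebuild side reads it back from the job environment:
    pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')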

spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:
@@ -1628,9 +1517,8 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
job_log_dir (str): Path into which build log should be copied
"""
try:
pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
job_pkg = pkg_cls(job_spec)
tty.debug('job package: {0.fullname}'.format(job_pkg))
job_pkg = spack.repo.get(job_spec)
tty.debug('job package: {0}'.format(job_pkg))
stage_dir = job_pkg.stage.path
tty.debug('stage dir: {0}'.format(stage_dir))
build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
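The hunk above is representative of a refactor that recurs throughout this diff: code that used to ask the repo for a ready-made package instance now looks up the package class and binds it to a spec. A hedged side-by-side sketch ('zlib' is only an illustrative name):

    import spack.repo
    import spack.spec

    spec = spack.spec.Spec('zlib')
    # new style: fetch the package *class*, then instantiate it with a spec
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)
    # old style being replaced: ask the repo for an instance directly
    # pkg = spack.repo.get(spec)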

@@ -8,10 +8,7 @@
import argparse
import os
import re
import shlex
import sys
from textwrap import dedent
from typing import List, Tuple

import ruamel.yaml as yaml
import six
@@ -150,58 +147,6 @@ def get_command(cmd_name):
return getattr(get_module(cmd_name), pname)


class _UnquotedFlags(object):
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

If the heuristic finds a match (which can be checked with `__bool__()`), a warning
message explaining how to quote multiple flags correctly can be generated with
`.report()`.
"""

flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format(
'|'.join(spack.spec.FlagMap.valid_compiler_flags()),
))

def __init__(self, all_unquoted_flag_pairs):
# type: (List[Tuple[re.Match, str]]) -> None
self._flag_pairs = all_unquoted_flag_pairs

def __bool__(self):
# type: () -> bool
return bool(self._flag_pairs)

@classmethod
def extract(cls, sargs):
# type: (str) -> _UnquotedFlags
all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
prev_flags_arg = None
for arg in shlex.split(sargs):
if prev_flags_arg is not None:
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
prev_flags_arg = cls.flags_arg_pattern.match(arg)
return cls(all_unquoted_flag_pairs)

def report(self):
# type: () -> str
single_errors = [
'({0}) {1} {2} => {3}'.format(
i + 1, match.group(0), next_arg,
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
)
for i, (match, next_arg) in enumerate(self._flag_pairs)
]
return dedent("""\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.

Possible flag quotation errors (with the correctly-quoted version after the =>):
{0}""").format('\n'.join(single_errors))


def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
exceptions and dies if there are errors.
@@ -210,28 +155,29 @@ def parse_specs(args, **kwargs):
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)

sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)

try:
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests)  # implies normalize
elif normalize:
spec.normalize(tests=tests)

return specs

except spack.spec.SpecParseError as e:
msg = e.message + "\n" + str(e.string) + "\n"
msg += (e.pos + 2) * " " + "^"
raise spack.error.SpackError(msg)

except spack.error.SpecError as e:

msg = e.message
if e.long_message:
msg += e.long_message
if unquoted_flags:
msg += '\n\n'
msg += unquoted_flags.report()

raise spack.error.SpackError(msg)

116 lib/spack/spack/cmd/analyze.py Normal file
@@ -0,0 +1,116 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import llnl.util.tty as tty

import spack.analyzers
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.fetch_strategy
import spack.monitor
import spack.paths
import spack.report

description = "run analyzers on installed packages"
section = "analysis"
level = "long"


def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='analyze_command')

sp.add_parser('list-analyzers',
description="list available analyzers",
help="show list of analyzers that are available to run.")

# This adds the monitor group to the subparser
spack.monitor.get_monitor_group(subparser)

# Run Parser
run_parser = sp.add_parser('run', description="run an analyzer",
help="provide the name of the analyzer to run.")

run_parser.add_argument(
'--overwrite', action='store_true',
help="re-analyze even if the output file already exists.")
run_parser.add_argument(
'-p', '--path', default=None,
dest='path',
help="write output to a different directory than ~/.spack/analyzers")
run_parser.add_argument(
'-a', '--analyzers', default=None,
dest="analyzers", action="append",
help="add an analyzer (defaults to all available)")
arguments.add_common_arguments(run_parser, ['spec'])


def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=False):
"""
Do an analysis for a spec, optionally adding monitoring.

We also allow the user to specify a custom output directory.
analyze_spec(spec, args.analyzers, args.outdir, monitor)

Args:
spec (spack.spec.Spec): spec object of installed package
analyzers (list): list of analyzer (keys) to run
monitor (spack.monitor.SpackMonitorClient): a monitor client
overwrite (bool): overwrite result if already exists
"""
analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())

# Load the build environment from the spec install directory, and send
# the spec to the monitor if it's not known
if monitor:
monitor.load_build_environment(spec)
monitor.new_configuration([spec])

for name in analyzers:

# Instantiate the analyzer with the spec and outdir
analyzer = spack.analyzers.get_analyzer(name)(spec, outdir)

# Run the analyzer to get a json result - results are returned as
# a dictionary with a key corresponding to the analyzer type, so
# we can just update the data
result = analyzer.run()

# Send the result. We do them separately because:
# 1. each analyzer might have differently organized output
# 2. the size of a result can be large
analyzer.save_result(result, overwrite)


def analyze(parser, args, **kwargs):

# If the user wants to list analyzers, do so and exit
if args.analyze_command == "list-analyzers":
spack.analyzers.list_all()
sys.exit(0)

# handle active environment, if any
env = ev.active_environment()

# Get and disambiguate the spec (we should only have one)
specs = spack.cmd.parse_specs(args.spec)
if not specs:
tty.die("You must provide one or more specs to analyze.")
spec = spack.cmd.disambiguate_spec(specs[0], env)

# The user wants to monitor builds using github.com/spack/spack-monitor
# It is instantiated once here, and then available at spack.monitor.cli
monitor = None
if args.use_monitor:
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
)

# Run the analysis
analyze_spec(spec, args.analyzers, args.path, monitor, args.overwrite)
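A hedged usage sketch of the new command; the spec and analyzer name are only examples:

    # command line:
    #   spack analyze run -a install_files hdf5
    # roughly corresponds to:
    specs = spack.cmd.parse_specs('hdf5')
    spec = spack.cmd.disambiguate_spec(specs[0], ev.active_environment())
    analyze_spec(spec, analyzers=['install_files'], overwrite=True)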
@@ -99,8 +99,8 @@ def blame(parser, args):
blame_file = path

if not blame_file:
pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
blame_file = pkg_cls.module.__file__.rstrip('c')  # .pyc -> .py
pkg = spack.repo.get(args.package_or_file)
blame_file = pkg.module.__file__.rstrip('c')  # .pyc -> .py

# get git blame for the package
with working_dir(spack.paths.prefix):
@@ -6,9 +6,7 @@

import os.path
import shutil
import tempfile

import llnl.util.filesystem
import llnl.util.tty
import llnl.util.tty.color

@@ -17,9 +15,6 @@
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path

description = "manage bootstrap configuration"
@@ -27,38 +22,6 @@
level = "long"


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = 'bootstrap_cache'

# Metadata for a generated binary mirror
BINARY_METADATA = {
'type': 'buildcache',
'description': ('Buildcache copied from a public tarball available on Github.'
'The sha256 checksum of binaries is checked before installation.'),
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
'tarball': BINARY_TARBALL
}
}

CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'

# Metadata for a generated source mirror
SOURCE_METADATA = {
'type': 'install',
'description': 'Mirror with software needed to bootstrap Spack',
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
}
}


def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
@@ -104,61 +67,24 @@ def setup_parser(subparser):
)

list = sp.add_parser(
'list', help='list all the sources of software to bootstrap Spack'
'list', help='list the methods available for bootstrapping'
)
_add_scope_option(list)

trust = sp.add_parser(
'trust', help='trust a bootstrapping source'
'trust', help='trust a bootstrapping method'
)
_add_scope_option(trust)
trust.add_argument(
'name', help='name of the source to be trusted'
'name', help='name of the method to be trusted'
)

untrust = sp.add_parser(
'untrust', help='untrust a bootstrapping source'
'untrust', help='untrust a bootstrapping method'
)
_add_scope_option(untrust)
untrust.add_argument(
'name', help='name of the source to be untrusted'
)

add = sp.add_parser(
'add', help='add a new source for bootstrapping'
)
_add_scope_option(add)
add.add_argument(
'--trust', action='store_true',
help='trust the source immediately upon addition')
add.add_argument(
'name', help='name of the new source of software'
)
add.add_argument(
'metadata_dir', help='directory where to find metadata files'
)

remove = sp.add_parser(
'remove', help='remove a bootstrapping source'
)
remove.add_argument(
'name', help='name of the source to be removed'
)

mirror = sp.add_parser(
'mirror', help='create a local mirror to bootstrap Spack'
)
mirror.add_argument(
'--binary-packages', action='store_true',
help='download public binaries in the mirror'
)
mirror.add_argument(
'--dev', action='store_true',
help='download dev dependencies too'
)
mirror.add_argument(
metavar='DIRECTORY', dest='root_dir',
help='root directory in which to create the mirror and metadata'
'name', help='name of the method to be untrusted'
)


@@ -211,7 +137,10 @@ def _root(args):


def _list(args):
sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
sources = spack.config.get(
'bootstrap:sources', default=None, scope=args.scope
)

if not sources:
llnl.util.tty.msg(
"No method available for bootstrapping Spack's dependencies"
@@ -320,121 +249,6 @@ def _status(args):
print()


def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]

# If the name is already used error out
if args.name in names:
msg = 'a source named "{0}" already exists. Please choose a different name'
raise RuntimeError(msg.format(args.name))

# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
raise RuntimeError(
'the directory "{0}" does not exist'.format(args.metadata_dir)
)

file = os.path.join(metadata_dir, 'metadata.yaml')
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))

# Insert the new source as the highest priority one
write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
sources = [
{'name': args.name, 'metadata': args.metadata_dir}
] + sources
spack.config.set('bootstrap:sources', sources, scope=write_scope)

msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_trust(args)
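Since `_add` prepends to `bootstrap:sources`, the stored configuration afterwards is just a list of name/metadata pairs. A sketch of what a lookup might return (names are placeholders):

    # After: spack bootstrap add --trust my-mirror /path/to/metadata_dir
    sources = spack.config.get('bootstrap:sources')
    # -> [{'name': 'my-mirror', 'metadata': '/path/to/metadata_dir'},
    #     ...previously configured sources...]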


def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]
if args.name not in names:
msg = ('cannot find any bootstrapping source named "{0}". '
'Run `spack bootstrap list` to see available sources.')
raise RuntimeError(msg.format(args.name))

for current_scope in spack.config.scopes():
sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
if args.name in [s['name'] for s in sources]:
sources = [s for s in sources if s['name'] != args.name]
spack.config.set('bootstrap:sources', sources, scope=current_scope)
msg = ('Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.')
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))


def _mirror(args):
mirror_dir = spack.util.path.canonicalize_path(
os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
)

# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
# Suppress tty from the call below for terser messages
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)

if args.binary_packages:
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
llnl.util.tty.set_msg_enabled(False)
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
stage.create()
stage.fetch()
stage.expand_archive()
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join('metadata', subdir)
metadata_yaml = os.path.join(
args.root_dir, metadata_rel_dir, 'metadata.yaml'
)
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode='w') as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

instructions = ('\nTo register the mirror on the platform where it\'s supposed '
'to be used, move "{0}" to its final location and run the '
'following command(s):\n\n').format(args.root_dir)
cmd = '  % spack bootstrap add --trust {0} <final-path>/{1}\n'
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
instructions += cmd.format('local-sources', rel_directory)
if args.binary_packages:
abs_directory, rel_directory = write_metadata(
subdir='binaries', metadata=BINARY_METADATA
)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
instructions += cmd.format('local-binaries', rel_directory)
print(instructions)


def bootstrap(parser, args):
callbacks = {
'status': _status,
@@ -444,9 +258,6 @@ def bootstrap(parser, args):
'root': _root,
'list': _list,
'trust': _trust,
'untrust': _untrust,
'add': _add,
'remove': _remove,
'mirror': _mirror
'untrust': _untrust
}
callbacks[args.subcommand](args)
@@ -478,12 +478,11 @@ def save_specfile_fn(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'json'
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)


@@ -12,12 +12,11 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
import spack.stage
import spack.util.crypto
from spack.package_base import preferred_version
from spack.package import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import VersionBase, ver
from spack.version import Version, ver

description = "checksum available versions of a package"
section = "packaging"
@@ -55,8 +54,7 @@ def checksum(parser, args):
tty.die("`spack checksum` accepts package names, not URLs.")

# Get the package we're going to generate checksums for
pkg_cls = spack.repo.path.get_pkg_class(args.package)
pkg = pkg_cls(spack.spec.Spec(args.package))
pkg = spack.repo.get(args.package)

url_dict = {}
versions = args.versions
@@ -67,7 +65,7 @@ def checksum(parser, args):
remote_versions = None
for version in versions:
version = ver(version)
if not isinstance(version, VersionBase):
if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or "
"version ranges. Use unambiguous versions.")
url = pkg.find_valid_url_for_version(version)
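The isinstance() check above is what rejects ranges and lists; roughly, as a sketch (class names per the import change in this hunk):

    from spack.version import ver
    ver('1.2.3')    # a single concrete version: checksummable
    ver('1.2:1.4')  # parsed as a range, not a plain version: tty.die() fires above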

@@ -64,11 +64,6 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.add_argument(
'--buildcache-destination', default=None,
help="Override the mirror configured in the environment (spack.yaml) " +
"in order to push binaries from the generated pipeline to a " +
"different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',
@@ -132,7 +127,6 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination

if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
@@ -146,8 +140,7 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination)
use_dependencies=use_dependencies, artifacts_root=artifacts_root)

if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
@@ -187,9 +180,6 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))

# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@@ -206,7 +196,7 @@ def ci_rebuild(args):
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

# Construct absolute paths relative to current $CI_PROJECT_DIR
@@ -254,10 +244,6 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))

# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url

# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
@@ -387,24 +373,7 @@ def ci_rebuild(args):
cfg.default_modify_scope())

# Check configured mirrors for a built spec with a matching hash
mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {
'override': remote_mirror_override
}

# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add('mirror_override',
remote_mirror_override,
cfg.default_modify_scope())

matches = bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)

if matches:
# Got a hash match on at least one configured mirror. All
@@ -548,6 +517,13 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Create buildcache on remote mirror, either on pr-specific mirror or
# on the main mirror defined in the gitlab-enabled spack environment
if spack_is_pr_pipeline:
buildcache_mirror_url = pr_mirror_url
else:
buildcache_mirror_url = remote_mirror_url

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure

@@ -58,21 +58,6 @@ def setup_parser(subparser):
arguments.add_common_arguments(subparser, ['specs'])


def remove_python_cache():
for directory in [lib_path, var_path]:
for root, dirs, files in os.walk(directory):
for f in files:
if f.endswith('.pyc') or f.endswith('.pyo'):
fname = os.path.join(root, f)
tty.debug('Removing {0}'.format(fname))
os.remove(fname)
for d in dirs:
if d == '__pycache__':
dname = os.path.join(root, d)
tty.debug('Removing {0}'.format(dname))
shutil.rmtree(dname)


def clean(parser, args):
# If nothing was set, activate the default
if not any([args.specs, args.stage, args.downloads, args.failures,
@@ -85,7 +70,8 @@ def clean(parser, args):
for spec in specs:
msg = 'Cleaning build stage [{0}]'
tty.msg(msg.format(spec.short_spec))
spec.package.do_clean()
package = spack.repo.get(spec)
package.do_clean()

if args.stage:
tty.msg('Removing all temporary build stages')
@@ -109,7 +95,18 @@ def clean(parser, args):

if args.python_cache:
tty.msg('Removing python cache files')
remove_python_cache()
for directory in [lib_path, var_path]:
for root, dirs, files in os.walk(directory):
for f in files:
if f.endswith('.pyc') or f.endswith('.pyo'):
fname = os.path.join(root, f)
tty.debug('Removing {0}'.format(fname))
os.remove(fname)
for d in dirs:
if d == '__pycache__':
dname = os.path.join(root, d)
tty.debug('Removing {0}'.format(dname))
shutil.rmtree(dname)

if args.bootstrap:
bootstrap_prefix = spack.util.path.canonicalize_path(

@@ -9,6 +9,7 @@

import spack.container
import spack.container.images
import spack.monitor

description = ("creates recipes to build images for different"
" container runtimes")
@@ -17,6 +18,7 @@


def setup_parser(subparser):
monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
subparser.add_argument(
'--list-os', action='store_true', default=False,
help='list all the OS that can be used in the bootstrap phase and exit'
@@ -44,5 +46,14 @@ def containerize(parser, args):
raise ValueError(msg.format(config_file))

config = spack.container.validate(config_file)

# If we have a monitor request, add monitor metadata to config
if args.use_monitor:
config['spack']['monitor'] = {
"host": args.monitor_host,
"keep_going": args.monitor_keep_going,
"prefix": args.monitor_prefix,
"tags": args.monitor_tags
}
recipe = spack.container.recipe(config, last_phase=args.last_stage)
print(recipe)

@@ -57,7 +57,7 @@
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------

from spack.package import *
from spack import *


class {class_name}({base_class_name}):
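For context on the template change above, a minimal hypothetical package generated with the new-style wildcard import might begin like this (all names and the checksum are placeholders):

    from spack.package import *


    class Hello(Package):
        """Hypothetical example package."""

        homepage = "https://example.com"
        url = "https://example.com/hello-1.0.tar.gz"

        version('1.0', sha256='<placeholder>')

        def install(self, spec, prefix):
            make('install', 'PREFIX={0}'.format(prefix))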

@@ -826,7 +826,7 @@ def get_versions(args, name):
spack.util.url.require_url_format(args.url)
if args.url.startswith('file://'):
valid_url = False  # No point in spidering these
except (ValueError, TypeError):
except ValueError:
valid_url = False

if args.url is not None and args.template != 'bundle' and valid_url:

@@ -11,7 +11,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package_base
import spack.package
import spack.repo
import spack.store

@@ -57,7 +57,7 @@ def dependencies(parser, args):

else:
spec = specs[0]
dependencies = spack.package_base.possible_dependencies(
dependencies = spack.package.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,

@@ -39,9 +39,9 @@ def inverted_dependencies():
actual dependents.
"""
dag = {}
for pkg_cls in spack.repo.path.all_package_classes():
dag.setdefault(pkg_cls.name, set())
for dep in pkg_cls.dependencies:
for pkg in spack.repo.path.all_packages():
dag.setdefault(pkg.name, set())
for dep in pkg.dependencies:
deps = [dep]

# expand virtuals if necessary
@@ -49,7 +49,7 @@ def inverted_dependencies():
deps += [s.name for s in spack.repo.path.providers_for(dep)]

for d in deps:
dag.setdefault(d, set()).add(pkg_cls.name)
dag.setdefault(d, set()).add(pkg.name)
return dag
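The shape of the returned mapping, sketched with illustrative package names:

    # inverted_dependencies() maps each package to the set of packages
    # that depend on it, e.g.:
    #   {'zlib': {'cmake', 'hdf5'}, 'mpi': {'hdf5'}, ...}
    dag = inverted_dependencies()
    dependents = dag.get('zlib', set())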


@@ -87,7 +87,9 @@ def dev_build(self, args):

# Forces the build to run out of the source directory.
spec.constrain('dev_path=%s' % source_path)

spec.concretize()
package = spack.repo.get(spec)

if spec.installed:
tty.error("Already installed in %s" % spec.prefix)
@@ -107,7 +109,7 @@ def dev_build(self, args):
elif args.test == 'root':
tests = [spec.name for spec in specs]

spec.package.do_install(
package.do_install(
tests=tests,
make_jobs=args.jobs,
keep_prefix=args.keep_prefix,
@@ -120,5 +122,5 @@ def dev_build(self, args):

# drop into the build environment of the package?
if args.shell is not None:
spack.build_environment.setup_package(spec.package, dirty=False)
spack.build_environment.setup_package(package, dirty=False)
os.execvp(args.shell, [args.shell])

@@ -54,9 +54,8 @@ def develop(parser, args):
tty.msg(msg)
continue

spec = spack.spec.Spec(entry['spec'])
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
pkg_cls(spec).stage.steal_source(abspath)
stage = spack.spec.Spec(entry['spec']).package.stage
stage.steal_source(abspath)

if not env.dev_specs:
tty.warn("No develop specs to download")

@@ -559,11 +559,11 @@ def env_depfile(args):
target_prefix = args.make_target_prefix

def get_target(name):
# The `all` and `clean` targets are phony. It doesn't make sense to
# The `all`, `fetch` and `clean` targets are phony. It doesn't make sense to
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
# sense to have a prefix like `env/all`, `env/clean` when they are
# sense to have a prefix like `env/all`, `env/fetch`, `env/clean` when they are
# supposed to be included
if name in ('all', 'clean') and os.path.isabs(target_prefix):
if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
return name
else:
return os.path.join(target_prefix, name)
@@ -571,6 +571,9 @@ def get_target(name):
def get_install_target(name):
return os.path.join(target_prefix, '.install', name)

def get_fetch_target(name):
return os.path.join(target_prefix, '.fetch', name)
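Concretely, assuming --make-target-prefix env, these helpers produce paths like (hash abbreviated; a sketch only):

    # target_prefix = 'env'
    get_target('all')              # -> 'env/all'
    get_install_target('abc1234')  # -> 'env/.install/abc1234'
    get_fetch_target('abc1234')    # -> 'env/.fetch/abc1234'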

for _, spec in env.concretized_specs():
for s in spec.traverse(root=True):
hash_to_spec[s.dag_hash()] = s
@@ -585,30 +588,46 @@ def get_install_target(name):
# All package install targets, not just roots.
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]

# Fetch targets for all packages in the environment, not just roots.
all_fetch_targets = [get_fetch_target(h) for h in hash_to_spec.keys()]

buf = six.StringIO()

buf.write("""SPACK ?= spack

.PHONY: {} {}
.PHONY: {} {} {}

{}: {}

{}: {}

{}: {}
\t@touch $@

{}: {}
\t@touch $@

{}:
\t@mkdir -p {}
\t@mkdir -p {} {}

{}: | {}
\t$(info Fetching $(SPEC))
\t$(SPACK) -e '{}' fetch $(SPACK_FETCH_FLAGS) /$(notdir $@) && touch $@

{}: {}
\t$(info Installing $(SPEC))
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
--no-add /$(notdir $@) && touch $@

""".format(get_target('all'), get_target('clean'),
""".format(get_target('all'), get_target('fetch-all'), get_target('clean'),
get_target('all'), get_target('env'),
get_target('fetch-all'), get_target('fetch'),
get_target('env'), ' '.join(root_install_targets),
get_target('dirs'), get_target('.install'),
get_target('.install/%'), get_target('dirs'),
get_target('fetch'), ' '.join(all_fetch_targets),
get_target('dirs'), get_target('.fetch'), get_target('.install'),
get_target('.fetch/%'), get_target('dirs'),
env.path,
get_target('.install/%'), get_target('.fetch/%'),
'+' if args.jobserver else '', env.path))

# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
@@ -638,9 +657,11 @@ def get_install_target(name):
# --make-target-prefix can be any existing directory we do not control,
# including empty string (which means deleting the containing folder
# would delete the folder with the Makefile)
buf.write("{}:\n\trm -f -- {} {}\n".format(
buf.write("{}:\n\trm -f -- {} {} {} {}\n".format(
get_target('clean'),
get_target('env'),
get_target('fetch'),
' '.join(all_fetch_targets),
' '.join(all_install_targets)))

makefile = buf.getvalue()

@@ -52,8 +52,8 @@ def extensions(parser, args):

extendable_pkgs = []
for name in spack.repo.all_package_names():
pkg_cls = spack.repo.path.get_pkg_class(name)
if pkg_cls.extendable:
pkg = spack.repo.get(name)
if pkg.extendable:
extendable_pkgs.append(name)

colify(extendable_pkgs, indent=4)
@@ -64,12 +64,12 @@ def extensions(parser, args):
if len(spec) > 1:
tty.die("Can only list extensions for one package.")

if not spec[0].package.extendable:
tty.die("%s is not an extendable package." % spec[0].name)

env = ev.active_environment()
spec = cmd.disambiguate_spec(spec[0], env)

if not spec.package.extendable:
tty.die("%s is not an extendable package." % spec[0].name)

if not spec.package.extendable:
tty.die("%s does not have extensions." % spec.short_spec)

@@ -5,7 +5,6 @@
from __future__ import print_function

import argparse
import errno
import os
import sys

@@ -94,21 +93,6 @@ def external_find(args):
# It's fine to not find any manifest file if we are doing the
# search implicitly (i.e. as part of 'spack external find')
pass
except Exception as e:
# For most exceptions, just print a warning and continue.
# Note that KeyboardInterrupt does not subclass Exception
# (so CTRL-C will terminate the program as expected).
skip_msg = ("Skipping manifest and continuing with other external "
"checks")
if ((isinstance(e, IOError) or isinstance(e, OSError)) and
e.errno in [errno.EPERM, errno.EACCES]):
# The manifest file does not have sufficient permissions enabled:
# print a warning and keep going
tty.warn("Unable to read manifest due to insufficient "
"permissions.", skip_msg)
else:
tty.warn("Unable to read manifest, unexpected error: {0}"
.format(str(e)), skip_msg)

# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
@@ -119,37 +103,34 @@ def external_find(args):
args.tags = []

# Construct the list of possible packages to be detected
pkg_cls_to_check = []
packages_to_check = []

# Add the packages that have been required explicitly
if args.packages:
pkg_cls_to_check = [
spack.repo.path.get_pkg_class(pkg) for pkg in args.packages
]
packages_to_check = list(spack.repo.get(pkg) for pkg in args.packages)
if args.tags:
allowed = set(spack.repo.path.packages_with_tags(*args.tags))
pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
packages_to_check = [x for x in packages_to_check if x in allowed]

if args.tags and not pkg_cls_to_check:
if args.tags and not packages_to_check:
# If we arrived here we didn't have any explicit package passed
# as argument, which means to search all packages.
# Since tags are cached it's much faster to construct what we need
# to search directly, rather than filtering after the fact
pkg_cls_to_check = [
spack.repo.path.get_pkg_class(pkg_name)
for tag in args.tags
for pkg_name in spack.repo.path.packages_with_tags(tag)
packages_to_check = [
spack.repo.get(pkg) for tag in args.tags for pkg in
spack.repo.path.packages_with_tags(tag)
]
pkg_cls_to_check = list(set(pkg_cls_to_check))
packages_to_check = list(set(packages_to_check))

# If the list of packages is empty, search for every possible package
if not args.tags and not pkg_cls_to_check:
pkg_cls_to_check = list(spack.repo.path.all_package_classes())
if not args.tags and not packages_to_check:
packages_to_check = spack.repo.path.all_packages()

detected_packages = spack.detection.by_executable(
pkg_cls_to_check, path_hints=args.path)
packages_to_check, path_hints=args.path)
detected_packages.update(spack.detection.by_library(
pkg_cls_to_check, path_hints=args.path))
packages_to_check, path_hints=args.path))

new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -196,10 +177,7 @@ def _collect_and_consume_cray_manifest_files(

for directory in manifest_dirs:
for fname in os.listdir(directory):
if fname.endswith('.json'):
fpath = os.path.join(directory, fname)
tty.debug("Adding manifest file: {0}".format(fpath))
manifest_files.append(os.path.join(directory, fpath))
manifest_files.append(os.path.join(directory, fname))

if not manifest_files:
raise NoManifestFileError(
@@ -207,7 +185,6 @@ def _collect_and_consume_cray_manifest_files(
.format(cray_manifest.default_path))

for path in manifest_files:
tty.debug("Reading manifest file: " + path)
try:
cray_manifest.read(path, not dry_run)
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
@@ -220,10 +197,10 @@ def _collect_and_consume_cray_manifest_files(

def external_list(args):
# Trigger a read of all packages, might take a long time.
list(spack.repo.path.all_package_classes())
list(spack.repo.path.all_packages())
# Print all the detectable packages
tty.msg("Detectable packages per repository")
for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
for namespace, pkgs in sorted(spack.package.detectable_packages.items()):
print("Repository:", namespace)
colify.colify(pkgs, indent=4, output=sys.stdout)


@@ -18,7 +18,7 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
from spack.package_base import has_test_method, preferred_version
from spack.package import has_test_method, preferred_version

description = 'get detailed information on a particular package'
section = 'basic'
@@ -269,14 +269,14 @@ def print_tests(pkg):
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
pkg_base = spack.package_base.PackageBase
pkg_base = spack.package.PackageBase
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])

# TODO Refactor START
# Use code from package_base.py's test_process IF this functionality is
# Use code from package.py's test_process IF this functionality is
# accepted.
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))

@@ -292,9 +292,10 @@ def print_tests(pkg):
v_specs = [spack.spec.Spec(v_name) for v_name in v_names]
for v_spec in v_specs:
try:
pkg_cls = spack.repo.path.get_pkg_class(v_spec.name)
pkg = v_spec.package
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
names.append('{0}.test'.format(pkg_cls.name.lower()))
names.append('{0}.test'.format(pkg.name.lower()))
except spack.repo.UnknownPackageError:
pass
@@ -385,9 +386,7 @@ def print_virtuals(pkg):


def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
pkg = pkg_cls(spec)
pkg = spack.repo.get(args.package)

# Output core package information
header = section_title(

@@ -17,6 +17,7 @@
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.fetch_strategy
import spack.monitor
import spack.paths
import spack.report
from spack.error import SpackError
@@ -104,6 +105,8 @@ def setup_parser(subparser):
'--cache-only', action='store_true', dest='cache_only', default=False,
help="only install package from binary mirrors")

monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa

subparser.add_argument(
'--include-build-deps', action='store_true', dest='include_build_deps',
default=False, help="""include build deps when installing from cache,
@@ -289,8 +292,17 @@ def install(parser, args, **kwargs):
parser.print_help()
return

# The user wants to monitor builds using github.com/spack/spack-monitor
if args.use_monitor:
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
tags=args.monitor_tags,
save_local=args.monitor_save_local,
)

reporter = spack.report.collect_info(
spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
spack.package.PackageInstaller, '_install_task', args.log_format, args)
if args.log_file:
reporter.filename = args.log_file

@@ -329,6 +341,10 @@ def get_tests(specs):
reporter.filename = default_log_file(specs[0])
reporter.specs = specs

# Tell the monitor about the specs
if args.use_monitor and specs:
monitor.new_configuration(specs)

tty.msg("Installing environment {0}".format(env.name))
with reporter('build'):
env.install_all(**kwargs)
@@ -374,6 +390,10 @@ def get_tests(specs):
except SpackError as e:
tty.debug(e)
reporter.concretization_report(e.message)

# Tell spack monitor about it
if args.use_monitor and abstract_specs:
monitor.failed_concretization(abstract_specs)
raise

# 2. Concrete specs from yaml files
@@ -434,4 +454,17 @@ def get_tests(specs):

# overwrite all concrete explicit specs from this build
kwargs['overwrite'] = [spec.dag_hash() for spec in specs]

# Update install_args with the monitor args, needed for build task
kwargs.update({
"monitor_keep_going": args.monitor_keep_going,
"monitor_host": args.monitor_host,
"use_monitor": args.use_monitor,
"monitor_prefix": args.monitor_prefix,
})

# If we are using the monitor, we send configs and create the build.
# The dag_hash is the main package id
if args.use_monitor and specs:
monitor.new_configuration(specs)
install_specs(args, kwargs, zip(abstract_specs, specs))

@@ -84,9 +84,9 @@ def match(p, f):
if f.match(p):
return True

pkg_cls = spack.repo.path.get_pkg_class(p)
if pkg_cls.__doc__:
return f.match(pkg_cls.__doc__)
pkg = spack.repo.get(p)
if pkg.__doc__:
return f.match(pkg.__doc__)
return False
else:
def match(p, f):
@@ -133,7 +133,7 @@ def get_dependencies(pkg):
@formatter
def version_json(pkg_names, out):
"""Print all packages with their latest versions."""
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
pkgs = [spack.repo.get(name) for name in pkg_names]

out.write('[\n')

@@ -147,14 +147,14 @@ def version_json(pkg_names, out):
' "maintainers": {5},\n'
' "dependencies": {6}'
'}}'.format(
pkg_cls.name,
VersionList(pkg_cls.versions).preferred(),
json.dumps([str(v) for v in reversed(sorted(pkg_cls.versions))]),
pkg_cls.homepage,
github_url(pkg_cls),
json.dumps(pkg_cls.maintainers),
json.dumps(get_dependencies(pkg_cls))
) for pkg_cls in pkg_classes
pkg.name,
VersionList(pkg.versions).preferred(),
json.dumps([str(v) for v in reversed(sorted(pkg.versions))]),
pkg.homepage,
github_url(pkg),
json.dumps(pkg.maintainers),
json.dumps(get_dependencies(pkg))
) for pkg in pkgs
])
out.write(pkg_latest)
# important: no trailing comma in JSON arrays

@@ -172,7 +172,7 @@ def html(pkg_names, out):
"""

# Read in all packages
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
pkgs = [spack.repo.get(name) for name in pkg_names]

# Start at 2 because the title of the page from Sphinx is id1.
span_id = 2
@@ -189,7 +189,7 @@ def head(n, span_id, title, anchor=None):
# Start with the number of packages, skipping the title and intro
# blurb, which we maintain in the RST file.
out.write('<p>\n')
out.write('Spack currently has %d mainline packages:\n' % len(pkg_classes))
out.write('Spack currently has %d mainline packages:\n' % len(pkgs))
out.write('</p>\n')

# Table of links to all packages
@@ -209,9 +209,9 @@ def head(n, span_id, title, anchor=None):
out.write('<hr class="docutils"/>\n')

# Output some text for each package.
for pkg_cls in pkg_classes:
out.write('<div class="section" id="%s">\n' % pkg_cls.name)
head(2, span_id, pkg_cls.name)
for pkg in pkgs:
out.write('<div class="section" id="%s">\n' % pkg.name)
head(2, span_id, pkg.name)
span_id += 1

out.write('<dl class="docutils">\n')
@@ -219,10 +219,10 @@ def head(n, span_id, title, anchor=None):
out.write('<dt>Homepage:</dt>\n')
out.write('<dd><ul class="first last simple">\n')

if pkg_cls.homepage:
if pkg.homepage:
out.write(('<li>'
'<a class="reference external" href="%s">%s</a>'
'</li>\n') % (pkg_cls.homepage, escape(pkg_cls.homepage, True)))
'</li>\n') % (pkg.homepage, escape(pkg.homepage, True)))
else:
out.write('No homepage\n')
out.write('</ul></dd>\n')
@@ -231,19 +231,19 @@ def head(n, span_id, title, anchor=None):
out.write('<dd><ul class="first last simple">\n')
out.write(('<li>'
'<a class="reference external" href="%s">%s/package.py</a>'
'</li>\n') % (github_url(pkg_cls), pkg_cls.name))
'</li>\n') % (github_url(pkg), pkg.name))
out.write('</ul></dd>\n')

if pkg_cls.versions:
if pkg.versions:
out.write('<dt>Versions:</dt>\n')
out.write('<dd>\n')
out.write(', '.join(
str(v) for v in reversed(sorted(pkg_cls.versions))))
str(v) for v in reversed(sorted(pkg.versions))))
out.write('\n')
out.write('</dd>\n')

for deptype in spack.dependency.all_deptypes:
deps = pkg_cls.dependencies_of_type(deptype)
deps = pkg.dependencies_of_type(deptype)
if deps:
out.write('<dt>%s Dependencies:</dt>\n' % deptype.capitalize())
out.write('<dd>\n')
@@ -256,7 +256,7 @@ def head(n, span_id, title, anchor=None):

out.write('<dt>Description:</dt>\n')
out.write('<dd>\n')
out.write(escape(pkg_cls.format_doc(indent=2), True))
out.write(escape(pkg.format_doc(indent=2), True))
out.write('\n')
out.write('</dd>\n')
out.write('</dl>\n')

@@ -12,7 +12,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.package_base
import spack.package
import spack.repo
import spack.store
from spack.database import InstallStatuses

@@ -221,7 +221,7 @@ def _read_specs_from_file(filename):
for i, string in enumerate(stream):
try:
s = Spec(string)
spack.repo.path.get_pkg_class(s.name)
s.package
specs.append(s)
except SpackError as e:
tty.debug(e)
33 lib/spack/spack/cmd/monitor.py Normal file
@@ -0,0 +1,33 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.monitor

description = "interact with a monitor server"
section = "analysis"
level = "long"


def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='monitor_command')

# This adds the monitor group to the subparser
spack.monitor.get_monitor_group(subparser)

# Spack Monitor Uploads
monitor_parser = sp.add_parser('upload', description="upload to spack monitor")
monitor_parser.add_argument("upload_dir", help="directory root to upload")


def monitor(parser, args, **kwargs):

if args.monitor_command == "upload":
monitor = spack.monitor.get_client(
host=args.monitor_host,
prefix=args.monitor_prefix,
)

# Upload the directory
monitor.upload_local_save(args.upload_dir)
@@ -31,4 +31,5 @@ def patch(parser, args):

specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
spec.package.do_patch()
package = spack.repo.get(spec)
package.do_patch()

@@ -50,7 +50,7 @@ def _show_patch(sha256):
owner = rec['owner']

if 'relative_path' in rec:
pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
pkg_dir = spack.repo.get(owner).package_dir
path = os.path.join(pkg_dir, rec['relative_path'])
print("  path: %s" % path)
else:

@@ -24,4 +24,5 @@ def restage(parser, args):

specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
spec.package.do_restage()
package = spack.repo.get(spec)
package.do_restage()
Some files were not shown because too many files have changed in this diff.