Compare commits
28 Commits: bugfix/bin ... v0.17.2.3-

Commits (SHA1):

ed26276e45
1b793c0b72
0af9331015
da423cdbac
fe0323f7a3
0c3d3afb01
f4b094834a
2170120f40
58a71a5307
203cebb081
13362f6c86
29dbae0c20
4f2b9ca6dc
bf50833d58
5adf17f8a3
856dee5b0a
9baf586a2e
d38310fa02
cc87de83cd
187c0f1d04
1a0b11ca43
982f883bcc
2487a75422
b9bedba68c
620575f505
43eaa713a3
e4f602632c
5fe1a036a0
142 changes: .github/workflows/bootstrap.yml (vendored)
@@ -12,7 +12,6 @@ on:
      # built-in repository or documentation
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
      - '!var/spack/repos/builtin/packages/clingo/**'
      - '!var/spack/repos/builtin/packages/python/**'
      - '!var/spack/repos/builtin/packages/re2c/**'
      - 'lib/spack/docs/**'
@@ -20,16 +19,11 @@ on:
    # nightly at 2:16 AM
    - cron: '16 2 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:

  fedora-clingo-sources:
    runs-on: ubuntu-latest
    container: "fedora:latest"
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      run: |
@@ -37,20 +31,14 @@ jobs:
        bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
        make patch unzip which xz python3 python3-devel tree \
        cmake bison bison-devel libstdc++-static
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - name: Setup non-root user
      run: |
        # See [1] below
        git config --global --add safe.directory /__w/spack/spack
        useradd spack-test && mkdir -p ~spack-test
        chown -R spack-test . ~spack-test
    - name: Setup repo
      shell: runuser -u spack-test -- bash {0}
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd spack-test
        chown -R spack-test .
    - name: Bootstrap clingo
      shell: runuser -u spack-test -- bash {0}
      run: |
@@ -63,7 +51,6 @@ jobs:
  ubuntu-clingo-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      env:
@@ -74,20 +61,22 @@ jobs:
        bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
        make patch unzip xz-utils python3 python3-dev tree \
        cmake bison
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - name: Setup non-root user
    - name: Work around CVE-2022-24765
      run: |
        # See [1] below
        # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
        # a breaking behavior. See:
        # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
        # - https://github.com/actions/checkout/issues/760
        # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
        git config --global --add safe.directory /__w/spack/spack
        useradd spack-test && mkdir -p ~spack-test
        chown -R spack-test . ~spack-test
    - name: Setup repo
      shell: runuser -u spack-test -- bash {0}
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd -m spack-test
        chown -R spack-test .
    - name: Bootstrap clingo
      shell: runuser -u spack-test -- bash {0}
      run: |
@@ -100,7 +89,6 @@ jobs:
  ubuntu-clingo-binaries-and-patchelf:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      env:
@@ -110,20 +98,22 @@ jobs:
        apt-get install -y \
        bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
        make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - name: Setup non-root user
    - name: Work around CVE-2022-24765
      run: |
        # See [1] below
        # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
        # a breaking behavior. See:
        # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
        # - https://github.com/actions/checkout/issues/760
        # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
        git config --global --add safe.directory /__w/spack/spack
        useradd spack-test && mkdir -p ~spack-test
        chown -R spack-test . ~spack-test
    - name: Setup repo
      shell: runuser -u spack-test -- bash {0}
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd -m spack-test
        chown -R spack-test .
    - name: Bootstrap clingo
      shell: runuser -u spack-test -- bash {0}
      run: |
@@ -131,10 +121,10 @@ jobs:
        spack -d solve zlib
        tree ~/.spack/bootstrap/store/

  opensuse-clingo-sources:
    runs-on: ubuntu-latest
    container: "opensuse/leap:latest"
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      run: |
@@ -144,12 +134,9 @@ jobs:
        bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
        make patch unzip which xz python3 python3-devel tree \
        cmake bison
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - name: Setup repo
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - name: Setup repo and non-root user
      run: |
        # See [1] below
        git config --global --add safe.directory /__w/spack/spack
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
@@ -163,13 +150,11 @@ jobs:

  macos-clingo-sources:
    runs-on: macos-latest
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      run: |
        brew install cmake bison@2.7 tree
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
@@ -180,19 +165,16 @@ jobs:
        tree ~/.spack/bootstrap/store/

  macos-clingo-binaries:
    runs-on: ${{ matrix.macos-version }}
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      run: |
        brew install tree
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Bootstrap clingo
@@ -207,14 +189,12 @@ jobs:
    strategy:
      matrix:
        python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
    if: github.repository == 'spack/spack'
    steps:
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Setup repo
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
@@ -229,7 +209,6 @@ jobs:
  ubuntu-gnupg-binaries:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      env:
@@ -239,20 +218,22 @@ jobs:
        apt-get install -y \
        bzip2 curl file g++ gcc patchelf gfortran git gzip \
        make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - name: Setup non-root user
    - name: Work around CVE-2022-24765
      run: |
        # See [1] below
        # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
        # a breaking behavior. See:
        # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
        # - https://github.com/actions/checkout/issues/760
        # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
        git config --global --add safe.directory /__w/spack/spack
        useradd spack-test && mkdir -p ~spack-test
        chown -R spack-test . ~spack-test
    - name: Setup repo
      shell: runuser -u spack-test -- bash {0}
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd -m spack-test
        chown -R spack-test .
    - name: Bootstrap GnuPG
      shell: runuser -u spack-test -- bash {0}
      run: |
@@ -264,7 +245,6 @@ jobs:
  ubuntu-gnupg-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      env:
@@ -275,20 +255,22 @@ jobs:
        bzip2 curl file g++ gcc patchelf gfortran git gzip \
        make patch unzip xz-utils python3 python3-dev tree \
        gawk
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - name: Setup non-root user
    - name: Work around CVE-2022-24765
      run: |
        # See [1] below
        # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
        # a breaking behavior. See:
        # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
        # - https://github.com/actions/checkout/issues/760
        # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
        git config --global --add safe.directory /__w/spack/spack
        useradd spack-test && mkdir -p ~spack-test
        chown -R spack-test . ~spack-test
    - name: Setup repo
      shell: runuser -u spack-test -- bash {0}
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd -m spack-test
        chown -R spack-test .
    - name: Bootstrap GnuPG
      shell: runuser -u spack-test -- bash {0}
      run: |
@@ -300,15 +282,13 @@ jobs:

  macos-gnupg-binaries:
    runs-on: macos-latest
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      run: |
        brew install tree
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -318,15 +298,13 @@ jobs:

  macos-gnupg-sources:
    runs-on: macos-latest
    if: github.repository == 'spack/spack'
    steps:
    - name: Install dependencies
      run: |
        brew install gawk tree
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -334,11 +312,3 @@ jobs:
        spack bootstrap untrust github-actions-v0.2
        spack -d gpg list
        tree ~/.spack/bootstrap/store/

# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
# introduce breaking behavior so we have to set `safe.directory` in gitconfig ourselves.
# See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
21 changes: .github/workflows/build-containers.yml (vendored)
@@ -19,10 +19,6 @@ on:
  release:
    types: [published]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:
  deploy-images:
    runs-on: ubuntu-latest
@@ -47,10 +43,9 @@ jobs:
          [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
    name: Build ${{ matrix.dockerfile[0] }}
    if: github.repository == 'spack/spack'
    steps:
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2

    - name: Set Container Tag Normal (Nightly)
      run: |
@@ -80,33 +75,33 @@ jobs:
        fi

    - name: Upload Dockerfile
      uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
      uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
      with:
        name: dockerfiles
        path: dockerfiles

    - name: Set up QEMU
      uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
      uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1

    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
      uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1

    - name: Log in to GitHub Container Registry
      uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
      uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
      with:
        registry: ghcr.io
        username: ${{ github.actor }}
        password: ${{ secrets.GITHUB_TOKEN }}

    - name: Log in to DockerHub
      if: github.event_name != 'pull_request'
      uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
      if: ${{ github.event_name != 'pull_request' }}
      uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
      with:
        username: ${{ secrets.DOCKERHUB_USERNAME }}
        password: ${{ secrets.DOCKERHUB_TOKEN }}

    - name: Build & Deploy ${{ matrix.dockerfile[0] }}
      uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
      uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # @v2
      with:
        context: dockerfiles/${{ matrix.dockerfile[0] }}
        platforms: ${{ matrix.dockerfile[1] }}

19 changes: .github/workflows/macos_python.yml (vendored)
@@ -16,21 +16,16 @@ on:
    - '.github/workflows/macos_python.yml'
  # TODO: run if we touch any of the recipes involved in this

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

# GitHub Action Limits
# https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions

jobs:
  install_gcc:
    name: gcc with clang
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: 3.9
    - name: spack install
@@ -41,12 +36,11 @@ jobs:

  install_jupyter_clang:
    name: jupyter
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    timeout-minutes: 700
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: 3.9
    - name: spack install
@@ -56,11 +50,10 @@ jobs:

  install_scipy_clang:
    name: scipy, mpl, pd
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: 3.9
    - name: spack install

1 change: .github/workflows/setup_git.ps1 (vendored)
@@ -4,7 +4,6 @@ Set-Location spack

git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git config --global core.longpaths true

if ($(git branch --show-current) -ne "develop")
{

49 changes: .github/workflows/unit_tests.yaml (vendored)
@@ -9,19 +9,14 @@ on:
    branches:
      - develop
      - releases/**

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:
  # Validate that the code can be run on all the Python versions
  # supported by Spack
  validate:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install Python Packages
@@ -36,10 +31,10 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install Python packages
@@ -62,7 +57,7 @@ jobs:
      packages: ${{ steps.filter.outputs.packages }}
      with_coverage: ${{ steps.coverage.outputs.with_coverage }}
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
@@ -111,10 +106,10 @@ jobs:
        - python-version: 3.9
          concretizer: original
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -167,7 +162,7 @@ jobs:
        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
      run: |
        share/spack/qa/run-unit-tests
    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
    - uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
@@ -176,10 +171,10 @@ jobs:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install System packages
@@ -205,7 +200,7 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
    - uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: shelltests,linux
@@ -223,7 +218,7 @@ jobs:
        dnf install -y \
        bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
        make patch tcl unzip which xz
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -242,10 +237,10 @@ jobs:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install System packages
@@ -279,7 +274,7 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
    - uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: unittests,linux,clingo
@@ -291,10 +286,10 @@ jobs:
      matrix:
        python-version: [3.8]
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -325,7 +320,7 @@ jobs:
        echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
        $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
        fi
    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
    - uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        files: ./coverage.xml
@@ -336,8 +331,8 @@ jobs:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
    - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install Python packages
@@ -350,12 +345,12 @@ jobs:
        coverage run $(which spack) audit packages
        coverage combine
        coverage xml
    - name: Package audits (without coverage)
    - name: Package audits (wwithout coverage)
      if: ${{ needs.changes.outputs.with_coverage == 'false' }}
      run: |
        . share/spack/setup-env.sh
        $(which spack) audit packages
    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
    - uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: unittests,linux,audits

35 changes: .github/workflows/windows_python.yml (vendored)
@@ -9,11 +9,6 @@ on:
    branches:
      - develop
      - releases/**

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

defaults:
  run:
    shell:
@@ -22,8 +17,8 @@ jobs:
  validate:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/checkout@v2
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python Packages
@@ -38,10 +33,10 @@ jobs:
  style:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -60,10 +55,10 @@ jobs:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -80,10 +75,10 @@ jobs:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -100,10 +95,10 @@ jobs:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -125,10 +120,10 @@ jobs:
        git config --global core.symlinks false
      shell:
        powershell
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -144,11 +139,11 @@ jobs:
        echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
      env:
        ProgressPreference: SilentlyContinue
    - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
    - uses: actions/upload-artifact@v3
      with:
        name: Windows Spack Installer Bundle
        path: ${{ env.installer_root }}\pkg\Spack.exe
    - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
    - uses: actions/upload-artifact@v3
      with:
        name: Windows Spack Installer
        path: ${{ env.installer_root}}\pkg\Spack.msi
@@ -159,7 +154,7 @@ jobs:
      run:
        shell: pwsh
    steps:
    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages

204 changes: CHANGELOG.md
@@ -1,205 +1,3 @@
# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

## Major features in this release

1. **Concretizer now reuses by default**

   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
   is now the default concretization mode. Spack will try hard to
   resolve dependencies using installed packages or binaries (#30396).

   To avoid reuse and to use the latest package configurations (the
   old default), you can use `spack install --fresh`, or add
   configuration like this to your environment or `concretizer.yaml`:

   ```yaml
   concretizer:
     reuse: false
   ```
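   A quick sketch of the two modes side by side (the package name here is an arbitrary example):

   ```console
   $ spack install hdf5          # v0.18 default: reuse installed packages and binaries
   $ spack install --fresh hdf5  # previous default: prefer the latest versions and configurations
   ```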
2. **Finer-grained hashes**

   Spack hashes now include `link`, `run`, *and* `build` dependencies,
   as well as a canonical hash of package recipes. Previously, hashes
   only included `link` and `run` dependencies (though `build`
   dependencies were stored by environments). We coarsened the hash to
   reduce churn in user installations, but the new default concretizer
   behavior mitigates this concern and gets us reuse *and* provenance.
   You will be able to see the build dependencies of new installations
   with `spack find`. Old installations will not change and their
   hashes will not be affected. (#28156, #28504, #30717, #30861)

3. **Improved error messages**

   Error handling with the new concretizer is now done with
   optimization criteria rather than with unsatisfiable cores, and
   Spack reports many more details about conflicting constraints.
   (#30669)

4. **Unify environments when possible**

   Environments have thus far supported `concretization: together` or
   `concretization: separately`. These have been replaced by a new
   preference in `concretizer.yaml`:

   ```yaml
   concretizer:
     unify: [true|false|when_possible]
   ```

   `concretizer:unify:when_possible` will *try* to resolve a fully
   unified environment, but if it cannot, it will create multiple
   configurations of some packages where it has to. For large
   environments that previously had to be concretized separately, this
   can result in a huge speedup (40-50x). (#28941)
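   A minimal sketch of opting an environment into the new mode (one of the three values shown above):

   ```yaml
   concretizer:
     unify: when_possible
   ```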
5. **Automatically find externals on Cray machines**

   Spack can now automatically discover installed packages in the Cray
   Programming Environment by running `spack external find` (or `spack
   external read-cray-manifest` to *only* query the PE). Packages from
   the PE (e.g., `cray-mpich`) are added to the database with full
   dependency information, and compilers from the PE are added to
   `compilers.yaml`. Available with the June 2022 release of the Cray
   Programming Environment. (#24894, #30428)

6. **New binary format and hardened signing**

   Spack now has an updated binary format, with improvements for
   security. The new format has a detached signature file, and Spack
   verifies the signature before untarring or decompressing the binary
   package. The previous format embedded the signature in a `tar`
   file, which required the client to run `tar` *before* verifying
   (#30750). Spack can still install from build caches using the old
   format, but we encourage users to switch to the new format going
   forward.

   Production GitLab pipelines have been hardened to securely sign
   binaries. There is now a separate signing stage so that signing
   keys are never exposed to build system code, and signing keys are
   ephemeral and only live as long as the signing pipeline stage.
   (#30753)

7. **Bootstrap mirror generation**

   The `spack bootstrap mirror` command can automatically create a
   mirror for bootstrapping the concretizer and other needed
   dependencies in an air-gapped environment. (#28556)

8. **Nascent Windows support**

   Spack now has initial support for Windows. Spack core has been
   refactored to run in the Windows environment, and a small number of
   packages can now build for Windows. More details are
   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
   (#27021, #28385, many more)

9. **Makefile generation**

   `spack env depfile` can be used to generate a `Makefile` from an
   environment, which can be used to build the packages in the
   environment in parallel on a single node. e.g.:

   ```console
   spack -e myenv env depfile > Makefile
   make
   ```

   Spack propagates `gmake` jobserver information to builds so that
   their jobs can share cores. (#30039, #30254, #30302, #30526)
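   Because the generated `Makefile` cooperates with GNU make's jobserver, a parallel invocation is the natural follow-up; a sketch (the `-j` value is an arbitrary choice):

   ```console
   spack -e myenv env depfile > Makefile
   make -j16
   ```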
10. **New variant features**

    In addition to being conditional themselves, variants can now have
    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
    that are only possible for certain configurations of a package. (#29530)

    Variants can be
    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
    which prevents them from being enabled or disabled by the
    concretizer. Sticky variants must be set explicitly by users
    on the command line or in `packages.yaml`. (#28630)

    * Allow conditional possible values in variants
    * Add a "sticky" property to variants

## Other new features of note

* Environment views can optionally link only `run` dependencies
  with `link:run` (#29336)
* `spack external find --all` finds library-only packages in
  addition to build dependencies (#28005)
* Customizable `config:license_dir` option (#30135)
* `spack external find --path PATH` takes a custom search path (#30479)
* `spack spec` has a new `--format` argument like `spack find` (#27908)
* `spack concretize --quiet` skips printing concretized specs (#30272)
* `spack info` now has cleaner output and displays test info (#22097)
* Package-level submodule option for git commit versions (#30085, #30037)
* Using `/hash` syntax to refer to concrete specs in an environment
  now works even if `/hash` is not installed. (#30276)

## Major internal refactors

* full hash (see above)
* new develop versioning scheme `0.19.0-dev0`
* Allow for multiple dependencies/dependents from the same package (#28673)
* Splice differing virtual packages (#27919)

## Performance Improvements

* Concretization of large environments with `unify: when_possible` is
  much faster than concretizing separately (#28941, see above)
* Single-pass view generation algorithm is 2.6x faster (#29443)

## Archspec improvements

* `oneapi` and `dpcpp` flag support (#30783)
* better support for `M1` and `a64fx` (#30683)

## Removals and Deprecations

* Spack no longer supports Python `2.6` (#27256)
* Removed deprecated `--run-tests` option of `spack install`;
  use `spack test` (#30461)
* Removed deprecated `spack flake8`; use `spack style` (#27290)

* Deprecate `spack:concretization` config option; use
  `concretizer:unify` (#30038)
* Deprecate top-level module configuration; use module sets (#28659)
* `spack activate` and `spack deactivate` are deprecated in favor of
  environments; will be removed in `0.19.0` (#29430; see also `link:run`
  in #29336 above)

## Notable Bugfixes

* Fix bug that broke locks with many parallel builds (#27846)
* Many bugfixes and consistency improvements for the new concretizer
  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)

## Packages

* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
* Refactored `lua` support: `lua-lang` virtual supports both
  `lua` and `luajit` via new `LuaPackage` build system (#28854)
* PythonPackage: now installs packages with `pip` (#27798)
* Python: improve site_packages_dir handling (#28346)
* Extends: support spec, not just package name (#27754)
* `find_libraries`: search for both .so and .dylib on macOS (#28924)
* Use stable URLs and `?full_index=1` for all github patches (#29239)

## Spack community stats

* 6,416 total packages, 458 new since `v0.17.0`
* 219 new Python packages
* 60 new R packages
* 377 people contributed to this release
* 337 committers to packages
* 85 committers to core


# v0.17.2 (2022-04-13)

### Spack bugfixes

@@ -213,7 +11,7 @@
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
* Remove "Known issues" from documentation (#29664)
* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)

@@ -6,15 +6,34 @@ bootstrap:
  # by Spack is installed in a "store" subfolder of this root directory
  root: $user_cache_path/bootstrap
  # Methods that can be used to bootstrap software. Each method may or
  # may not be able to bootstrap all the software that Spack needs,
  # may not be able to bootstrap all of the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions-v0.2'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.2
      homepage: https://github.com/spack/spack-bootstrap-mirrors
      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
  - name: 'github-actions-v0.1'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
  - name: 'spack-install'
    metadata: $spack/share/spack/bootstrap/spack-install
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
      homepage: https://github.com/spack/spack-bootstrap-mirrors
      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
  # This method is just Spack bootstrapping the software it needs from sources.
  # It has been added here so that users can selectively disable bootstrapping
  # from sources by "untrusting" it.
  - name: spack-install
    type: install
    description: |
      Specs built from sources by Spack. May take a long time.
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
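The comment in the defaults above notes that bootstrapping from sources can be selectively disabled by "untrusting" that method; a minimal sketch, reusing the method name from the listing:

```console
% spack bootstrap untrust spack-install
```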
@@ -14,23 +14,4 @@ concretizer:
  # concretizing specs. If `true`, we'll try to use as many installs/binaries
  # as possible, rather than building. If `false`, we'll always give you a fresh
  # concretization.
  reuse: true
  # Options that tune which targets are considered for concretization. The
  # concretization process is very sensitive to the number of targets, and the time
  # needed to reach a solution increases noticeably with the number of targets
  # considered.
  targets:
    # Determine whether we want to target specific or generic microarchitectures.
    # An example of the first kind might be for instance "skylake" or "bulldozer",
    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
    granularity: microarchitectures
    # If "false" allow targets that are incompatible with the current host (for
    # instance concretize with target "icelake" while running on "haswell").
    # If "true" only allow targets that are compatible with the host.
    host_compatible: true
  # When "true" concretize root specs of environments together, so that each unique
  # package in an environment corresponds to one concrete spec. This ensures
  # environments can always be activated. When "false" perform concretization separately
  # on each root spec, allowing different versions and variants of the same package in
  # an environment.
  unify: false
  reuse: false

@@ -33,9 +33,6 @@ config:
  template_dirs:
    - $spack/share/spack/templates

  # Directory where licenses should be located
  license_dir: $spack/etc/spack/licenses

  # Temporary locations Spack can try to use for builds.
  #
  # Recommended options are given below.

@@ -35,8 +35,7 @@ packages:
      jpeg: [libjpeg-turbo, libjpeg]
      lapack: [openblas, amdlibflame]
      libllvm: [llvm, llvm-amdgpu]
      lua-lang: [lua, lua-luajit-openresty, lua-luajit]
      luajit: [lua-luajit-openresty, lua-luajit]
      lua-lang: [lua, lua-luajit]
      mariadb-client: [mariadb-c-client, mariadb]
      mkl: [intel-mkl]
      mpe: [mpe2]

@@ -192,32 +192,32 @@ you can use them to customize an installation in :ref:`sec-specs`.
Reusing installed dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

By default, when you run ``spack install``, Spack tries hard to reuse existing installations
as dependencies, either from a local store or from remote buildcaches if configured.
This minimizes unwanted rebuilds of common dependencies, in particular if
you update Spack frequently.
.. warning::

In case you want the latest versions and configurations to be installed instead,
you can add the ``--fresh`` option:
   The ``--reuse`` option described here will become the default installation
   method in the next Spack version, and you will be able to get the current
   behavior by using ``spack install --fresh``.

By default, when you run ``spack install``, Spack tries to build a new
version of the package you asked for, along with updated versions of
its dependencies. This gets you the latest versions and configurations,
but it can result in unwanted rebuilds if you update Spack frequently.

If you want Spack to try hard to reuse existing installations as dependencies,
you can add the ``--reuse`` option:

.. code-block:: console

   $ spack install --fresh mpich
   $ spack install --reuse mpich

Reusing installations in this mode is "accidental", and happening only if
there's a match between existing installations and what Spack would have installed
anyhow.

You can use the ``spack spec -I mpich`` command to see what
This will not do anything if ``mpich`` is already installed. If ``mpich``
is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
the ``mpich`` will be built with the installed versions, if possible.
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
will be reused and what will be built before you install.

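A minimal illustration (output abridged and illustrative; ``[+]`` marks specs that will be reused from the local store, ``-`` marks specs that will be built):

.. code-block:: console

   % spack spec -I mpich
   Concretized
   --------------------------------
    -   mpich@3.4.3 ...
   [+]      ^hwloc@2.7.0 ...
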
You can configure Spack to use the ``--fresh`` behavior by default in
``concretizer.yaml``:

.. code-block:: yaml

   concretizer:
     reuse: false

You can configure Spack to use the ``--reuse`` behavior by default in
``concretizer.yaml``.

.. _cmd-spack-uninstall:

@@ -50,13 +50,6 @@ build cache files for the "ninja" spec:
Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.

.. warning::

   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
   will be able to verify and install binary caches both in the new and in the old format.
   Support for using the old format is expected to end in v0.19, so we advise users to
   recreate relevant buildcaches using Spack v0.18 or higher.

---------------------------------------
Finding or installing build cache files
---------------------------------------

@@ -1,160 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` Section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
       [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
       [B] MISSING "gpg2": required to sign/verify buildcaches

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretizes a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack libassuan@2.5.5 libgpg-error@1.42 libksba@1.5.1 pinentry@1.1.1 zlib@1.2.11
   gnupg@2.3.1 libgcrypt@1.9.3 libiconv@1.16 npth@1.6 python@3.8

In case it is needed you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default Spack is
configured to try first bootstrapping from pre-built binaries and to
fall back to bootstrapping from sources if that fails.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes though Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping.

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
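End to end, the air-gapped workflow described above amounts to the following sketch (paths taken from the example output):

.. code-block:: console

   # On a machine with internet access:
   % spack bootstrap mirror --binary-packages /opt/bootstrap
   # Move /opt/bootstrap to the air-gapped system, then register it there:
   % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
   % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
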
@@ -219,65 +219,33 @@ Concretizer options

but you can also use ``concretizer.yaml`` to customize aspects of the
algorithm it uses to select the dependencies you install:

.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
   :language: yaml
.. code-block:: yaml

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Reuse already installed packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   concretizer:
     # Whether to consider installed packages or packages from buildcaches when
     # concretizing specs. If `true`, we'll try to use as many installs/binaries
     # as possible, rather than building. If `false`, we'll always give you a fresh
     # concretization.
     reuse: false

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
^^^^^^^^^^^^^^^^
``reuse``
^^^^^^^^^^^^^^^^

This controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:
``package.py`` files and ``packages.yaml`` (``false``).

.. code-block:: console
You can use ``spack install --reuse`` to enable reuse for a single installation,
and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
enabled by default.

   % spack install --reuse <spec>
.. note::

to enable reuse for a single installation, and you can use:
   ``reuse: false`` is the current default, but ``reuse: true`` will be the default
   in the next Spack release. You will still be able to use ``spack install --fresh``
   to get the old behavior.

.. code-block:: console

   spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.

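As a sketch, the behavior can be toggled per invocation or persisted in
configuration (the ``spack config add`` form assumes a Spack version that
supports that subcommand):

.. code-block:: console

   % spack install --reuse <spec>               # prefer installed packages once
   % spack install --fresh <spec>               # force a fresh concretization once
   % spack config add concretizer:reuse:true    # make reuse the default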
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file,
and there are no corresponding command line arguments to enable them for a single solve.

The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
If set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: microarchitectures

Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
compatibility. If instead the option is set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic

Spack will consider only generic microarchitectures. For instance, when running on a
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.

The ``host_compatible`` option is a Boolean option that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.

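Putting the two options together, a ``concretizer.yaml`` restricting the solve
to generic, host-compatible targets might look like this sketch (defaults may
differ between Spack versions):

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic
       host_compatible: true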
.. _package-preferences:

@@ -39,7 +39,6 @@ on these ideas for each distinct build system that Spack supports:

   build_systems/autotoolspackage
   build_systems/cmakepackage
   build_systems/cachedcmakepackage
   build_systems/mesonpackage
   build_systems/qmakepackage
   build_systems/sippackage
@@ -48,7 +47,6 @@ on these ideas for each distinct build system that Spack supports:
   :maxdepth: 1
   :caption: Language-specific

   build_systems/luapackage
   build_systems/octavepackage
   build_systems/perlpackage
   build_systems/pythonpackage

@@ -1,123 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   EOF
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, accepts all of the same options, and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
you may need to add a few arguments yourself, and
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible CMake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.

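For illustration, the strings these helpers return are ordinary CMake
``set(... CACHE ...)`` commands. The exact formatting below is an assumption
and may differ slightly between Spack versions:

.. code-block:: cmake

   # cmake_cache_option('FOO_MPI', True, 'enable mpi') produces roughly:
   set(FOO_MPI ON CACHE BOOL "enable mpi")

   # cmake_cache_path('BAZ_PREFIX', '/path/to/baz') produces roughly:
   set(BAZ_PREFIX "/path/to/baz" CACHE PATH "")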
A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec.
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
       return entries

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
       return entries

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
@@ -84,8 +84,8 @@ build ``hdf5`` with Intel oneAPI MPI do::

   spack install hdf5 +mpi ^intel-oneapi-mpi

Using Externally Installed oneAPI Tools
=======================================
Using an Externally Installed oneAPI
====================================

Spack can also use oneAPI tools that are manually installed with
`Intel Installers`_. The procedures for configuring Spack to use
@@ -110,7 +110,7 @@ Another option is to manually add the configuration to
Libraries
---------

If you want Spack to use oneMKL that you have installed without Spack in
If you want Spack to use MKL that you have installed without Spack in
the default location, then add the following to
``~/.spack/packages.yaml``, adjusting the version as appropriate::

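A sketch of what such an external entry typically looks like is shown below;
the version and prefix are assumptions to be adjusted for the actual
installation:

.. code-block:: yaml

   packages:
     intel-oneapi-mkl:
       externals:
       - spec: intel-oneapi-mkl@2021.1.1
         prefix: /opt/intel/oneapi
       buildable: false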
@@ -139,7 +139,7 @@ You can also use Spack-installed libraries. For example::

   spack load intel-oneapi-mkl

Will update your environment's CPATH, LIBRARY_PATH, and other
environment variables for building an application with oneMKL.
environment variables for building an application with MKL.

More information
================

@@ -15,9 +15,6 @@ IntelPackage
Intel packages in Spack
^^^^^^^^^^^^^^^^^^^^^^^^

This is an earlier version of the Intel software development tools, which has
now been replaced by the Intel oneAPI Toolkits.

Spack can install and use several software development products offered by Intel.
Some of these are available under no-cost terms; others require a paid license.
All share the same basic steps for configuration, installation, and, where

@@ -1,105 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _luapackage:

----------
LuaPackage
----------

LuaPackage is a helper for the common case of Lua packages that provide
a rockspec file. It is not meant to take a rock archive, but to build
a source archive or repository that provides a rockspec, which should cover
most Lua packages. In the case that a Lua package builds with Make rather than
luarocks, prefer MakefilePackage.

^^^^^^
Phases
^^^^^^

The ``LuaPackage`` base class comes with the following phases:

#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
#. ``preprocess`` - adjust sources or rockspec to fix the build
#. ``install`` - install the project

By default, these phases run:

.. code-block:: console

   # If the archive is a source rock
   $ luarocks unpack <archive>.src.rock
   $ # preprocess is a noop by default
   $ luarocks make <name>.rockspec


Any of these phases can be overridden in your package as necessary.

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^

Packages that use the Lua/LuaRocks build system can be identified by the
presence of a ``*.rockspec`` file in their source tree, or can be fetched as
a source rock archive (``.src.rock``). The rockspec file declares things like build
instructions and dependencies; the ``.src.rock`` also contains all the code.

It is common for the rockspec file to list the Lua version required as
a dependency. The LuaPackage class adds appropriate dependencies on a Lua
implementation, but it is a good idea to specify the version required with
a ``depends_on`` statement. The block normally will be a table definition like
this:

.. code-block:: lua

   dependencies = {
       "lua >= 5.1",
   }

The LuaPackage class supports source repositories and archives containing
a rockspec and directly downloading source rock files. It *does not* support
downloading dependencies listed inside a rockspec, and thus does not support
directly downloading a rockspec as an archive.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

All base dependencies are added by the build system, but LuaRocks is run in a
way that avoids downloading extra Lua dependencies during the build. If the
package needs Lua libraries outside the standard set, they should be added as
dependencies.

To specify a Lua version constraint but allow all Lua implementations, prefer
to use ``depends_on("lua-lang@5.1:5.1.99")`` to express any 5.1-compatible
version. If the package requires LuaJIT rather than Lua,
``depends_on("luajit")`` should be used to ensure a LuaJIT distribution is
used instead of the Lua interpreter. Alternately, if only interpreted Lua will
work, ``depends_on("lua")`` will express that.

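A minimal package sketch putting these pieces together (the package name, URL,
checksum, and version are hypothetical):

.. code-block:: python

   class LuaFoo(LuaPackage):
       """Hypothetical Lua package built from a rockspec."""

       homepage = "https://example.com/lua-foo"
       url = "https://example.com/lua-foo-1.0.0.tar.gz"

       version('1.0.0', sha256='0000000000000000000000000000000000000000000000000000000000000000')

       # The rockspec lists "lua >= 5.1"; allow any 5.1-compatible implementation
       depends_on('lua-lang@5.1:5.1.99')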
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to luarocks make
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you need to pass any arguments to the ``luarocks make`` call, you can
override the ``luarocks_args`` method like so:

.. code-block:: python

   def luarocks_args(self):
       return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:

.. code-block:: python

   def luarocks_args(self):
       return ["CFLAGS='-Wno-error=implicit-function-declaration'"]

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on the LuaRocks build system, see:
https://luarocks.org/
@@ -95,7 +95,7 @@ class of your package. For example, you can add it to your

       # Set up the hip macros needed by the build
       args.extend([
           '-DENABLE_HIP=ON',
           '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
       rocm_archs = spec.variants['amdgpu_target'].value
       if 'none' not in rocm_archs:
           args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'

@@ -23,10 +23,7 @@
import sys
from glob import glob

from docutils.statemachine import StringList
from sphinx.domains.python import PythonDomain
from sphinx.ext.apidoc import main as sphinx_apidoc
from sphinx.parsers import RSTParser

# -- Spack customizations -----------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
@@ -85,6 +82,9 @@
#
# Disable duplicate cross-reference warnings.
#
from sphinx.domains.python import PythonDomain


class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if 'refspecific' in node:
@@ -92,20 +92,8 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        return super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode)

#
# Disable tabs to space expansion in code blocks
# since Makefiles require tabs.
#
class NoTabExpansionRSTParser(RSTParser):
    def parse(self, inputstring, document):
        if isinstance(inputstring, str):
            lines = inputstring.splitlines()
            inputstring = StringList(lines, document.current_source)
        super().parse(inputstring, document)

def setup(sphinx):
    sphinx.add_domain(PatchedPythonDomain, override=True)
    sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)

# -- General configuration -----------------------------------------------------

@@ -59,8 +59,7 @@ other techniques to minimize the size of the final image:

   &&   echo "  specs:" \
   &&   echo "  - gromacs+mpi" \
   &&   echo "  - mpich" \
   &&   echo "  concretizer:" \
   &&   echo "    unify: true" \
   &&   echo "  concretization: together" \
   &&   echo "  config:" \
   &&   echo "    install_tree: /opt/software" \
   &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -109,10 +108,9 @@ Spack Images on Docker Hub
--------------------------

Docker images with Spack preinstalled and ready to be used are
built when a release is tagged, or nightly on ``develop``. The images
are then pushed both to `Docker Hub <https://hub.docker.com/u/spack>`_
and to `GitHub Container Registry <https://github.com/orgs/spack/packages?repo_name=spack>`_.
The OSes that are currently supported are summarized in the table below:
built on `Docker Hub <https://hub.docker.com/u/spack>`_
at every push to ``develop`` or to a release branch. The OSes that
are currently supported are summarized in the table below:

.. _containers-supported-os:

@@ -122,31 +120,22 @@ The OS that are currently supported are summarized in the table below:

   * - Operating System
     - Base Image
     - Spack Image
   * - Ubuntu 16.04
     - ``ubuntu:16.04``
     - ``spack/ubuntu-xenial``
   * - Ubuntu 18.04
     - ``ubuntu:18.04``
     - ``spack/ubuntu-bionic``
   * - Ubuntu 20.04
     - ``ubuntu:20.04``
     - ``spack/ubuntu-focal``
   * - Ubuntu 22.04
     - ``ubuntu:22.04``
     - ``spack/ubuntu-jammy``
   * - CentOS 7
     - ``centos:7``
     - ``spack/centos7``
   * - CentOS Stream
     - ``quay.io/centos/centos:stream``
     - ``spack/centos-stream``
   * - openSUSE Leap
     - ``opensuse/leap``
     - ``spack/leap15``
   * - Amazon Linux 2
     - ``amazonlinux:2``
     - ``spack/amazon-linux``

All the images are tagged with the corresponding release of Spack:

.. image:: images/ghcr_spack.png
.. image:: dockerhub_spack.png

with the exception of the ``latest`` tag that points to the HEAD
of the ``develop`` branch. These images are available for anyone
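For instance, pulling a specific image by name (the image name is taken from
the table above; the tag is an assumption):

.. code-block:: console

   $ docker pull spack/ubuntu-jammy:latest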
@@ -256,8 +245,7 @@ software is respectively built and installed:

   &&   echo "  specs:" \
   &&   echo "  - gromacs+mpi" \
   &&   echo "  - mpich" \
   &&   echo "  concretizer:" \
   &&   echo "    unify: true" \
   &&   echo "  concretization: together" \
   &&   echo "  config:" \
   &&   echo "    install_tree: /opt/software" \
   &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -378,8 +366,7 @@ produces, for instance, the following ``Dockerfile``:

   &&   echo "  externals:" \
   &&   echo "  - spec: cuda%gcc" \
   &&   echo "    prefix: /usr/local/cuda" \
   &&   echo "  concretizer:" \
   &&   echo "    unify: true" \
   &&   echo "  concretization: together" \
   &&   echo "  config:" \
   &&   echo "    install_tree: /opt/software" \
   &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -151,7 +151,7 @@ Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^

:mod:`spack.package`
  Contains the :class:`~spack.package_base.Package` class, which
  Contains the :class:`~spack.package.Package` class, which
  is the superclass for all packages in Spack. Methods on ``Package``
  implement all phases of the :ref:`package lifecycle
  <package-lifecycle>` and manage the build process.

BIN
lib/spack/docs/dockerhub_spack.png
Normal file
Binary file not shown. After Width: | Height: | Size: 88 KiB
@@ -273,9 +273,19 @@ or
Concretizing
^^^^^^^^^^^^

Once some user specs have been added to an environment, they can be concretized.
There are at the moment three different modes of operation to concretize an environment,
which are explained in detail in :ref:`environments_concretization_config`.
Once some user specs have been added to an environment, they can be
concretized. *By default specs are concretized separately*, one after
the other. This mode of operation permits deploying a full
software stack where multiple configurations of the same package
need to be installed alongside each other. Central installations done
at HPC centers by system administrators or user support groups
are a common case that fits this behavior.
Environments *can also be configured to concretize all
the root specs in a self-consistent way* to ensure that
each package in the environment comes with a single configuration. This
mode of operation is usually what is required by software developers who
want to deploy their development environment.

Regardless of which mode of operation has been chosen, the following
command will ensure all the root specs are concretized according to the
constraints that are prescribed in the configuration:
@@ -339,24 +349,6 @@ If the Environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will first concretize
the Environment and then install the concretized specs.

.. note::

   Every ``spack install`` process builds one package at a time with multiple build
   jobs, controlled by the ``-j`` flag and the ``config:build_jobs`` option
   (see :ref:`build-jobs`). To speed up environment builds further, independent
   packages can be installed in parallel by launching more Spack instances. For
   example, the following will build at most four packages in parallel using
   three background jobs:

   .. code-block:: console

      [myenv]$ spack install & spack install & spack install & spack install

   Another option is to generate a ``Makefile`` and run ``make -j<N>`` to control
   the number of parallel install processes. See :ref:`env-generate-depfile`
   for details.


As it installs, ``spack install`` creates symbolic links in the
``logs/`` directory in the Environment, allowing for easy inspection
of build logs related to that environment. The ``spack install``
@@ -483,76 +475,32 @@ Appending to this list in the yaml is identical to using the ``spack
add`` command from the command line. However, there is more power
available from the yaml file.

.. _environments_concretization_config:

^^^^^^^^^^^^^^^^^^^
Spec concretization
^^^^^^^^^^^^^^^^^^^
An environment can be concretized in three different modes, and the behavior active under any environment
is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:

Specs can be concretized separately or together, as already
explained in :ref:`environments_concretization`. The behavior active
under any environment is determined by the ``concretization`` property:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: false
     - ncview
     - netcdf
     - nco
     - py-sphinx
     concretization: together

This mode of operation permits deploying a full software stack where multiple configurations of the same package
need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
then it will be used for both ``hdf5`` installations.

If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
i.e. trying to maximize reuse of dependencies across different specs:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: when_possible

Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
Central installations done at HPC centers by system administrators or user support groups are a common case
that fits either of these two modes.

Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
ensure that each package in the environment comes with a single configuration:

.. code-block:: yaml

   spack:
     specs:
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: true

This mode of operation is usually what is required by software developers who want to deploy their development
environment and have a single view of it in the filesystem.

.. note::

   The ``concretizer:unify`` config option was introduced in Spack 0.18 to
   replace the ``concretization`` property. For reference,
   ``concretization: together`` is replaced by ``concretizer:unify:true``,
   and ``concretization: separately`` is replaced by ``concretizer:unify:false``.
   which can currently take either one of the two allowed values ``together`` or ``separately``
   (the default).

.. admonition:: Re-concretization of user specs

   When concretizing specs *together* or *together where possible* the entire set of specs will be
   When concretizing specs together the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent / minimal. When instead the specs are concretized
   the environment remains consistent. When instead the specs are concretized
   separately only the new specs will be re-concretized after any addition.

^^^^^^^^^^^^^
@@ -799,7 +747,7 @@ directories.
       select: [^mpi]
       exclude: ['%pgi@18.5']
       projections:
         all: '{name}/{version}-{compiler.name}'
         all: {name}/{version}-{compiler.name}
       link: all
       link_type: symlink

@@ -962,93 +910,3 @@ environment.

The ``spack env deactivate`` command will remove the default view of
the environment from the user's path.


.. _env-generate-depfile:


-------------------------------------
Generating Depfiles from Environments
-------------------------------------

Spack can generate ``Makefile``\s to make it easier to build multiple
packages in an environment in parallel. Generated ``Makefile``\s expose
targets that can be included in existing ``Makefile``\s, to allow
other targets to depend on the environment installation.

A typical workflow is as follows:

.. code:: console

   spack env create -d .
   spack -e . add perl
   spack -e . concretize
   spack -e . env depfile > Makefile
   make -j64

This generates a ``Makefile`` from a concretized environment in the
current working directory, and ``make -j64`` installs the environment,
exploiting parallelism across packages as much as possible. Spack
respects the Make jobserver and forwards it to the build environment
of packages, meaning that a single ``-j`` flag is enough to control the
load, even when packages are built in parallel.

By default the following phony convenience targets are available:

- ``make all``: installs the environment (default target);
- ``make fetch-all``: only fetch sources of all packages;
- ``make clean``: cleans files used by make, but does not uninstall packages.

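As a sketch, ``make fetch-all`` can be combined with ``make all`` to separate
downloading from building, e.g. when the build step runs without network
access (this assumes a ``Makefile`` generated as above):

.. code:: console

   make fetch-all      # run on a host with network access
   make -j16 all       # build later, possibly without network access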
.. tip::

   GNU Make version 4.3 and above has great support for output synchronization
   through the ``-O`` and ``--output-sync`` flags, which ensure that output is
   printed orderly per package install. To get synchronized output with colors,
   use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.

The following advanced example shows how generated targets can be used in a
``Makefile``:

.. code:: Makefile

   SPACK ?= spack

   .PHONY: all clean fetch env

   all: env

   spack.lock: spack.yaml
   	$(SPACK) -e . concretize -f

   env.mk: spack.lock
   	$(SPACK) -e . env depfile -o $@ --make-target-prefix spack

   fetch: spack/fetch
   	$(info Environment fetched!)

   env: spack/env
   	$(info Environment installed!)

   clean:
   	rm -rf spack.lock env.mk spack/

   ifeq (,$(filter clean,$(MAKECMDGOALS)))
   include env.mk
   endif

When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
from its rule, which triggers concretization. When done, the generated targets
``spack/fetch`` and ``spack/env`` are available. In the above
example, the ``env`` target uses the latter as a prerequisite, meaning
that it can make use of the installed packages in its commands.

As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
the include is conditional.

.. note::

   When including generated ``Makefile``\s, it is important to use
   the ``--make-target-prefix`` flag and use the non-phony targets
   ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
   prerequisites, instead of the phony targets ``<target-prefix>/all``
   and ``<target-prefix>/fetch-all`` respectively.

Binary file not shown. Before Width: | Height: | Size: 70 KiB
@@ -63,7 +63,6 @@ or refer to the full manual below.

   configuration
   config_yaml
   bootstrapping
   build_settings
   environments
   containers

@@ -1070,32 +1070,13 @@ Commits

Submodules
   You can supply ``submodules=True`` to cause Spack to fetch submodules
   recursively along with the repository at fetch time.
   recursively along with the repository at fetch time. For more information
   about git submodules see the manpage of git: ``man git-submodule``.

   .. code-block:: python

      version('1.0.1', tag='v1.0.1', submodules=True)

   If a package needs more fine-grained control over submodules, define
   ``submodules`` to be a callable function that takes the package instance as
   its only argument. The function should return a list of submodules to be fetched.

   .. code-block:: python

      def submodules(package):
          submodules = []
          if "+variant-1" in package.spec:
              submodules.append("submodule_for_variant_1")
          if "+variant-2" in package.spec:
              submodules.append("submodule_for_variant_2")
          return submodules


      class MyPackage(Package):
          version("0.1.0", submodules=submodules)

   For more information about git submodules see the manpage of git: ``man
   git-submodule``.

.. _github-fetch:

@@ -2412,9 +2393,9 @@ Influence how dependents are built or run

Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
:meth:`setup_dependent_run_environment <spack.package.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
:meth:`setup_dependent_build_environment <spack.package.PackageBase.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:

@@ -2436,7 +2417,7 @@ will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler, the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
:meth:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
@@ -2794,256 +2775,6 @@ Suppose a user invokes ``spack install`` like this:
Spack will fail with a constraint violation, because the version of
MPICH requested is too low for the ``mpi`` requirement in ``foo``.

.. _custom-attributes:

-----------------
Custom attributes
-----------------

Often a package will need to provide attributes for dependents to query
various details about what it provides. While any number of custom defined
attributes can be implemented by a package, the four specific attributes
described below are always available on every package, with default
implementations and the ability to customize with alternate implementations
in the case of virtual packages provided:

=========== ============================================= =========================
Attribute   Purpose                                       Default
=========== ============================================= =========================
``home``    The installation path for the package         ``spec.prefix``
``command`` An executable command for the package         | ``spec.name`` found in
                                                          | ``.home.bin``
``headers`` A list of headers provided by the package     | All headers searched
                                                          | recursively in
                                                          | ``.home.include``
``libs``    A list of libraries provided by the package   | ``lib{spec.name}``
                                                          | searched recursively in
                                                          | ``.home`` starting with
                                                          | ``lib``, ``lib64``, then
                                                          | the rest of ``.home``
=========== ============================================= =========================

Each of these can be customized by implementing the relevant attribute
as a ``@property`` in the package's class:

.. code-block:: python
   :linenos:

   class Foo(Package):
       ...
       @property
       def libs(self):
           # The library provided by Foo is libMyFoo.so
           return find_libraries('libMyFoo', root=self.home, recursive=True)

A package may also provide a custom implementation of each attribute
for the virtual packages it provides by implementing the
``virtualpackagename_attributename`` property in the package's class.
The implementation used is the first one found from:

#. Specialized virtual: ``Package.virtualpackagename_attributename``
#. Generic package: ``Package.attributename``
#. Default

The use of customized attributes is demonstrated in the next example.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Example: Customized attributes for virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Consider a package ``foo`` that can optionally provide two virtual
packages ``bar`` and ``baz``. When both are enabled the installation tree
appears as follows:

.. code-block:: console

   include/foo.h
   include/bar/bar.h
   lib64/libFoo.so
   lib64/libFooBar.so
   baz/include/baz/baz.h
   baz/lib/libFooBaz.so

The install tree shows that ``foo`` is providing the header ``include/foo.h``
and library ``lib64/libFoo.so`` in its install prefix. The virtual
package ``bar`` is providing ``include/bar/bar.h`` and library
``lib64/libFooBar.so``, also in ``foo``'s install prefix. The ``baz``
package, however, is provided in the ``baz`` subdirectory of ``foo``'s
prefix with the ``include/baz/baz.h`` header and ``lib/libFooBaz.so``
library. Such a package could implement the optional attributes as
follows:

.. code-block:: python
   :linenos:

   class Foo(Package):
       ...
       variant('bar', default=False, description='Enable the Foo implementation of bar')
       variant('baz', default=False, description='Enable the Foo implementation of baz')
       ...
       provides('bar', when='+bar')
       provides('baz', when='+baz')
       ...

       # Just the foo headers
       @property
       def headers(self):
           return find_headers('foo', root=self.home.include, recursive=False)

       # Just the foo libraries
       @property
       def libs(self):
           return find_libraries('libFoo', root=self.home, recursive=True)

       # The header provided by the bar virtual package
       @property
       def bar_headers(self):
           return find_headers('bar/bar.h', root=self.home.include, recursive=False)

       # The library provided by the bar virtual package
       @property
       def bar_libs(self):
           return find_libraries('libFooBar', root=self.home, recursive=True)

       # The baz virtual package home
       @property
       def baz_home(self):
           return self.prefix.baz

       # The header provided by the baz virtual package
       @property
       def baz_headers(self):
           return find_headers('baz/baz', root=self.baz_home.include, recursive=False)

       # The library provided by the baz virtual package
       @property
       def baz_libs(self):
           return find_libraries('libFooBaz', root=self.baz_home, recursive=True)

Now consider another package, ``foo-app``, depending on all three:

.. code-block:: python
   :linenos:

   class FooApp(CMakePackage):
       ...
       depends_on('foo')
       depends_on('bar')
       depends_on('baz')

The resulting spec objects for its dependencies show the result of
the above attribute implementations:

|
||||
|
||||
# The core headers and libraries of the foo package
|
||||
|
||||
>>> spec['foo']
|
||||
foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
|
||||
>>> spec['foo'].prefix
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
|
||||
|
||||
# home defaults to the package install prefix without an explicit implementation
|
||||
>>> spec['foo'].home
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
|
||||
|
||||
# foo headers from the foo prefix
|
||||
>>> spec['foo'].headers
|
||||
HeaderList([
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/foo.h',
|
||||
])
|
||||
|
||||
# foo include directories from the foo prefix
|
||||
>>> spec['foo'].headers.directories
|
||||
['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']
|
||||
|
||||
# foo libraries from the foo prefix
|
||||
>>> spec['foo'].libs
|
||||
LibraryList([
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFoo.so',
|
||||
])
|
||||
|
||||
# foo library directories from the foo prefix
|
||||
>>> spec['foo'].libs.directories
|
||||
['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# The virtual bar package in the same prefix as foo
|
||||
|
||||
# bar resolves to the foo package
|
||||
>>> spec['bar']
|
||||
foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
|
||||
>>> spec['bar'].prefix
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
|
||||
|
||||
# home defaults to the foo prefix without either a Foo.bar_home
|
||||
# or Foo.home implementation
|
||||
>>> spec['bar'].home
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
|
||||
|
||||
# bar header in the foo prefix
|
||||
>>> spec['bar'].headers
|
||||
HeaderList([
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/bar/bar.h'
|
||||
])
|
||||
|
||||
# bar include dirs from the foo prefix
|
||||
>>> spec['bar'].headers.directories
|
||||
['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']
|
||||
|
||||
# bar library from the foo prefix
|
||||
>>> spec['bar'].libs
|
||||
LibraryList([
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFooBar.so'
|
||||
])
|
||||
|
||||
# bar library directories from the foo prefix
|
||||
>>> spec['bar'].libs.directories
|
||||
['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# The virtual baz package in a subdirectory of foo's prefix
|
||||
|
||||
# baz resolves to the foo package
|
||||
>>> spec['baz']
|
||||
foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
|
||||
>>> spec['baz'].prefix
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
|
||||
|
||||
# baz_home implementation provides the subdirectory inside the foo prefix
|
||||
>>> spec['baz'].home
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz'
|
||||
|
||||
# baz headers in the baz subdirectory of the foo prefix
|
||||
>>> spec['baz'].headers
|
||||
HeaderList([
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include/baz/baz.h'
|
||||
])
|
||||
|
||||
# baz include directories in the baz subdirectory of the foo prefix
|
||||
>>> spec['baz'].headers.directories
|
||||
[
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include'
|
||||
]
|
||||
|
||||
# baz libraries in the baz subdirectory of the foo prefix
|
||||
>>> spec['baz'].libs
|
||||
LibraryList([
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so'
|
||||
])
|
||||
|
||||
# baz library directories in the baz subdirectory of the foo porefix
|
||||
>>> spec['baz'].libs.directories
|
||||
[
|
||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib'
|
||||
]
|
||||
|
||||
.. _abstract-and-concrete:
|
||||
|
||||
-------------------------
|
||||
@@ -3291,7 +3022,7 @@ The classes that are currently provided by Spack are:
   +----------------------------------------------------------+----------------------------------+
   | **Base Class**                                           | **Purpose**                      |
   +==========================================================+==================================+
   | :class:`~spack.package_base.Package`                     | General base class not           |
   | :class:`~spack.package.Package`                          | General base class not           |
   |                                                          | specialized for any build system |
   +----------------------------------------------------------+----------------------------------+
   | :class:`~spack.build_systems.makefile.MakefilePackage`   | Specialized class for packages   |
@@ -3422,7 +3153,7 @@ for the install phase is:

   For those not used to Python instance methods, this is the
   package itself. In this case it's an instance of ``Foo``, which
   extends ``Package``. For API docs on Package objects, see
   :py:class:`Package <spack.package_base.Package>`.
   :py:class:`Package <spack.package.Package>`.

``spec``
   This is the concrete spec object created by Spack from an
@@ -5592,7 +5323,7 @@ would be quite complicated to do using regex only. Employing the

.. code-block:: python

   class Gcc(Package):
       executables = ['g++']
       executables = [r'g\+\+']

       def filter_detected_exes(cls, prefix, exes_in_prefix):
           return [x for x in exes_in_prefix if 'clang' not in x]
@@ -5745,24 +5476,6 @@ Version Lists

Spack packages should list supported versions with the newest first.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using ``home`` vs ``prefix``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

``home`` and ``prefix`` are both attributes that can be queried on a
package's dependencies, often when passing configure arguments pointing to the
location of a dependency. The difference is that while ``prefix`` is the
location on disk where a concrete package resides, ``home`` is the `logical`
location of a package, which may be different from ``prefix`` in
the case of virtual packages or other special circumstances. For most use
cases inside a package, its dependency locations can be accessed via either
``self.spec['foo'].home`` or ``self.spec['foo'].prefix``. Specific packages
that should be consumed by dependents via ``.home`` instead of ``.prefix``
should be noted in their respective documentation.

See :ref:`custom-attributes` for more details and an example implementing
a custom ``home`` attribute.

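As a sketch, a package might pass a dependency's ``home`` to its build system
like this (the package and the configure flag are hypothetical):

.. code-block:: python

   def configure_args(self):
       # 'mpi' is virtual, so .home is the logical location of the provider
       return ['--with-mpi={0}'.format(self.spec['mpi'].home)]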
---------------------------
Packaging workflow commands
---------------------------

@@ -115,8 +115,7 @@ And here's the spack environment built by the pipeline represented as a

   spack:
     view: false
     concretizer:
       unify: false
     concretization: separately

     definitions:
     - pkgs:
@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
ever want to add more packages, you can either use ``spack add`` or manually
edit this file.

We also need to change the ``concretizer:unify`` option. By default, Spack
We also need to change the ``concretization:`` option. By default, Spack
concretizes each spec *separately*, allowing multiple versions of the same
package to coexist. Since we want a single consistent environment, we want to
concretize all of the specs *together*.
@@ -78,8 +78,7 @@ Here is what your ``spack.yaml`` looks like with this new setting:

   # add package specs to the `specs` list
   specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
   view: true
   concretizer:
     unify: true
   concretization: together

^^^^^^^^^^^^^^^^
Symlink location
@@ -25,5 +25,4 @@ spack:
   - subversion
   # Plotting
   - graphviz
   concretizer:
     unify: true
   concretization: together

@@ -7,7 +7,7 @@ bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives
bzip2, , , Compress/Decompress archives
bzip, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
@@ -15,4 +15,4 @@ gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories
hg, , Optional, Manage Software Repositories
Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
10
lib/spack/env/cc
vendored
@@ -1,4 +1,4 @@
#!/bin/sh -f
#!/bin/sh
# shellcheck disable=SC2034  # evals in this script fool shellcheck
#
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
@@ -241,14 +241,14 @@ case "$command" in
        mode=cpp
        debug_flags="-g"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe)
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|emcc)
        command="$SPACK_CC"
        language="C"
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++)
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|em++)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
@@ -768,9 +768,7 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    IFS="$lsep"
    echo "[$mode] "$full_command_list >> "$output_log"
    unset IFS
    echo "[$mode] ${full_command_list}" >> "$output_log"
fi

# Execute the full command, preserving spaces with IFS set
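The logging change in the last hunk appears to replace word splitting of an
unquoted ``$full_command_list`` under a custom ``IFS`` with a single quoted
expansion. A minimal standalone illustration of the difference (a sketch, not
Spack code):

.. code-block:: sh

   list="a b  c"
   echo [$list]     # unquoted: split and re-joined on single spaces -> [a b c]
   echo ["$list"]   # quoted: contents preserved verbatim            -> [a b  c]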
1
lib/spack/env/emscripten/em++
vendored
Symbolic link
@@ -0,0 +1 @@
../cc
1
lib/spack/env/emscripten/emcc
vendored
Symbolic link
@@ -0,0 +1 @@
../cc
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)

argparse
--------

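For context, the vendored library is used roughly as follows (a sketch of
archspec's public API as of the versions above; see its homepage for details):

.. code-block:: python

   import archspec.cpu

   host = archspec.cpu.host()        # Microarchitecture object for this machine
   print(host.name, host.vendor)     # e.g. "haswell GenuineIntel"
   print("avx2" in host.features)    # query individual CPU features
   print(host > archspec.cpu.TARGETS["x86_64"])  # compatibility ordering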
75
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -61,7 +61,7 @@ def proc_cpuinfo():
    ``/proc/cpuinfo``
    """
    info = {}
    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
    with open("/proc/cpuinfo") as file:
        for line in file:
            key, separator, value = line.partition(":")

@@ -80,46 +80,26 @@ def proc_cpuinfo():


def _check_output(args, env):
    output = subprocess.Popen(  # pylint: disable=consider-using-with
        args, stdout=subprocess.PIPE, env=env
    ).communicate()[0]
    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
    return six.text_type(output.decode("utf-8"))


def _machine():
    """Return the machine architecture we are on."""
    operating_system = platform.system()

    # If we are not on Darwin, trust what Python tells us
    if operating_system != "Darwin":
        return platform.machine()

    # On Darwin it might happen that we are on M1, but using an interpreter
    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
    # need to fix that.
    #
    # See: https://bugs.python.org/issue42704
    output = _check_output(
        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
    ).strip()

    if "Apple" in output:
        # Note that a native Python interpreter on Apple M1 would return
        # "arm64" instead of "aarch64". Here we normalize to the latter.
        return "aarch64"

    return "x86_64"


@info_dict(operating_system="Darwin")
def sysctl_info_dict():
    """Returns a raw info dictionary parsing the output of sysctl."""
    child_environment = _ensure_bin_usrbin_in_path()
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)

    def sysctl(*args):
        return _check_output(["sysctl"] + list(args), env=child_environment).strip()

    if _machine() == "x86_64":
    if platform.machine() == "x86_64":
        flags = (
            sysctl("-n", "machdep.cpu.features").lower()
            + " "
@@ -145,18 +125,6 @@ def sysctl(*args):
    return info


def _ensure_bin_usrbin_in_path():
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)
    return child_environment


def adjust_raw_flags(info):
    """Adjust the flags detected on the system to homogenize
    slightly different representations.

@@ -216,7 +184,12 @@ def compatible_microarchitectures(info):
    Args:
        info (dict): dictionary containing information on the host cpu
    """
    architecture_family = _machine()
    architecture_family = platform.machine()
    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
    # so we should normalize the name here
    if architecture_family == "arm64":
        architecture_family = "aarch64"

    # If a tester is not registered, be conservative and assume no known
    # target is compatible with the host
    tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
@@ -271,7 +244,12 @@ def compatibility_check(architecture_family):
        architecture_family = (architecture_family,)

    def decorator(func):
        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
        # pylint: disable=fixme
        # TODO: on removal of Python 2.6 support this can be re-written as
        # TODO: an update + a dict comprehension
        for arch_family in architecture_family:
            COMPATIBILITY_CHECKS[arch_family] = func

        return func

    return decorator
@@ -310,7 +288,7 @@ def compatibility_check_for_x86_64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )

@@ -325,9 +303,8 @@ def compatibility_check_for_aarch64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        # On macOS it seems impossible to get all the CPU features with sysctl info
        and (target.features.issubset(features) or platform.system() == "Darwin")
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )


4
lib/spack/external/archspec/cpu/schema.py
vendored
@@ -11,7 +11,7 @@
try:
    from collections.abc import MutableMapping  # novm
except ImportError:
    from collections import MutableMapping  # pylint: disable=deprecated-class
    from collections import MutableMapping


class LazyDictionary(MutableMapping):
@@ -56,7 +56,7 @@ def _load_json_file(json_file):

    def _factory():
        filename = os.path.join(json_dir, json_file)
        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
        with open(filename, "r") as file:
            return json.load(file)

    return _factory
@@ -85,21 +85,7 @@
      "intel": [
        {
          "versions": ":",
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "oneapi": [
        {
          "versions": ":",
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "dpcpp": [
        {
          "versions": ":",
          "name": "x86-64",
          "name": "pentium4",
          "flags": "-march={name} -mtune=generic"
        }
      ]
@@ -143,20 +129,6 @@
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
          "name": "x86-64-v2",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "dpcpp": [
        {
          "versions": "2021.2.0:",
          "name": "x86-64-v2",
          "flags": "-march={name} -mtune=generic"
        }
      ]
    }
  },
@@ -214,20 +186,6 @@
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
          "name": "x86-64-v3",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "dpcpp": [
        {
          "versions": "2021.2.0:",
          "name": "x86-64-v3",
          "flags": "-march={name} -mtune=generic"
        }
      ]
    }
  },
@@ -290,20 +248,6 @@
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
          "name": "x86-64-v4",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "dpcpp": [
        {
          "versions": "2021.2.0:",
          "name": "x86-64-v4",
          "flags": "-march={name} -mtune=generic"
        }
      ]
    }
  },
@@ -344,19 +288,8 @@
      "intel": [
        {
          "versions": "16.0:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "dpcpp": [
        {
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
          "name": "pentium4",
          "flags": "-march={name} -mtune=generic"
        }
      ]
    }
@@ -400,18 +333,6 @@
          "versions": "16.0:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "dpcpp": [
        {
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
@@ -463,20 +384,6 @@
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": ":",
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "dpcpp": [
        {
          "versions": ":",
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
@@ -525,20 +432,6 @@
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": ":",
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -597,18 +490,6 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -669,18 +550,6 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -746,18 +615,6 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -815,18 +672,6 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -887,18 +732,6 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -965,20 +798,6 @@
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1049,20 +868,6 @@
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1132,18 +937,6 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1211,18 +1004,6 @@
|
||||
"versions": "19.0.1:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1317,20 +1098,6 @@
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1375,20 +1142,6 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1439,20 +1192,6 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1507,20 +1246,6 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1576,20 +1301,6 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1649,22 +1360,6 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1727,22 +1422,6 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1806,22 +1485,6 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1880,30 +1543,6 @@
|
||||
"name": "znver3",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -2149,6 +1788,7 @@
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
@@ -2181,26 +1821,18 @@
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16"
|
||||
},
|
||||
{
|
||||
"versions": "8:10.2",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
|
||||
},
|
||||
{
|
||||
"versions": "10.3:",
|
||||
"flags": "-mcpu=a64fx -msve-vector-bits=512"
|
||||
"versions": "8:",
|
||||
"flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "3.9:4.9",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16"
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16"
|
||||
},
|
||||
{
|
||||
"versions": "5:10",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
|
||||
},
|
||||
{
|
||||
"versions": "11:",
|
||||
"flags": "-mcpu=a64fx"
|
||||
"versions": "5:",
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
@@ -2322,40 +1954,7 @@
|
||||
"m1": {
|
||||
"from": ["aarch64"],
|
||||
"vendor": "Apple",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"paca",
|
||||
"pacg",
|
||||
"dcpodp",
|
||||
"flagm2",
|
||||
"frint"
|
||||
],
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
@@ -2365,22 +1964,14 @@
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:12.0",
|
||||
"versions": "9.0:",
|
||||
"flags" : "-march=armv8.4-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": "11.0:12.5",
|
||||
"versions": "11.0:",
|
||||
"flags" : "-march=armv8.4-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
]
|
||||
}
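These JSON entries drive flag selection at runtime: archspec looks up the detected target, picks the entry whose compiler name and version range match, and substitutes `{name}` into the flag template. A hedged usage sketch with the `archspec` package — output depends on the host, and the compiler/version arguments here are illustrative:

    import archspec.cpu

    host = archspec.cpu.host()  # e.g. "skylake" or "zen3"
    print(host.name, host.vendor)
    # Resolve the optimization flags recorded in microarchitectures.json
    print(host.optimization_flags("gcc", "10.2.0"))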

@@ -64,7 +64,6 @@
    'is_exe',
    'join_path',
    'last_modification_time_recursive',
    'library_extensions',
    'mkdirp',
    'partition_path',
    'prefixes',

@@ -110,15 +109,12 @@ def path_contains_subdirectory(path, root):
    return norm_path.startswith(norm_root)


#: This generates the library filenames that may appear on any OS.
library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']


def possible_library_filenames(library_names):
    """Given a collection of library names like 'libfoo', generate the set of
    library filenames that may be found on the system (e.g. libfoo.so).
    library filenames that may be found on the system (e.g. libfoo.so). This
    generates the library filenames that may appear on any OS.
    """
    lib_extensions = library_extensions
    lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
    return set(
        '.'.join((lib, extension)) for lib, extension in
        itertools.product(library_names, lib_extensions))

@@ -308,68 +304,6 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
        filter_file(double_quoted, '"%s"' % repl, f)


@contextmanager
def exploding_archive_catch(stage):
    # Check for an exploding tarball, i.e. one that doesn't expand to
    # a single directory. If the tarball *didn't* explode, move its
    # contents to the staging source directory & remove the container
    # directory. If the tarball did explode, just rename the tarball
    # directory to the staging source directory.
    #
    # NOTE: The tar program on Mac OS X will encode HFS metadata in
    # hidden files, which can end up *alongside* a single top-level
    # directory. We initially ignore presence of hidden files to
    # accommodate these "semi-exploding" tarballs but ensure the files
    # are copied to the source directory.

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    orig_dir = os.getcwd()
    os.chdir(tarball_container)
    try:
        yield
        # catch an exploding archive on successful extraction
        os.chdir(orig_dir)
        exploding_archive_handler(tarball_container, stage)
    except Exception as e:
        # return current directory context to previous on failure
        os.chdir(orig_dir)
        raise e


@system_path_filter
def exploding_archive_handler(tarball_container, stage):
    """
    Args:
        tarball_container: where the archive was expanded to
        stage: Stage object referencing filesystem location
            where archive is being expanded
    """
    files = os.listdir(tarball_container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        src = os.path.join(tarball_container, non_hidden[0])
        if os.path.isdir(src):
            stage.srcdir = non_hidden[0]
            shutil.move(src, stage.source_path)
            if len(files) > 1:
                files.remove(non_hidden[0])
                for f in files:
                    src = os.path.join(tarball_container, f)
                    dest = os.path.join(stage.path, f)
                    shutil.move(src, dest)
            os.rmdir(tarball_container)
        else:
            # This is a non-directory entry (e.g., a patch file) so simply
            # rename the tarball container to be the source path.
            shutil.move(tarball_container, stage.source_path)
    else:
        shutil.move(tarball_container, stage.source_path)

@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):

@@ -429,7 +363,7 @@ def group_ids(uid=None):


@system_path_filter(arg_slice=slice(1))
def chgrp(path, group, follow_symlinks=True):
def chgrp(path, group):
    """Implement the bash chgrp function on a single path"""
    if is_windows:
        raise OSError("Function 'chgrp' is not supported on Windows")

@@ -438,10 +372,7 @@ def chgrp(path, group, follow_symlinks=True):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    if follow_symlinks:
        os.chown(path, -1, gid)
    else:
        os.lchown(path, -1, gid)
    os.chown(path, -1, gid)


@system_path_filter(arg_slice=slice(1))

@@ -833,36 +764,39 @@ def __init__(self, inner_exception, outer_exception):

@contextmanager
@system_path_filter
def replace_directory_transaction(directory_name):
    """Temporarily renames a directory in the same parent dir. If the operations
    executed within the context manager don't raise an exception, the renamed directory
    is deleted. If there is an exception, the move is undone.
def replace_directory_transaction(directory_name, tmp_root=None):
    """Moves a directory to a temporary space. If the operations executed
    within the context manager don't raise an exception, the directory is
    deleted. If there is an exception, the move is undone.

    Args:
        directory_name (path): absolute path of the directory name
        tmp_root (path): absolute path of the parent directory where to create
            the temporary

    Returns:
        temporary directory where ``directory_name`` has been moved
    """
    # Check the input is indeed a directory with absolute path.
    # Raise before anything is done to avoid moving the wrong directory
    directory_name = os.path.abspath(directory_name)
    assert os.path.isdir(directory_name), 'Not a directory: ' + directory_name
    assert os.path.isdir(directory_name), \
        'Invalid directory: ' + directory_name
    assert os.path.isabs(directory_name), \
        '"directory_name" must contain an absolute path: ' + directory_name

    # Note: directory_name is normalized here, meaning the trailing slash is dropped,
    # so dirname is the directory's parent not the directory itself.
    tmpdir = tempfile.mkdtemp(
        dir=os.path.dirname(directory_name),
        prefix='.backup')
    directory_basename = os.path.basename(directory_name)

    # We have to jump through hoops to support Windows, since
    # os.rename(directory_name, tmpdir) errors there.
    backup_dir = os.path.join(tmpdir, 'backup')
    os.rename(directory_name, backup_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, backup_dir))
    if tmp_root is not None:
        assert os.path.isabs(tmp_root)

    tmp_dir = tempfile.mkdtemp(dir=tmp_root)
    tty.debug('Temporary directory created [{0}]'.format(tmp_dir))

    shutil.move(src=directory_name, dst=tmp_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))

    try:
        yield backup_dir
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
        # Try to recover the original directory, if this fails, raise a
        # composite exception.

@@ -870,7 +804,10 @@ def replace_directory_transaction(directory_name):
            # Delete what was there, before copying back the original content
            if os.path.exists(directory_name):
                shutil.rmtree(directory_name)
            os.rename(backup_dir, directory_name)
            shutil.move(
                src=os.path.join(tmp_dir, directory_basename),
                dst=os.path.dirname(directory_name)
            )
        except Exception as outer_exception:
            raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)

@@ -878,8 +815,8 @@ def replace_directory_transaction(directory_name):
        raise
    else:
        # Otherwise delete the temporary directory
        shutil.rmtree(tmpdir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmpdir))
        shutil.rmtree(tmp_dir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
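Illustrative usage of the transaction above — the path and the `write_new_content` helper are hypothetical, not from this diff:

    # Atomically rebuild a directory, restoring the original content
    # if anything inside the block raises.
    with replace_directory_transaction('/opt/example') as backup_dir:
        os.mkdir('/opt/example')           # recreate the directory
        write_new_content('/opt/example')  # hypothetical helper
    # On success the backup under backup_dir has been deleted;
    # on failure /opt/example is restored from that backup.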

@system_path_filter

@@ -1160,32 +1097,7 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
    for f in dir_entries:
        if sys.version_info >= (3, 5, 0):
            rel_child = os.path.join(rel_path, f.name)
            islink = f.is_symlink()
            # On Windows, symlinks to directories are distinct from
            # symlinks to files, and it is possible to create a
            # broken symlink to a directory (e.g. using os.symlink
            # without `target_is_directory=True`), invoking `isdir`
            # on a symlink on Windows that is broken in this manner
            # will result in an error. In this case we can work around
            # the issue by reading the target and resolving the
            # directory ourselves
            try:
                isdir = f.is_dir()
            except OSError as e:
                if is_windows and hasattr(e, 'winerror')\
                        and e.winerror == 5 and islink:
                    # if path is a symlink, determine destination and
                    # evaluate file vs directory
                    link_target = resolve_link_target_relative_to_the_link(f)
                    # link_target might be relative but
                    # resolve_link_target_relative_to_the_link
                    # will ensure that if so, that it is relative
                    # to the CWD and therefore
                    # makes sense
                    isdir = os.path.isdir(link_target)
                else:
                    raise e

            islink, isdir = f.is_symlink(), f.is_dir()
        else:
            rel_child = os.path.join(rel_path, f)
            lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))

@@ -1193,7 +1105,7 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
            continue

        if not isdir:
            # handle files
            # Handle files
            visitor.visit_file(root, rel_child, depth)
        elif not islink and visitor.before_visit_dir(root, rel_child, depth):
            # Handle ordinary directories

@@ -1268,35 +1180,6 @@ def remove_if_dead_link(path):
        os.unlink(path)


def readonly_file_handler(ignore_errors=False):
    # TODO: generate stages etc. with write permissions wherever
    # so this callback is no-longer required
    """
    Generate callback for shutil.rmtree to handle permissions errors on
    Windows. Some files may unexpectedly lack write permissions even
    though they were generated by Spack on behalf of the user (e.g. the
    stage), so this callback will detect such cases and modify the
    permissions if that is the issue. For other errors, the fallback
    is either to raise (if ignore_errors is False) or ignore (if
    ignore_errors is True). This is only intended for Windows systems
    and will raise a separate error if it is ever invoked (by accident)
    on a non-Windows system.
    """
    def error_remove_readonly(func, path, exc):
        if not is_windows:
            raise RuntimeError("This method should only be invoked on Windows")
        excvalue = exc[1]
        if is_windows and func in (os.rmdir, os.remove, os.unlink) and\
                excvalue.errno == errno.EACCES:
            # change the file to be readable, writable, executable: 0777
            os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
            # retry
            func(path)
        elif not ignore_errors:
            raise
    return error_remove_readonly


@system_path_filter
def remove_linked_tree(path):
    """Removes a directory and its contents.

@@ -1304,18 +1187,23 @@ def remove_linked_tree(path):
    If the directory is a symlink, follows the link and removes the real
    directory before removing the link.

    This method will force-delete files on Windows

    Parameters:
        path (str): Directory to be removed
    """
    kwargs = {'ignore_errors': True}
    # On Windows, cleaning a Git stage can be an issue
    # as git leaves readonly files that Python handles
    # poorly on Windows. Remove readonly status and try again
    def onerror(func, path, exe_info):
        os.chmod(path, stat.S_IWUSR)
        try:
            func(path)
        except Exception as e:
            tty.warn(e)
            pass

    # Windows readonly files cannot be removed by Python
    # directly.
    kwargs = {'ignore_errors': True}
    if is_windows:
        kwargs['ignore_errors'] = False
        kwargs['onerror'] = readonly_file_handler(ignore_errors=True)
        kwargs = {'onerror': onerror}

    if os.path.exists(path):
        if os.path.islink(path):

@@ -11,9 +11,7 @@
import os
import re
import sys
import traceback
from datetime import datetime, timedelta
from typing import List, Tuple

import six
from six import string_types

@@ -1011,64 +1009,3 @@ def __repr__(self):

    def __str__(self):
        return str(self.data)


class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]

    def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context):
        # type: (str) -> GroupedExceptionForwarder
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self)

    def _receive_forwarded(self, context, exc, tb):
        # type: (str, Exception, List[str]) -> None
        self.exceptions.append((context, exc, tb))

    def grouped_message(self, with_tracebacks=True):
        # type: (bool) -> str
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            '{0} raised {1}: {2}{3}'.format(
                context,
                exc.__class__.__name__,
                exc,
                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
            )
            for context, exc, tb in self.exceptions
        ]
        return 'due to the following failures:\n{0}'.format(
            '\n'.join(each_exception_message)
        )


class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context, handler):
        # type: (str, GroupedExceptionHandler) -> None
        self._context = context
        self._handler = handler

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            self._handler._receive_forwarded(
                self._context,
                exc_value,
                traceback.format_tb(tb),
            )

        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True
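A short usage sketch of the handler defined above — each `forward` block swallows its exception and records it, and the handler is truthy once anything was captured (the source names are illustrative):

    handler = GroupedExceptionHandler()

    for name in ('source-a', 'source-b'):
        with handler.forward(name):
            raise RuntimeError('{0} failed'.format(name))

    if handler:  # truthy once any exception was captured
        print('bootstrap failed ' + handler.grouped_message(with_tracebacks=False))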

@@ -809,23 +809,19 @@ def __enter__(self):
        def background_reader(reader, echo_writer, _kill):
            # for each line printed to logfile, read it
            # if echo: write line to user
            try:
                while True:
                    is_killed = _kill.wait(.1)
                    # Flush buffered build output to file
                    # stdout/err fds refer to log file
                    self.stderr.flush()
                    self.stdout.flush()

            while True:
                is_killed = _kill.wait(.1)
                self.stderr.flush()
                self.stdout.flush()
                line = reader.readline()
                while line:
                    if self.echo:
                        self.echo_writer.write('{0}'.format(line.decode()))
                        self.echo_writer.flush()
                    line = reader.readline()
                if self.echo and line:
                    echo_writer.write('{0}'.format(line.decode()))
                    echo_writer.flush()

                if is_killed:
                    break
            finally:
                reader.close()
                if is_killed:
                    break

        self._active = True
        with replace_environment(self.env):

@@ -841,6 +837,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
            self._ioflag = False
        else:
            self.writer.close()
            self.reader.close()
            self.echo_writer.flush()
            self.stdout.flush()
            self.stderr.flush()

@@ -856,7 +853,10 @@ def force_echo(self):
        if not self._active:
            raise RuntimeError(
                "Can't call force_echo() outside log_output region!")
        yield
        try:
            yield self
        finally:
            pass


def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 19, 0, 'dev0')
spack_version_info = (0, 18, 0, 'dev0')

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = '.'.join(str(s) for s in spack_version_info)
@@ -12,7 +12,7 @@
import spack.error
import spack.hooks
import spack.monitor
import spack.package_base
import spack.package
import spack.repo
import spack.util.executable

@@ -276,30 +276,11 @@ def _search_duplicate_specs_in_externals(error_cls):
    )


@package_directives
def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
        test_callbacks = pkg.build_time_test_callbacks

        if test_callbacks and 'test' in test_callbacks:
            msg = ('{0} package contains "test" method in '
                   'build_time_test_callbacks')
            instr = ('Remove "test" from: [{0}]'
                     .format(', '.join(test_callbacks)))
            errors.append(error_cls(msg.format(pkg.name), [instr]))

    return errors


@package_directives
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    errors = []
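A hedged sketch of the broader regex from the hunk above in action; the URL is illustrative only, and the pattern is copied from the diff as-is:

    import re

    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    # Illustrative URL only
    url = "https://github.com/spack/spack/pull/12345.patch"
    print(bool(re.match(github_patch_url_re, url)))  # True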
File diff suppressed because it is too large
@@ -5,7 +5,6 @@
from __future__ import print_function

import contextlib
import copy
import fnmatch
import functools
import json

@@ -22,7 +21,6 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config

@@ -38,11 +36,6 @@
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = 'metadata.yaml'

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}

@@ -80,41 +73,32 @@ def _try_import_from_store(module, query_spec, query_info=None):

    for candidate_spec in installed_specs:
        pkg = candidate_spec['python'].package
        module_paths = [
        module_paths = {
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
        ]  # type: list[str]
        path_before = list(sys.path)
        # NOTE: try module_paths first and last, last allows an existing version in path
        # to be picked up and used, possibly depending on something in the store, first
        # allows the bootstrap version to work when an incompatible version is in
        # sys.path
        orders = [
            module_paths + sys.path,
            sys.path + module_paths,
        ]
        for path in orders:
            sys.path = path
            try:
                _fix_ext_suffix(candidate_spec)
                if _python_import(module):
                    msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
                           'provides the "{0}" Python module').format(
                        module, query_spec, candidate_spec.dag_hash()
                    )
                    tty.debug(msg)
                    if query_info is not None:
                        query_info['spec'] = candidate_spec
                    return True
            except Exception as e:
                msg = ('unexpected error while trying to import module '
                       '"{0}" from spec "{1}" [error="{2}"]')
                tty.warn(msg.format(module, candidate_spec, str(e)))
            else:
                msg = "Spec {0} did not provide module {1}"
                tty.warn(msg.format(candidate_spec, module))
        }
        sys.path.extend(module_paths)

        sys.path = path_before
        try:
            _fix_ext_suffix(candidate_spec)
            if _python_import(module):
                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
                       'provides the "{0}" Python module').format(
                    module, query_spec, candidate_spec.dag_hash()
                )
                tty.debug(msg)
                if query_info is not None:
                    query_info['spec'] = candidate_spec
                return True
        except Exception as e:
            msg = ('unexpected error while trying to import module '
                   '"{0}" from spec "{1}" [error="{2}"]')
            tty.warn(msg.format(module, candidate_spec, str(e)))
        else:
            msg = "Spec {0} did not provide module {1}"
            tty.warn(msg.format(candidate_spec, module))

    sys.path = sys.path[:-3]

    return False
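The hunk above tries the candidate module paths both before and after the current `sys.path`, restoring the original search path between attempts. A minimal standalone sketch of that ordering trick (the function name is hypothetical):

    import sys

    def try_import_with_candidate_paths(module_name, module_paths):
        # Try the candidate paths first (shadowing incompatible versions)
        # and then last (reusing a compatible version already on the path),
        # restoring the original sys.path after each attempt.
        saved = list(sys.path)
        for order in (module_paths + saved, saved + module_paths):
            sys.path = order
            try:
                __import__(module_name)
                return True
            except ImportError:
                pass
            finally:
                sys.path = saved
        return False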

@@ -219,43 +203,12 @@ def _executables_in_store(executables, query_spec, query_info=None):
    return False


class _BootstrapperBase(object):
    """Base class to derive types that can bootstrap software for Spack"""
    config_scope_name = ''

@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(object):
    """Install the software needed during bootstrapping from a buildcache."""
    def __init__(self, conf):
        self.name = conf['name']
        self.url = conf['info']['url']

    @property
    def mirror_url(self):
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's an url if we find it
        if '://' in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
        )


@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(_BootstrapperBase):
    """Install the software needed during bootstrapping from a buildcache."""

    config_scope_name = 'bootstrap_buildcache'

    def __init__(self, conf):
        super(_BuildcacheBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.last_search = None

    @staticmethod

@@ -278,8 +231,9 @@ def _spec_and_platform(abstract_spec_str):
    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = '{0}.json'.format(package_name)
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        json_path = os.path.join(
            spack.paths.share_path, 'bootstrap', self.name, json_filename
        )
        with open(json_path) as f:
            data = json.load(f)
        return data

@@ -353,6 +307,12 @@ def _install_and_test(
                    return True
        return False

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
        )

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):

@@ -382,13 +342,9 @@ def try_search_path(self, executables, abstract_spec_str):


@_bootstrapper(type='install')
class _SourceBootstrapper(_BootstrapperBase):
class _SourceBootstrapper(object):
    """Install the software needed during bootstrapping from sources."""
    config_scope_name = 'bootstrap_source'

    def __init__(self, conf):
        super(_SourceBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.conf = conf
        self.last_search = None

@@ -421,8 +377,7 @@ def try_import(self, module, abstract_spec_str):
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)
        concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info

@@ -435,8 +390,6 @@ def try_search_path(self, executables, abstract_spec_str):
            self.last_search = info
            return True

        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

@@ -449,8 +402,7 @@ def try_search_path(self, executables, abstract_spec_str):

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True

@@ -465,11 +417,11 @@ def _make_bootstrapper(conf):
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
def _source_is_trusted(conf):
    trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
    if not trusted.get(name, False):
        raise ValueError('source is not trusted')
    if name not in trusted:
        return False
    return trusted[name]


def spec_for_current_python():

@@ -534,26 +486,36 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
        return

    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])

    h = GroupedExceptionHandler()
    errors = {}

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            source_is_enabled_or_raise(current_config)
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
                   'not trusted').format(module, current_config['name'])
            tty.debug(msg)
            continue

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_import(module, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))
            errors[current_config['name']] = e

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    msg += ' due to the following failures:\n'
    for method in errors:
        err = errors[method]
        msg += "    '{0}' raised {1}: {2}\n".format(
            method, err.__class__.__name__, str(err))
    msg += '    Please run `spack -d spec zlib` for more verbose error messages'
    raise ImportError(msg)


@@ -576,14 +538,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
            return cmd

    executables_str = ', '.join(executables)
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
                   'not trusted').format(executables_str, current_config['name'])
            tty.debug(msg)
            continue

    h = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            source_is_enabled_or_raise(current_config)

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = b.last_search['spec'], b.last_search['command']

@@ -598,16 +562,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
                )
                cmd.add_default_envmod(env_mods)
                return cmd
        except Exception as e:
            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
            tty.debug(msg.format(executables_str, str(e)))

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise RuntimeError(msg)


@@ -864,19 +826,6 @@ def ensure_flake8_in_path_or_raise():
    return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)


def all_root_specs(development=False):
    """Return a list of all the root specs that may be used to bootstrap Spack.

    Args:
        development (bool): if True include dev dependencies
    """
    specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
    if development:
        specs += [isort_root_spec(), mypy_root_spec(),
                  black_root_spec(), flake8_root_spec()]
    return specs


def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'

@@ -1014,23 +963,3 @@ def status_message(section):
        msg += '\n'
    msg = msg.format(pass_token if not missing_software else fail_token)
    return msg, missing_software


def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml) as f:
            current.update(spack.util.spack_yaml.load(f))
        list_of_sources.append(current)
    return list_of_sources
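Each configured source points at a metadata directory, and `metadata.yaml` is merged into the entry at load time. A hedged sketch, using only calls that appear in the diff above, of inspecting the configured sources:

    import os
    import spack.config
    import spack.util.path

    # Illustrative: list the configured bootstrapping sources and where
    # their metadata.yaml files live.
    for entry in spack.config.get('bootstrap:sources') or []:
        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
        print(entry['name'], os.path.join(metadata_dir, 'metadata.yaml'))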

@@ -55,7 +55,7 @@
import spack.config
import spack.install_test
import spack.main
import spack.package_base
import spack.package
import spack.paths
import spack.platforms
import spack.repo

@@ -111,20 +111,6 @@
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'


def should_set_parallel_jobs(jobserver_support=False):
    """Returns true in general, except when:
    - The env variable SPACK_NO_PARALLEL_MAKE=1 is set
    - jobserver_support is enabled, and a jobserver was found.
    """
    if (
        jobserver_support and
        'MAKEFLAGS' in os.environ and
        '--jobserver' in os.environ['MAKEFLAGS']
    ):
        return False
    return not env_flag(SPACK_NO_PARALLEL_MAKE)
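An illustrative check of the function above; it relies on the fact that GNU make advertises an active jobserver through a `--jobserver-...` token in MAKEFLAGS, and the environment value here is a made-up example:

    import os

    # GNU make exports something like MAKEFLAGS=" -j4 --jobserver-auth=3,4"
    # when a jobserver is active.
    os.environ['MAKEFLAGS'] = ' -j4 --jobserver-auth=3,4'
    print(should_set_parallel_jobs(jobserver_support=True))   # False
    print(should_set_parallel_jobs(jobserver_support=False))  # True, unless SPACK_NO_PARALLEL_MAKE=1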
|
||||
|
||||
|
||||
class MakeExecutable(Executable):
|
||||
"""Special callable executable object for make so the user can specify
|
||||
parallelism options on a per-invocation basis. Specifying
|
||||
@@ -134,6 +120,9 @@ class MakeExecutable(Executable):
|
||||
call will name an environment variable which will be set to the
|
||||
parallelism level (without affecting the normal invocation with
|
||||
-j).
|
||||
|
||||
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
|
||||
everything.
|
||||
"""
|
||||
|
||||
def __init__(self, name, jobs):
|
||||
@@ -144,8 +133,9 @@ def __call__(self, *args, **kwargs):
|
||||
"""parallel, and jobs_env from kwargs are swallowed and used here;
|
||||
remaining arguments are passed through to the superclass.
|
||||
"""
|
||||
parallel = should_set_parallel_jobs(jobserver_support=True) and \
|
||||
kwargs.pop('parallel', self.jobs > 1)
|
||||
|
||||
disable = env_flag(SPACK_NO_PARALLEL_MAKE)
|
||||
parallel = (not disable) and kwargs.pop('parallel', self.jobs > 1)
|
||||
|
||||
if parallel:
|
||||
args = ('-j{0}'.format(self.jobs),) + args
|
||||
@@ -191,7 +181,7 @@ def clean_environment():
|
||||
env.unset('PYTHONPATH')
|
||||
|
||||
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
|
||||
# env.unset('MAKEFLAGS')
|
||||
env.unset('MAKEFLAGS')
|
||||
|
||||
# Avoid that libraries of build dependencies get hijacked.
|
||||
env.unset('LD_PRELOAD')
|
||||
@@ -522,7 +512,14 @@ def _set_variables_for_single_module(pkg, module):
|
||||
m.make_jobs = jobs
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
# FIXME: !!!!!
|
||||
m.make = MakeExecutable('make', jobs)
|
||||
m.emmake = MakeExecutable('emmake', jobs)
|
||||
m.emmake.add_default_arg('make')
|
||||
m.emmake.add_default_arg('AR=emar')
|
||||
m.emmake.add_default_arg('RANLIB=emranlib')
|
||||
m.emmake.add_default_arg('NM=emnm')
|
||||
|
||||
m.gmake = MakeExecutable('gmake', jobs)
|
||||
m.scons = MakeExecutable('scons', jobs)
|
||||
m.ninja = MakeExecutable('ninja', jobs)
|
||||
@@ -533,9 +530,15 @@ def _set_variables_for_single_module(pkg, module):
|
||||
# Find the configure script in the archive path
|
||||
# Don't use which for this; we want to find it in the current dir.
|
||||
m.configure = Executable('./configure')
|
||||
m.emconfigure = Executable('emconfigure')
|
||||
m.emconfigure.add_default_arg('./configure')
|
||||
|
||||
m.meson = Executable('meson')
|
||||
|
||||
m.cmake = Executable('cmake')
|
||||
m.emcmake = Executable('emcmake')
|
||||
m.emcmake.add_default_arg('cmake')
|
||||
|
||||
m.ctest = MakeExecutable('ctest', jobs)
|
||||
|
||||
if sys.platform == 'win32':
|
||||
@@ -722,7 +725,7 @@ def get_std_cmake_args(pkg):
|
||||
package were a CMakePackage instance.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package under consideration
|
||||
pkg (spack.package.PackageBase): package under consideration
|
||||
|
||||
    Returns:
        list: arguments for cmake
@@ -738,7 +741,7 @@ def get_std_meson_args(pkg):
    package were a MesonPackage instance.

    Args:
-        pkg (spack.package_base.PackageBase): package under consideration
+        pkg (spack.package.PackageBase): package under consideration

    Returns:
        list: arguments for meson
@@ -748,12 +751,12 @@ def get_std_meson_args(pkg):

def parent_class_modules(cls):
    """
-    Get list of superclass modules that descend from spack.package_base.PackageBase
+    Get list of superclass modules that descend from spack.package.PackageBase

    Includes cls.__module__
    """
-    if (not issubclass(cls, spack.package_base.PackageBase) or
-            issubclass(spack.package_base.PackageBase, cls)):
+    if (not issubclass(cls, spack.package.PackageBase) or
+            issubclass(spack.package.PackageBase, cls)):
        return []
    result = []
    module = sys.modules.get(cls.__module__)
@@ -771,7 +774,7 @@ def load_external_modules(pkg):
    associated with them.

    Args:
-        pkg (spack.package_base.PackageBase): package to load deps for
+        pkg (spack.package.PackageBase): package to load deps for
    """
    for dep in list(pkg.spec.traverse()):
        external_modules = dep.external_modules or []
@@ -839,7 +842,7 @@ def setup_package(pkg, dirty, context='build'):
    # PrgEnv modules on cray platform. Module unload does no damage when
    # unnecessary
    on_cray, _ = _on_cray()
-    if on_cray and not dirty:
+    if on_cray:
        for mod in ['cray-mpich', 'cray-libsci']:
            module('unload', mod)

@@ -1038,7 +1041,7 @@ def get_cmake_prefix_path(pkg):


def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
-                       input_multiprocess_fd, jsfd1, jsfd2):
+                       input_multiprocess_fd):

    context = kwargs.get('context', 'build')

@@ -1109,7 +1112,7 @@ def start_build_process(pkg, function, kwargs):

    Args:

-        pkg (spack.package_base.PackageBase): package whose environment we should set up the
+        pkg (spack.package.PackageBase): package whose environment we should set up the
            child process for.
        function (typing.Callable): argless function to run in the child
            process.
@@ -1145,8 +1148,6 @@ def child_fun():
    """
    parent_pipe, child_pipe = multiprocessing.Pipe()
    input_multiprocess_fd = None
-    jobserver_fd1 = None
-    jobserver_fd2 = None

    serialized_pkg = spack.subprocess_context.PackageInstallContext(pkg)

@@ -1156,17 +1157,11 @@ def child_fun():
            'fileno'):
        input_fd = os.dup(sys.stdin.fileno())
        input_multiprocess_fd = MultiProcessFd(input_fd)
-    mflags = os.environ.get('MAKEFLAGS', False)
-    if mflags:
-        m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
-        if m:
-            jobserver_fd1 = MultiProcessFd(int(m.group(1)))
-            jobserver_fd2 = MultiProcessFd(int(m.group(2)))

    p = multiprocessing.Process(
        target=_setup_pkg_and_run,
        args=(serialized_pkg, function, kwargs, child_pipe,
-              input_multiprocess_fd, jobserver_fd1, jobserver_fd2))
+              input_multiprocess_fd))

    p.start()

@@ -1234,7 +1229,7 @@ def make_stack(tb, stack=None):
        if 'self' in frame.f_locals:
            # Find the first proper subclass of PackageBase.
            obj = frame.f_locals['self']
-            if isinstance(obj, spack.package_base.PackageBase):
+            if isinstance(obj, spack.package.PackageBase):
                break

    # We found obj, the Package implementation we care about.
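For context on the jobserver lines removed above: GNU make advertises its jobserver file descriptors through `MAKEFLAGS`, which is what the `--jobserver-[^=]*=(\d),(\d)` pattern picks apart. A hedged, standalone sketch (the example `MAKEFLAGS` value is invented):

```python
import re

# e.g. what make might export for `make -j4` (illustrative value)
mflags = '-j4 --jobserver-auth=3,4'
m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
if m:
    read_fd, write_fd = int(m.group(1)), int(m.group(2))
    # read_fd/write_fd are the pipe ends a child uses to request job slots
```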
@@ -9,7 +9,7 @@

from spack.build_systems.autotools import AutotoolsPackage
from spack.directives import extends
-from spack.package_base import ExtensionError
+from spack.package import ExtensionError
from spack.util.executable import which

@@ -16,7 +16,7 @@

from spack.build_environment import InstallError
from spack.directives import conflicts, depends_on
from spack.operating_systems.mac_os import macos_version
-from spack.package_base import PackageBase, run_after, run_before
+from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable
from spack.version import Version

@@ -419,7 +419,10 @@ def configure(self, spec, prefix):
        options += self.configure_args()

        with working_dir(self.build_directory, create=True):
-            inspect.getmodule(self).configure(*options)
+            if self.spec.satisfies('%emscripten'):
+                inspect.getmodule(self).emconfigure(*options)
+            else:
+                inspect.getmodule(self).configure(*options)

    def setup_build_environment(self, env):
        if (self.spec.platform == 'darwin'
@@ -8,7 +8,7 @@
from llnl.util.filesystem import install, mkdirp

from spack.build_systems.cmake import CMakePackage
-from spack.package_base import run_after
+from spack.package import run_after


def cmake_cache_path(name, value, comment=""):
@@ -210,10 +210,6 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
        ]

-    def initconfig_package_entries(self):
-        """This method is to be overwritten by the package"""
-        return []
-
    def initconfig(self, spec, prefix):
        cache_entries = (self.std_initconfig_entries() +
                         self.initconfig_compiler_entries() +
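For reference, `cmake_cache_path` and its siblings emit `set(... CACHE ...)` entries for the generated initial-cache file; a hedged sketch of the idea (not necessarily the file's verbatim body):

```python
def cmake_cache_path(name, value, comment=""):
    """Generate a string for a CMake cache variable of type PATH."""
    return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)

# cmake_cache_path('ZLIB_ROOT', '/opt/spack/zlib')
# -> set(ZLIB_ROOT "/opt/spack/zlib" CACHE PATH "")
```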
@@ -18,7 +18,7 @@

import spack.build_environment
from spack.directives import conflicts, depends_on, variant
-from spack.package_base import InstallError, PackageBase, run_after
+from spack.package import InstallError, PackageBase, run_after
from spack.util.path import convert_to_posix_path

# Regex to extract the primary generator from the CMake generator
@@ -94,11 +94,13 @@ class CMakePackage(PackageBase):
    #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
    #: for more information.

-    generator = "Unix Makefiles"
+    # generator = "Unix Makefiles"
+    generator = "Ninja"
+    depends_on('ninja', type='build')

    if sys.platform == 'win32':
        generator = "Ninja"
-        depends_on('ninja')
+        depends_on('ninja', type='build')

    # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
    variant('build_type', default='RelWithDebInfo',
@@ -176,7 +178,6 @@ def _std_args(pkg):
            '-G', generator,
            define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
            define('CMAKE_BUILD_TYPE', build_type),
-            define('BUILD_TESTING', pkg.run_tests),
        ]

        # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -362,7 +363,6 @@ def cmake_args(self):

            * CMAKE_INSTALL_PREFIX
            * CMAKE_BUILD_TYPE
-            * BUILD_TESTING

        which will be set automatically.

@@ -376,7 +376,10 @@ def cmake(self, spec, prefix):
        options += self.cmake_args()
        options.append(os.path.abspath(self.root_cmakelists_dir))
        with working_dir(self.build_directory, create=True):
-            inspect.getmodule(self).cmake(*options)
+            if self.spec.satisfies('%emscripten'):
+                inspect.getmodule(self).emcmake(*options)
+            else:
+                inspect.getmodule(self).cmake(*options)

    def build(self, spec, prefix):
        """Make the build targets"""
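The same conditional recurs in the autotools, cmake and makefile classes in this compare: under a `%emscripten` compiler spec, the build tool is routed through Emscripten's wrapper (`emconfigure`, `emcmake`, `emmake`). A hedged sketch of the shared pattern (`run_build_tool` and its arguments are my names, for illustration only):

```python
import inspect

def run_build_tool(pkg, name, *args):
    # Prefer Emscripten's 'em' + tool wrapper (emcmake, emconfigure, emmake)
    # when building with %emscripten; otherwise call the tool directly.
    module = inspect.getmodule(pkg)
    tool = 'em' + name if pkg.spec.satisfies('%emscripten') else name
    return getattr(module, tool)(*args)
```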
@@ -6,7 +6,7 @@
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
-from spack.package_base import PackageBase
+from spack.package import PackageBase


class CudaPackage(PackageBase):
@@ -37,7 +37,6 @@ class CudaPackage(PackageBase):
    variant('cuda_arch',
            description='CUDA architecture',
            values=spack.variant.any_combination_of(*cuda_arch_values),
-            sticky=True,
            when='+cuda')

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
@@ -108,10 +107,10 @@ def cuda_flags(arch_list):
    # each release of a new cuda minor version.
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
-    conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
+    conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
    conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
    conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
-    conflicts('%clang@14:', when='+cuda ^cuda@:11.7')
+    conflicts('%clang@14:', when='+cuda ^cuda@:11.6')

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
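`cuda_flags`, named in the hunk header above, maps `cuda_arch` values to nvcc code-generation options; a hedged sketch of what it computes:

```python
@staticmethod
def cuda_flags(arch_list):
    # '80' -> '--generate-code arch=compute_80,code=sm_80'
    return ['--generate-code arch=compute_{0},code=sm_{0}'.format(s)
            for s in arch_list]
```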
@@ -3,16 +3,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from typing import Optional
-
-import spack.package_base
+import spack.package
import spack.util.url


-class GNUMirrorPackage(spack.package_base.PackageBase):
+class GNUMirrorPackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for GNU packages."""
    #: Path of the package in a GNU mirror
-    gnu_mirror_path = None  # type: Optional[str]
+    gnu_mirror_path = None

    #: List of GNU mirrors used by Spack
    base_mirrors = [
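A minimal sketch of how a package might use the mixin (the package name, path and checksum are illustrative):

```python
class Hello(AutotoolsPackage, GNUMirrorPackage):
    """Toy GNU package: download URL candidates come from gnu_mirror_path
    joined against each entry in base_mirrors."""
    gnu_mirror_path = 'hello/hello-2.12.tar.gz'
    version('2.12', sha256='<placeholder>')
```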
@@ -26,7 +26,7 @@

import spack.error
from spack.build_environment import dso_suffix
-from spack.package_base import InstallError, PackageBase, run_after
+from spack.package import InstallError, PackageBase, run_after
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.prefix import Prefix
@@ -1115,7 +1115,7 @@ def _setup_dependent_env_callback(
            raise InstallError('compilers_of_client arg required for MPI')

    def setup_dependent_package(self, module, dep_spec):
-        # https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
+        # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_package
        # Reminder: "module" refers to Python module.
        # Called before the install() method of dependents.

@@ -1259,14 +1259,6 @@ def install(self, spec, prefix):
        for f in glob.glob('%s/intel*log' % tmpdir):
            install(f, dst)

-    @run_after('install')
-    def validate_install(self):
-        # Sometimes the installer exits with an error but doesn't pass a
-        # non-zero exit code to spack. Check for the existence of a 'bin'
-        # directory to catch this error condition.
-        if not os.path.exists(self.prefix.bin):
-            raise InstallError('The installer has failed to install anything.')
-
    @run_after('install')
    def configure_rpath(self):
        if '+rpath' not in self.spec:
@@ -1,102 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import os

from llnl.util.filesystem import find

from spack.directives import depends_on, extends
from spack.multimethod import when
from spack.package_base import PackageBase
from spack.util.executable import Executable


class LuaPackage(PackageBase):
    """Specialized class for lua packages"""

    phases = ['unpack', 'generate_luarocks_config', 'preprocess', 'install']
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'LuaPackage'

    list_depth = 1  # LuaRocks requires at least one level of spidering to find versions
    depends_on('lua-lang')
    extends('lua', when='^lua')
    with when('^lua-luajit'):
        extends('lua-luajit')
        depends_on('luajit')
        depends_on('lua-luajit+lualinks')
    with when('^lua-luajit-openresty'):
        extends('lua-luajit-openresty')
        depends_on('luajit')
        depends_on('lua-luajit-openresty+lualinks')

    def unpack(self, spec, prefix):
        if os.path.splitext(self.stage.archive_file)[1] == '.rock':
            directory = self.luarocks('unpack', self.stage.archive_file, output=str)
            dirlines = directory.split('\n')
            # TODO: figure out how to scope this better
            os.chdir(dirlines[2])

    def _generate_tree_line(self, name, prefix):
        return """{{ name = "{name}", root = "{prefix}" }};""".format(
            name=name,
            prefix=prefix,
        )

    def _luarocks_config_path(self):
        return os.path.join(self.stage.source_path, 'spack_luarocks.lua')

    def generate_luarocks_config(self, spec, prefix):
        spec = self.spec
        table_entries = []
        for d in spec.traverse(
            deptypes=("build", "run"), deptype_query="run"
        ):
            if d.package.extends(self.extendee_spec):
                table_entries.append(self._generate_tree_line(d.name, d.prefix))

        path = self._luarocks_config_path()
        with open(path, 'w') as config:
            config.write(
                """
                deps_mode="all"
                rocks_trees={{
                {}
                }}
                """.format(
                    "\n".join(table_entries)
                )
            )
        return path

    def setup_build_environment(self, env):
        env.set('LUAROCKS_CONFIG', self._luarocks_config_path())

    def preprocess(self, spec, prefix):
        """Override this to preprocess source before building with luarocks"""
        pass

    @property
    def lua(self):
        return Executable(self.spec['lua-lang'].prefix.bin.lua)

    @property
    def luarocks(self):
        lr = Executable(self.spec['lua-lang'].prefix.bin.luarocks)
        return lr

    def luarocks_args(self):
        return []

    def install(self, spec, prefix):
        rock = '.'
        specs = find('.', '*.rockspec', recursive=False)
        if specs:
            rock = specs[0]
        rocks_args = self.luarocks_args()
        rocks_args.append(rock)
        self.luarocks('--tree=' + prefix, 'make', *rocks_args)
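For illustration, the deleted `generate_luarocks_config` renders one tree entry per extendee dependency; a hedged sketch of the text it would produce (names and prefixes invented):

```python
entry = '{{ name = "{0}", root = "{1}" }};'.format('lua-lpeg', '/opt/spack/lua-lpeg')
print('deps_mode="all"\nrocks_trees={{\n    {0}\n}}'.format(entry))
# deps_mode="all"
# rocks_trees={
#     { name = "lua-lpeg", root = "/opt/spack/lua-lpeg" };
# }
```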
@@ -11,7 +11,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import conflicts
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class MakefilePackage(PackageBase):
@@ -82,14 +82,20 @@ def build(self, spec, prefix):
        as targets.
        """
        with working_dir(self.build_directory):
-            inspect.getmodule(self).make(*self.build_targets)
+            if self.spec.satisfies('%emscripten'):
+                inspect.getmodule(self).emmake(*self.build_targets)
+            else:
+                inspect.getmodule(self).make(*self.build_targets)

    def install(self, spec, prefix):
        """Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
        as targets.
        """
        with working_dir(self.build_directory):
-            inspect.getmodule(self).make(*self.install_targets)
+            if self.spec.satisfies('%emscripten'):
+                inspect.getmodule(self).emmake(*self.install_targets)
+            else:
+                inspect.getmodule(self).make(*self.install_targets)

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)
@@ -7,7 +7,7 @@
from llnl.util.filesystem import install_tree, working_dir

from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after
from spack.util.executable import which

@@ -11,7 +11,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on, variant
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class MesonPackage(PackageBase):
@@ -6,7 +6,7 @@
import inspect

from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class OctavePackage(PackageBase):
@@ -14,7 +14,7 @@

from llnl.util.filesystem import find_headers, find_libraries, join_path

-from spack.package_base import Package
+from spack.package import Package
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

@@ -30,15 +30,6 @@ class IntelOneApiPackage(Package):
    # organization (e.g. University/Company).
    redistribute_source = False

-    @staticmethod
-    def update_description(cls):
-        """Updates oneapi package descriptions with common text."""
-
-        text = """ LICENSE INFORMATION: By downloading and using this software, you agree to the terms
-        and conditions of the software license agreements at https://intel.ly/393CijO."""
-        cls.__doc__ = cls.__doc__ + text
-        return cls
-
    @property
    def component_dir(self):
        """Subdirectory for this component in the install prefix."""
@@ -10,7 +10,7 @@
from llnl.util.filesystem import filter_file

from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after
from spack.util.executable import Executable

@@ -6,7 +6,6 @@
import os
import re
import shutil
-from typing import Optional

import llnl.util.tty as tty
from llnl.util.filesystem import (
@@ -20,13 +19,13 @@
from llnl.util.lang import match_predicate

from spack.directives import depends_on, extends
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class PythonPackage(PackageBase):
    """Specialized class for packages that are built using pip."""
    #: Package name, version, and extension on PyPI
-    pypi = None  # type: Optional[str]
+    pypi = None

    maintainers = ['adamjstewart']

@@ -47,7 +46,7 @@ class PythonPackage(PackageBase):
    # package manually
    depends_on('py-wheel', type='build')

-    py_namespace = None  # type: Optional[str]
+    py_namespace = None

    @staticmethod
    def _std_args(cls):
@@ -9,7 +9,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class QMakePackage(PackageBase):
@@ -5,10 +5,9 @@


import inspect
-from typing import Optional

from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class RPackage(PackageBase):
@@ -29,10 +28,10 @@ class RPackage(PackageBase):
    # package attributes that can be expanded to set the homepage, url,
    # list_url, and git values
    # For CRAN packages
-    cran = None  # type: Optional[str]
+    cran = None

    # For Bioconductor packages
-    bioc = None  # type: Optional[str]
+    bioc = None

    maintainers = ['glennpj']

@@ -3,14 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
-from typing import Optional

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends
-from spack.package_base import PackageBase
+from spack.package import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError

@@ -37,8 +36,8 @@ class RacketPackage(PackageBase):
    extends('racket')

    pkgs = False
-    subdirectory = None  # type: Optional[str]
-    name = None  # type: Optional[str]
+    subdirectory = None
+    name = None
    parallel = True

    @property
@@ -77,7 +77,7 @@

import spack.variant
from spack.directives import conflicts, depends_on, variant
-from spack.package_base import PackageBase
+from spack.package import PackageBase


class ROCmPackage(PackageBase):
@@ -7,7 +7,7 @@
import inspect

from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class RubyPackage(PackageBase):
@@ -7,7 +7,7 @@
import inspect

from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class SConsPackage(PackageBase):
@@ -11,7 +11,7 @@
from llnl.util.filesystem import find, join_path, working_dir

from spack.directives import depends_on, extends
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class SIPPackage(PackageBase):
@@ -3,17 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from typing import Optional
-
-import spack.package_base
+import spack.package
import spack.util.url


-class SourceforgePackage(spack.package_base.PackageBase):
+class SourceforgePackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for Sourceforge
    packages."""
    #: Path of the package in a Sourceforge mirror
-    sourceforge_mirror_path = None  # type: Optional[str]
+    sourceforge_mirror_path = None

    #: List of Sourceforge mirrors used by Spack
    base_mirrors = [
@@ -2,17 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from typing import Optional
-
-import spack.package_base
+import spack.package
import spack.util.url


-class SourcewarePackage(spack.package_base.PackageBase):
+class SourcewarePackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for Sourceware.org
    packages."""
    #: Path of the package in a Sourceware mirror
-    sourceware_mirror_path = None  # type: Optional[str]
+    sourceware_mirror_path = None

    #: List of Sourceware mirrors used by Spack
    base_mirrors = [
@@ -9,7 +9,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+from spack.package import PackageBase, run_after


class WafPackage(PackageBase):
@@ -3,17 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from typing import Optional
-
-import spack.package_base
+import spack.package
import spack.util.url


-class XorgPackage(spack.package_base.PackageBase):
+class XorgPackage(spack.package.PackageBase):
    """Mixin that takes care of setting url and mirrors for x.org
    packages."""
    #: Path of the package in a x.org mirror
-    xorg_mirror_path = None  # type: Optional[str]
+    xorg_mirror_path = None

    #: List of x.org mirrors used by Spack
    # Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
@@ -33,6 +33,7 @@
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
+import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec
@@ -41,8 +42,10 @@
    'always',
]

+SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
+SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
+                                           'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
-SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

spack_gpg = spack.main.SpackCommand('gpg')
spack_compiler = spack.main.SpackCommand('compiler')
@@ -87,8 +90,8 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
    return build_group_id


-def _populate_buildgroup(job_names, group_name, project, site,
-                         credentials, cdash_url):
+def populate_buildgroup(job_names, group_name, project, site,
+                        credentials, cdash_url):
    url = "{0}/api/v1/buildgroup.php".format(cdash_url)

    headers = {
@@ -129,30 +132,16 @@ def _populate_buildgroup(job_names, group_name, project, site,
    response_code = response.getcode()

    if response_code != 200:
-        msg = 'Error response code ({0}) in _populate_buildgroup'.format(
+        msg = 'Error response code ({0}) in populate_buildgroup'.format(
            response_code)
        tty.warn(msg)


-def _is_main_phase(phase_name):
+def is_main_phase(phase_name):
    return True if phase_name == 'specs' else False


def get_job_name(phase, strip_compiler, spec, osarch, build_group):
-    """ Given the necessary parts, format the gitlab job name
-
-    Arguments:
-        phase (str): Either 'specs' for the main phase, or the name of a
-            bootstrapping phase
-        strip_compiler (bool): Should compiler be stripped from job name
-        spec (spack.spec.Spec): Spec job will build
-        osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
-            but sphinx doesn't recognize the type and fails).
-        build_group (str): Name of build group this job belongs to (a CDash
-            notion)
-
-    Returns: The job name
-    """
    item_idx = 0
    format_str = ''
    format_args = []
@@ -174,7 +163,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
        format_args.append(spec.version)
        item_idx += 1

-    if _is_main_phase(phase) is True or strip_compiler is False:
+    if is_main_phase(phase) is True or strip_compiler is False:
        format_str += ' {{{0}}}'.format(item_idx)
        format_args.append(spec.compiler)
        item_idx += 1
@@ -191,17 +180,12 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
    return format_str.format(*format_args)


-def _get_cdash_build_name(spec, build_group):
+def get_cdash_build_name(spec, build_group):
    return '{0}@{1}%{2} arch={3} ({4})'.format(
        spec.name, spec.version, spec.compiler, spec.architecture, build_group)


-def _remove_reserved_tags(tags):
-    """Convenience function to strip reserved tags from jobs"""
-    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
-
-
-def _get_spec_string(spec):
+def get_spec_string(spec):
    format_elements = [
        '{name}{@version}',
        '{%compiler}',
@@ -213,15 +197,15 @@ def _get_spec_string(spec):
    return spec.format(''.join(format_elements))


-def _format_root_spec(spec, main_phase, strip_compiler):
+def format_root_spec(spec, main_phase, strip_compiler):
    if main_phase is False and strip_compiler is True:
        return '{0}@{1} arch={2}'.format(
            spec.name, spec.version, spec.architecture)
    else:
-        return spec.dag_hash()
+        return spec.build_hash()


-def _spec_deps_key(s):
+def spec_deps_key(s):
    return '{0}/{1}'.format(s.name, s.dag_hash(7))
@@ -233,10 +217,8 @@ def _add_dependency(spec_label, dep_label, deps):
    deps[spec_label].add(dep_label)


-def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
-                           mirrors_to_check=None):
-    spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
-                                       mirrors_to_check=mirrors_to_check)
+def get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
+    spec_deps_obj = compute_spec_deps(specs, check_index_only=check_index_only)

    if spec_deps_obj:
        dependencies = spec_deps_obj['dependencies']
@@ -253,7 +235,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
        _add_dependency(entry['spec'], entry['depends'], deps)


-def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
+def stage_spec_jobs(specs, check_index_only=False):
    """Take a set of release specs and generate a list of "stages", where the
    jobs in any stage are dependent only on jobs in previous stages. This
    allows us to maximize build parallelism within the gitlab-ci framework.
@@ -265,8 +247,6 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
            are up to date on those mirrors. This flag limits that search to
            the binary cache indices on those mirrors to speed the process up,
            even though there is no guarantee the index is up to date.
-        mirrors_to_check: Optional mapping giving mirrors to check instead of
-            any configured mirrors.

    Returns: A tuple of information objects describing the specs, dependencies
        and stages:
@@ -286,11 +266,11 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):

    """

-    # The convenience method below, "_remove_satisfied_deps()", does not modify
+    # The convenience method below, "remove_satisfied_deps()", does not modify
    # the "deps" parameter. Instead, it returns a new dictionary where only
    # dependencies which have not yet been satisfied are included in the
    # return value.
-    def _remove_satisfied_deps(deps, satisfied_list):
+    def remove_satisfied_deps(deps, satisfied_list):
        new_deps = {}

        for key, value in iteritems(deps):
@@ -303,8 +283,8 @@ def _remove_satisfied_deps(deps, satisfied_list):
    deps = {}
    spec_labels = {}

-    _get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
-                           mirrors_to_check=mirrors_to_check)
+    get_spec_dependencies(
+        specs, deps, spec_labels, check_index_only=check_index_only)

    # Save the original deps, as we need to return them at the end of the
    # function. In the while loop below, the "dependencies" variable is
@@ -322,7 +302,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
        # Note that "dependencies" is a dictionary mapping each dependent
        # package to the set of not-yet-handled dependencies. The final step
        # below removes all the dependencies that are handled by this stage.
-        dependencies = _remove_satisfied_deps(dependencies, next_stage)
+        dependencies = remove_satisfied_deps(dependencies, next_stage)

        if unstaged:
            stages.append(unstaged.copy())
@@ -330,12 +310,13 @@ def _remove_satisfied_deps(deps, satisfied_list):
    return spec_labels, deps, stages


-def _print_staging_summary(spec_labels, dependencies, stages):
+def print_staging_summary(spec_labels, dependencies, stages):
    if not stages:
        return

-    tty.msg('  Staging summary ([x] means a job needs rebuilding):')
-    for stage_index, stage in enumerate(stages):
+    tty.msg('  Staging summary:')
+    stage_index = 0
+    for stage in stages:
        tty.msg('    stage {0} ({1} jobs):'.format(stage_index, len(stage)))

        for job in sorted(stage):
@@ -343,10 +324,12 @@ def _print_staging_summary(spec_labels, dependencies, stages):
            tty.msg('      [{1}] {0} -> {2}'.format(
                job,
                'x' if spec_labels[job]['needs_rebuild'] else ' ',
-                _get_spec_string(s)))
+                get_spec_string(s)))
+
+        stage_index += 1
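The staging described in `stage_spec_jobs` is repeated set subtraction: peel off everything whose dependencies are already satisfied, then shrink the remaining dependency map. A self-contained toy illustration (the package names are invented):

```python
deps = {'app': {'libb'}, 'libb': {'liba'}, 'liba': set()}
unstaged = set(deps)
stages = []
while unstaged:
    # Everything with no unsatisfied deps can build in this stage.
    next_stage = {s for s in unstaged if not deps[s]}
    stages.append(sorted(next_stage))
    unstaged -= next_stage
    deps = {s: d - next_stage for s, d in deps.items() if s in unstaged}
print(stages)  # [['liba'], ['libb'], ['app']]
```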
-def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
+def compute_spec_deps(spec_list, check_index_only=False):
    """
    Computes all the dependencies for the spec(s) and generates a JSON
    object which provides both a list of unique spec names as well as a
@@ -419,17 +402,17 @@ def append_dep(s, d):
            continue

        up_to_date_mirrors = bindist.get_mirrors_for_spec(
-            spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
+            spec=s, full_hash_match=True, index_only=check_index_only)

-        skey = _spec_deps_key(s)
+        skey = spec_deps_key(s)
        spec_labels[skey] = {
-            'spec': _get_spec_string(s),
+            'spec': get_spec_string(s),
            'root': root_spec,
            'needs_rebuild': not up_to_date_mirrors,
        }

        for d in s.dependencies(deptype=all):
-            dkey = _spec_deps_key(d)
+            dkey = spec_deps_key(d)
            if d.external:
                tty.msg('Will not stage external dep: {0}'.format(d))
                continue
@@ -452,11 +435,11 @@ def append_dep(s, d):
    return deps_json_obj


-def _spec_matches(spec, match_string):
+def spec_matches(spec, match_string):
    return spec.satisfies(match_string)


-def _copy_attributes(attrs_list, src_dict, dest_dict):
+def copy_attributes(attrs_list, src_dict, dest_dict):
    for runner_attr in attrs_list:
        if runner_attr in src_dict:
            if runner_attr in dest_dict and runner_attr == 'tags':
@@ -477,7 +460,7 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):
            dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])


-def _find_matching_config(spec, gitlab_ci):
+def find_matching_config(spec, gitlab_ci):
    runner_attributes = {}
    overridable_attrs = [
        'image',
@@ -488,16 +471,16 @@ def _find_matching_config(spec, gitlab_ci):
        'after_script',
    ]

-    _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
+    copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)

    ci_mappings = gitlab_ci['mappings']
    for ci_mapping in ci_mappings:
        for match_string in ci_mapping['match']:
-            if _spec_matches(spec, match_string):
+            if spec_matches(spec, match_string):
                if 'runner-attributes' in ci_mapping:
-                    _copy_attributes(overridable_attrs,
-                                     ci_mapping['runner-attributes'],
-                                     runner_attributes)
+                    copy_attributes(overridable_attrs,
+                                    ci_mapping['runner-attributes'],
+                                    runner_attributes)
                return runner_attributes
    else:
        return None
@@ -505,16 +488,16 @@ def _find_matching_config(spec, gitlab_ci):
    return runner_attributes
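A hedged usage sketch for `find_matching_config` (the config dict and spec are illustrative):

```python
gitlab_ci = {
    'image': 'ghcr.io/spack/ubuntu20.04',
    'mappings': [
        {'match': ['%gcc'],
         'runner-attributes': {'tags': ['spack', 'large']}},
    ],
}
attrs = find_matching_config(release_spec, gitlab_ci)
# -> overridable defaults merged with the first matching mapping, or None
```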
-def _pkg_name_from_spec_label(spec_label):
+def pkg_name_from_spec_label(spec_label):
    return spec_label[:spec_label.index('/')]


-def _format_job_needs(phase_name, strip_compilers, dep_jobs,
-                      osname, build_group, prune_dag, stage_spec_dict,
-                      enable_artifacts_buildcache):
+def format_job_needs(phase_name, strip_compilers, dep_jobs,
+                     osname, build_group, prune_dag, stage_spec_dict,
+                     enable_artifacts_buildcache):
    needs_list = []
    for dep_job in dep_jobs:
-        dep_spec_key = _spec_deps_key(dep_job)
+        dep_spec_key = spec_deps_key(dep_job)
        dep_spec_info = stage_spec_dict[dep_spec_key]

        if not prune_dag or dep_spec_info['needs_rebuild']:
@@ -608,38 +591,7 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
                            prune_dag=False, check_index_only=False,
                            run_optimizer=False, use_dependencies=False,
-                           artifacts_root=None, remote_mirror_override=None):
-    """ Generate a gitlab yaml file to run a dynamic child pipeline from
-    the spec matrix in the active environment.
-
-    Arguments:
-        env (spack.environment.Environment): Activated environment object
-            which must contain a gitlab-ci section describing how to map
-            specs to runners
-        print_summary (bool): Should we print a summary of all the jobs in
-            the stages in which they were placed.
-        output_file (str): File path where generated file should be written
-        prune_dag (bool): If True, do not generate jobs for specs that are
-            already built on the mirror.
-        check_index_only (bool): If True, attempt to fetch the mirror index
-            and only use that to determine whether built specs on the mirror
-            are up to date (this mode results in faster yaml generation time).
-            Otherwise, also check each spec directly by url (useful if there
-            is no index or it might be out of date).
-        run_optimizer (bool): If True, post-process the generated yaml to try
-            to reduce the size (attempts to collect repeated configuration
-            and replace with definitions).
-        use_dependencies (bool): If true, use "dependencies" rather than "needs"
-            ("needs" allows DAG scheduling). Useful if gitlab instance cannot
-            be configured to handle more than a few "needs" per job.
-        artifacts_root (str): Path where artifacts like logs, environment
-            files (spack.yaml, spack.lock), etc should be written. GitLab
-            requires this to be within the project directory.
-        remote_mirror_override (str): Typically only needed when one spack.yaml
-            is used to populate several mirrors with binaries, based on some
-            criteria. Spack protected pipelines populate different mirrors based
-            on branch name, facilitated by this option.
-    """
+                           artifacts_root=None):
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
            env.concretize()
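A hedged sketch of how `spack ci generate` might drive this function (the argument values are illustrative, and `active_environment()` is assumed from spack.environment):

```python
import spack.environment as ev

env = ev.active_environment()
generate_gitlab_ci_yaml(env, print_summary=True,
                        output_file='.gitlab-ci-generated.yml',
                        prune_dag=True, check_index_only=True,
                        artifacts_root='jobs_scratch_dir')
```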
@@ -688,19 +640,17 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        for s in affected_specs:
            tty.debug('  {0}'.format(s.name))

-    # Downstream jobs will "need" (depend on, for both scheduling and
-    # artifacts, which include spack.lock file) this pipeline generation
-    # job by both name and pipeline id.  If those environment variables
-    # do not exist, then maybe this is just running in a shell, in which
-    # case, there is no expectation gitlab will ever run the generated
-    # pipeline and those environment variables do not matter.
-    generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
-    parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')
+    generate_job_name = os.environ.get('CI_JOB_NAME', None)
+    parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)

    # Values: "spack_pull_request", "spack_protected_branch", or not set
    spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
    is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

-    spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)
+    spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
+    pr_mirror_url = None
+    if spack_pr_branch:
+        pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
+                                      spack_pr_branch)

    if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
        tty.die('spack ci generate requires an env containing a mirror')
@@ -755,25 +705,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        'strip-compilers': False,
    })

-    # If a remote mirror override (alternate buildcache destination) was
-    # specified, add it here in case it has already built hashes we might
-    # generate.
-    mirrors_to_check = None
-    if remote_mirror_override:
-        if spack_pipeline_type == 'spack_protected_branch':
-            # Overriding the main mirror in this case might result
-            # in skipping jobs on a release pipeline because specs are
-            # up to date in develop.  Eventually we want to notice and take
-            # advantage of this by scheduling a job to copy the spec from
-            # develop to the release, but until we have that, this makes
-            # sure we schedule a rebuild job if the spec isn't already in
-            # override mirror.
-            mirrors_to_check = {
-                'override': remote_mirror_override
-            }
-        else:
-            spack.mirror.add(
-                'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
+    # Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
+    # be up to date in one of those and thus not need to be rebuilt.
+    if pr_mirror_url:
+        spack.mirror.add(
+            'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
+        spack.mirror.add('ci_shared_pr_mirror',
+                         SPACK_SHARED_PR_MIRROR_URL,
+                         cfg.default_modify_scope())

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
@@ -848,13 +787,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            phase_spec.concretize()
        staged_phases[phase_name] = stage_spec_jobs(
            concrete_phase_specs,
-            check_index_only=check_index_only,
-            mirrors_to_check=mirrors_to_check)
+            check_index_only=check_index_only)
    finally:
-        # Clean up remote mirror override if enabled
-        if remote_mirror_override:
-            if spack_pipeline_type != 'spack_protected_branch':
-                spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
+        # Clean up PR mirror if enabled
+        if pr_mirror_url:
+            spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

    all_job_names = []
    output_object = {}
@@ -867,7 +804,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
    max_needs_job = ''

    # If this is configured, spack will fail "spack ci generate" if it
-    # generates any hash which exists under the broken specs url.
+    # generates any full hash which exists under the broken specs url.
    broken_spec_urls = None
    if broken_specs_url:
        if broken_specs_url.startswith('http'):
@@ -882,7 +819,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

-        main_phase = _is_main_phase(phase_name)
+        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
@@ -893,9 +830,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                root_spec = spec_record['rootSpec']
-                pkg_name = _pkg_name_from_spec_label(spec_label)
+                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]
+                release_spec_full_hash = release_spec.full_hash()
                release_spec_dag_hash = release_spec.dag_hash()
+                release_spec_build_hash = release_spec.build_hash()

                if prune_untouched_packages:
                    if release_spec not in affected_specs:
@@ -904,7 +843,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                        spec_record['needs_rebuild'] = False
                        continue

-                runner_attribs = _find_matching_config(
+                runner_attribs = find_matching_config(
                    release_spec, gitlab_ci)

                if not runner_attribs:
@@ -914,14 +853,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

                tags = [tag for tag in runner_attribs['tags']]

-                if spack_pipeline_type is not None:
-                    # For spack pipelines "public" and "protected" are reserved tags
-                    tags = _remove_reserved_tags(tags)
-                    if spack_pipeline_type == 'spack_protected_branch':
-                        tags.extend(['aws', 'protected'])
-                    elif spack_pipeline_type == 'spack_pull_request':
-                        tags.extend(['public'])
-
                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])
@@ -966,13 +897,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
-                    if _is_main_phase(phase_name):
+                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
-                    'SPACK_ROOT_SPEC': _format_root_spec(
+                    'SPACK_ROOT_SPEC': format_root_spec(
                        root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
+                    'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
+                    'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
                    'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
                    'SPACK_COMPILER_ACTION': compiler_action
                }
@@ -991,15 +924,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                    # purposes, so we only get the direct dependencies.
                    dep_jobs = []
                    for dep_label in dependencies[spec_label]:
-                        dep_pkg = _pkg_name_from_spec_label(dep_label)
+                        dep_pkg = pkg_name_from_spec_label(dep_label)
                        dep_root = spec_labels[dep_label]['rootSpec']
                        dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
-                        _format_job_needs(phase_name, strip_compilers,
-                                          dep_jobs, osname, build_group,
-                                          prune_dag, spec_labels,
-                                          enable_artifacts_buildcache))
+                        format_job_needs(phase_name, strip_compilers,
+                                         dep_jobs, osname, build_group,
+                                         prune_dag, spec_labels,
+                                         enable_artifacts_buildcache))

                rebuild_spec = spec_record['needs_rebuild']

@@ -1010,7 +943,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
-                if _is_main_phase(phase_name):
+                if is_main_phase(phase_name):
                    spec_arch_family = (release_spec.architecture
                                        .target
                                        .microarchitecture
@@ -1038,7 +971,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                            # be rebuilt if the compiler targeted to build it
                            # needs to be rebuilt.
                            bs_specs, _, _ = staged_phases[bs['phase-name']]
-                            c_spec_key = _spec_deps_key(c_spec)
+                            c_spec_key = spec_deps_key(c_spec)
                            rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
                            rebuild_spec = rebuild_spec or rbld_comp
                            # Also update record so dependents do not fail to
@@ -1052,14 +985,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                            ]

                            job_dependencies.extend(
-                                _format_job_needs(bs['phase-name'],
-                                                  bs['strip-compilers'],
-                                                  dep_jobs,
-                                                  str(bs_arch),
-                                                  build_group,
-                                                  prune_dag,
-                                                  bs_specs,
-                                                  enable_artifacts_buildcache))
+                                format_job_needs(bs['phase-name'],
+                                                 bs['strip-compilers'],
+                                                 dep_jobs,
+                                                 str(bs_arch),
+                                                 build_group,
+                                                 prune_dag,
+                                                 bs_specs,
+                                                 enable_artifacts_buildcache))
                        else:
                            debug_msg = ''.join([
                                'Considered compiler {0} for spec ',
@@ -1076,9 +1009,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                    continue

                if (broken_spec_urls is not None and
-                        release_spec_dag_hash in broken_spec_urls):
+                        release_spec_full_hash in broken_spec_urls):
                    known_broken_specs_encountered.append('{0} ({1})'.format(
-                        release_spec, release_spec_dag_hash))
+                        release_spec, release_spec_full_hash))

                if artifacts_root:
                    job_dependencies.append({
@@ -1089,7 +1022,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)

                if enable_cdash_reporting:
-                    cdash_build_name = _get_cdash_build_name(
+                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)
                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
@@ -1154,7 +1087,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
-            _print_staging_summary(*phase_stages)
+            print_staging_summary(*phase_stages)

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))
@@ -1166,8 +1099,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        # Use "all_job_names" to populate the build group for this set
        if enable_cdash_reporting and cdash_auth_token:
            try:
-                _populate_buildgroup(all_job_names, build_group, cdash_project,
-                                     cdash_site, cdash_auth_token, cdash_url)
+                populate_buildgroup(all_job_names, build_group, cdash_project,
+                                    cdash_site, cdash_auth_token, cdash_url)
            except (SpackError, HTTPError, URLError) as err:
                tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
@@ -1203,13 +1136,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        cleanup_job = {}

        if service_job_config:
-            _copy_attributes(default_attrs,
-                             service_job_config,
-                             cleanup_job)
-
-        if 'tags' in cleanup_job:
-            service_tags = _remove_reserved_tags(cleanup_job['tags'])
-            cleanup_job['tags'] = service_tags
+            copy_attributes(default_attrs,
+                            service_job_config,
+                            cleanup_job)

        cleanup_job['stage'] = 'cleanup-temp-storage'
        cleanup_job['script'] = [
@@ -1218,91 +1147,22 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        ]
        cleanup_job['when'] = 'always'
        cleanup_job['retry'] = service_job_retries
-        cleanup_job['interruptible'] = True

        output_object['cleanup'] = cleanup_job

-        if ('signing-job-attributes' in gitlab_ci and
-                spack_pipeline_type == 'spack_protected_branch'):
-            # External signing: generate a job to check and sign binary pkgs
-            stage_names.append('stage-sign-pkgs')
-            signing_job_config = gitlab_ci['signing-job-attributes']
-            signing_job = {}
-
-            signing_job_attrs_to_copy = [
-                'image',
-                'tags',
-                'variables',
-                'before_script',
-                'script',
-                'after_script',
-            ]
-
-            _copy_attributes(signing_job_attrs_to_copy,
-                             signing_job_config,
-                             signing_job)
-
-            signing_job_tags = []
-            if 'tags' in signing_job:
-                signing_job_tags = _remove_reserved_tags(signing_job['tags'])
-
-            for tag in ['aws', 'protected', 'notary']:
-                if tag not in signing_job_tags:
-                    signing_job_tags.append(tag)
-            signing_job['tags'] = signing_job_tags
-
-            signing_job['stage'] = 'stage-sign-pkgs'
-            signing_job['when'] = 'always'
-            signing_job['retry'] = {
-                'max': 2,
-                'when': ['always']
-            }
-            signing_job['interruptible'] = True
-
-            output_object['sign-pkgs'] = signing_job
-
-        if spack_buildcache_copy:
-            # Generate a job to copy the contents from wherever the builds are getting
-            # pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
-            # variable.
-            src_url = remote_mirror_override or remote_mirror_url
-            dest_url = spack_buildcache_copy
-
-            stage_names.append('stage-copy-buildcache')
-            copy_job = {
-                'stage': 'stage-copy-buildcache',
-                'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
-                'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
-                'when': 'on_success',
-                'interruptible': True,
-                'retry': service_job_retries,
-                'script': [
-                    '. ./share/spack/setup-env.sh',
-                    'spack --version',
-                    'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
-                        src_url, dest_url)
-                ]
-            }
-
-            output_object['copy-mirror'] = copy_job
-
    if rebuild_index_enabled:
        # Add a final job to regenerate the index
        stage_names.append('stage-rebuild-index')
        final_job = {}

        if service_job_config:
-            _copy_attributes(default_attrs,
-                             service_job_config,
-                             final_job)
-
-        if 'tags' in final_job:
-            service_tags = _remove_reserved_tags(final_job['tags'])
-            final_job['tags'] = service_tags
+            copy_attributes(default_attrs,
+                            service_job_config,
+                            final_job)

        index_target_mirror = mirror_urls[0]
-        if remote_mirror_override:
-            index_target_mirror = remote_mirror_override
+        if is_pr_pipeline:
+            index_target_mirror = pr_mirror_url

        final_job['stage'] = 'stage-rebuild-index'
        final_job['script'] = [
@@ -1311,7 +1171,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        ]
        final_job['when'] = 'always'
        final_job['retry'] = service_job_retries
-        final_job['interruptible'] = True

        output_object['rebuild-index'] = final_job

@@ -1344,9 +1203,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
    }

-    if remote_mirror_override:
-        (output_object['variables']
-            ['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override
+    if pr_mirror_url:
+        output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url

    spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
    if spack_stack_name:
@@ -1371,9 +1229,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        noop_job = {}

        if service_job_config:
-            _copy_attributes(default_attrs,
-                             service_job_config,
-                             noop_job)
+            copy_attributes(default_attrs,
+                            service_job_config,
+                            noop_job)

        if 'script' not in noop_job:
            noop_job['script'] = [
@@ -1396,7 +1254,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))


-def _url_encode_string(input_string):
+def url_encode_string(input_string):
    encoded_keyval = urlencode({'donotcare': input_string})
    eq_idx = encoded_keyval.find('=') + 1
    encoded_value = encoded_keyval[eq_idx:]
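`url_encode_string` percent-encodes a single value by round-tripping it through `urlencode`; `urllib.parse.quote` gives a near-equivalent directly (assuming no characters should be left unescaped):

```python
from urllib.parse import quote

print(quote('gcc@9.3.0 arch=linux', safe=''))
# gcc%409.3.0%20arch%3Dlinux
```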
@@ -1404,17 +1262,6 @@ def url_encode_string(input_string):


def import_signing_key(base64_signing_key):
-    """ Given Base64-encoded gpg key, decode and import it to use for
-    signing packages.
-
-    Arguments:
-        base64_signing_key (str): A gpg key including the secret key,
-            armor-exported and base64 encoded, so it can be stored in a
-            gitlab CI variable. For an example of how to generate such
-            a key, see:
-
-    https://github.com/spack/spack-infrastructure/blob/main/gitlab-docker/files/gen-key
-    """
    if not base64_signing_key:
        tty.warn('No key found for signing/verifying packages')
        return
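The removed docstring describes the expected input: an armor-exported secret key, base64-encoded so it survives as a CI variable. A hedged sketch of producing such a value (the file name is illustrative):

```python
import base64

with open('signing-key.asc', 'rb') as f:  # `gpg --export-secret-keys --armor` output
    ci_variable_value = base64.b64encode(f.read()).decode('ascii')
```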
@@ -1452,34 +1299,14 @@ def import_signing_key(base64_signing_key):


def can_sign_binaries():
-    """ Utility method to determine if this spack instance is capable of
-    signing binary packages. This is currently only possible if the
-    spack gpg keystore contains exactly one secret key."""
    return len(gpg_util.signing_keys()) == 1


def can_verify_binaries():
-    """ Utility method to determine if this spack instance is capable (at
-    least in theory) of verifying signed binaries."""
    return len(gpg_util.public_keys()) >= 1


def configure_compilers(compiler_action, scope=None):
-    """ Depending on the compiler_action parameter, either turn on the
-    install_missing_compilers config option, or find spack compilers,
-    or do nothing. This is used from rebuild jobs in bootstrapping
-    pipelines, where in the bootstrapping phase we would pass
-    FIND_ANY in case of compiler-agnostic bootstrapping, while in the
-    spec building phase we would pass INSTALL_MISSING in order to get
-    spack to use the compiler which was built in the previous phase and
-    is now sitting in the binary mirror.
-
-    Arguments:
-        compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
-            described above. Any other value essentially results in a no-op.
-        scope (spack.config.ConfigScope): Optional. The scope in which to look for
-            compilers, in case 'FIND_ANY' was provided.
-    """
    if compiler_action == 'INSTALL_MISSING':
        tty.debug('Make sure bootstrapped compiler will be installed')
        config = cfg.get('config')
@@ -1503,35 +1330,6 @@ def configure_compilers(compiler_action, scope=None):


def get_concrete_specs(env, root_spec, job_name, compiler_action):
-    """ Build a dictionary of concrete specs relevant to a particular
-    rebuild job. This includes the root spec and the spec to be
-    rebuilt (which could be the same).
-
-    Arguments:
-
-        env (spack.environment.Environment): Activated spack environment
-            used to get concrete root spec by hash in case compiler_action
-            is anything other than FIND_ANY.
-        root_spec (str): If compiler_action is FIND_ANY root_spec is
-            a string representation which can be turned directly into
-            a spec, otherwise, it's a hash used to index the activated
-            spack environment.
-        job_name (str): Name of package to be built, used to index the
-            concrete root spec and produce the concrete spec to be
-            built.
-        compiler_action (str): Determines how to interpret the root_spec
-            parameter, either as a string representation or as a hash.
-
-    Returns:
-
-    .. code-block:: JSON
-
-       {
-           "root": "<spec>",
-           "<job-pkg-name>": "<spec>",
-       }
-
-    """
    spec_map = {
        'root': None,
    }
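The removed docstring documents the returned mapping; a hedged sketch of consuming it (the environment, root spec and package name are illustrative):

```python
spec_map = get_concrete_specs(env, root_spec, 'readline', 'FIND_ANY')
root = spec_map['root']          # concrete root spec
job_spec = spec_map['readline']  # concrete spec this job will rebuild
```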
@@ -1578,19 +1376,6 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
||||
|
||||
|
||||
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||
""" Push one or more binary packages to the mirror.
|
||||
|
||||
Arguments:
|
||||
|
||||
env (spack.environment.Environment): Optional environment. If
|
||||
provided, it is used to make sure binary package to push
|
||||
exists in the environment.
|
||||
specfile_path (str): Path to spec.json corresponding to built pkg
|
||||
to push.
|
||||
mirror_url (str): Base url of target mirror
|
||||
sign_binaries (bool): If True, spack will attempt to sign binary
|
||||
package before pushing.
|
||||
"""
|
||||
try:
|
||||
_push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
|
||||
except Exception as inst:
|
||||
@@ -1615,15 +1400,6 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||
|
||||
|
||||
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
""" Looks for spack-build-out.txt in the stage directory of the given
|
||||
job_spec, and attempts to copy the file into the directory given
|
||||
by job_log_dir.
|
||||
|
||||
Arguments:
|
||||
|
||||
job_spec (spack.spec.Spec): Spec associated with spack install log
|
||||
job_log_dir (str): Path into which build log should be copied
|
||||
"""
|
||||
try:
|
||||
job_pkg = spack.repo.get(job_spec)
|
||||
tty.debug('job package: {0}'.format(job_pkg))
|
||||
@@ -1642,14 +1418,6 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
|
||||
|
||||
def download_and_extract_artifacts(url, work_dir):
|
||||
""" Look for gitlab artifacts.zip at the given url, and attempt to download
|
||||
and extract the contents into the given work_dir
|
||||
|
||||
Arguments:
|
||||
|
||||
url (str): Complete url to artifacts.zip file
|
||||
work_dir (str): Path to destination where artifacts should be extracted
|
||||
"""
|
||||
tty.msg('Fetching artifacts from: {0}\n'.format(url))
|
||||
|
||||
headers = {
|
||||
@@ -1689,8 +1457,6 @@ def download_and_extract_artifacts(url, work_dir):
|
||||
|
||||
|
||||
def get_spack_info():
|
||||
""" If spack is running from a git repo, return the most recent git log
|
||||
entry, otherwise, return a string containing the spack version. """
|
||||
git_path = os.path.join(spack.paths.prefix, ".git")
|
||||
if os.path.exists(git_path):
|
||||
git = exe.which("git")
|
||||
@@ -1706,23 +1472,6 @@ def get_spack_info():
|
||||
|
||||
|
||||
def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
|
||||
""" Look in the local spack clone to find the checkout_commit, and if
|
||||
provided, the merge_commit given as arguments. If those commits can
|
||||
be found locally, then clone spack and attempt to recreate a merge
|
||||
commit with the same parent commits as tested in gitlab. This looks
|
||||
something like 1) git clone repo && cd repo 2) git checkout
|
||||
<checkout_commit> 3) git merge <merge_commit>. If there is no
|
||||
merge_commit provided, then skip step (3).
|
||||
|
||||
Arguments:
|
||||
|
||||
repro_dir (str): Location where spack should be cloned
|
||||
checkout_commit (str): SHA of PR branch commit
|
||||
merge_commit (str): SHA of target branch parent
|
||||
|
||||
Returns: True if git repo state was successfully recreated, or False
|
||||
otherwise.
|
||||
"""
# figure out the path to the spack git version being used for the
# reproduction
print('checkout_commit: {0}'.format(checkout_commit))
@@ -1764,7 +1513,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
fail_on_error=False)

if git.returncode != 0:
tty.error('Unable to clone your local spack repo:')
tty.error('Unable to clone your local spac repo:')
tty.msg(clone_out)
return False

@@ -1797,18 +1546,6 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):


def reproduce_ci_job(url, work_dir):
""" Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
attempt to set up an environment in which the failure can be reproduced
locally. This entails the following:

First download and extract artifacts. Then look through those artifacts
to glean some information needed for the reproducer (e.g. one of the
artifacts contains information about the version of spack tested by
gitlab, another is the generated pipeline yaml containing details
of the job like the docker image used to run it). The output of this
function is a set of printed instructions for running docker and then
commands to run to reproduce the build once inside the container.
"""
download_and_extract_artifacts(url, work_dir)

lock_file = fs.find(work_dir, 'spack.lock')[0]

@@ -8,10 +8,7 @@
import argparse
import os
import re
import shlex
import sys
from textwrap import dedent
from typing import List, Tuple

import ruamel.yaml as yaml
import six
@@ -150,58 +147,6 @@ def get_command(cmd_name):
return getattr(get_module(cmd_name), pname)


class _UnquotedFlags(object):
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

If the heuristic finds a match (which can be checked with `__bool__()`), a warning
message explaining how to quote multiple flags correctly can be generated with
`.report()`.
"""

flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format(
'|'.join(spack.spec.FlagMap.valid_compiler_flags()),
))

def __init__(self, all_unquoted_flag_pairs):
# type: (List[Tuple[re.Match, str]]) -> None
self._flag_pairs = all_unquoted_flag_pairs

def __bool__(self):
# type: () -> bool
return bool(self._flag_pairs)

@classmethod
def extract(cls, sargs):
# type: (str) -> _UnquotedFlags
all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
prev_flags_arg = None
for arg in shlex.split(sargs):
if prev_flags_arg is not None:
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
prev_flags_arg = cls.flags_arg_pattern.match(arg)
return cls(all_unquoted_flag_pairs)

def report(self):
# type: () -> str
single_errors = [
'({0}) {1} {2} => {3}'.format(
i + 1, match.group(0), next_arg,
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
)
for i, (match, next_arg) in enumerate(self._flag_pairs)
]
return dedent("""\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.

Possible flag quotation errors (with the correctly-quoted version after the =>):
{0}""").format('\n'.join(single_errors))


def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
exceptions and dies if there are errors.
@@ -210,28 +155,29 @@ def parse_specs(args, **kwargs):
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)

sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)

try:
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests)  # implies normalize
elif normalize:
spec.normalize(tests=tests)

return specs

except spack.spec.SpecParseError as e:
msg = e.message + "\n" + str(e.string) + "\n"
msg += (e.pos + 2) * " " + "^"
raise spack.error.SpackError(msg)

except spack.error.SpecError as e:

msg = e.message
if e.long_message:
msg += e.long_message
if unquoted_flags:
msg += '\n\n'
msg += unquoted_flags.report()

raise spack.error.SpackError(msg)
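
In CLI terms, the difference the report points at (package name hypothetical):

$ spack install zlib cflags=-Os -pipe     # '-pipe' is parsed as a separate spec token
$ spack install zlib cflags="-Os -pipe"   # sets both flags, as intended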


@@ -6,9 +6,7 @@

import os.path
import shutil
import tempfile

import llnl.util.filesystem
import llnl.util.tty
import llnl.util.tty.color

@@ -17,9 +15,6 @@
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path

description = "manage bootstrap configuration"
@@ -27,38 +22,6 @@
level = "long"


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = 'bootstrap_cache'

# Metadata for a generated binary mirror
BINARY_METADATA = {
'type': 'buildcache',
'description': ('Buildcache copied from a public tarball available on Github.'
'The sha256 checksum of binaries is checked before installation.'),
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
'tarball': BINARY_TARBALL
}
}

CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'

# Metadata for a generated source mirror
SOURCE_METADATA = {
'type': 'install',
'description': 'Mirror with software needed to bootstrap Spack',
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
}
}


def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
@@ -104,61 +67,24 @@ def setup_parser(subparser):
)

list = sp.add_parser(
'list', help='list all the sources of software to bootstrap Spack'
'list', help='list the methods available for bootstrapping'
)
_add_scope_option(list)

trust = sp.add_parser(
'trust', help='trust a bootstrapping source'
'trust', help='trust a bootstrapping method'
)
_add_scope_option(trust)
trust.add_argument(
'name', help='name of the source to be trusted'
'name', help='name of the method to be trusted'
)

untrust = sp.add_parser(
'untrust', help='untrust a bootstrapping source'
'untrust', help='untrust a bootstrapping method'
)
_add_scope_option(untrust)
untrust.add_argument(
'name', help='name of the source to be untrusted'
)

add = sp.add_parser(
'add', help='add a new source for bootstrapping'
)
_add_scope_option(add)
add.add_argument(
'--trust', action='store_true',
help='trust the source immediately upon addition')
add.add_argument(
'name', help='name of the new source of software'
)
add.add_argument(
'metadata_dir', help='directory where to find metadata files'
)

remove = sp.add_parser(
'remove', help='remove a bootstrapping source'
)
remove.add_argument(
'name', help='name of the source to be removed'
)

mirror = sp.add_parser(
'mirror', help='create a local mirror to bootstrap Spack'
)
mirror.add_argument(
'--binary-packages', action='store_true',
help='download public binaries in the mirror'
)
mirror.add_argument(
'--dev', action='store_true',
help='download dev dependencies too'
)
mirror.add_argument(
metavar='DIRECTORY', dest='root_dir',
help='root directory in which to create the mirror and metadata'
'name', help='name of the method to be untrusted'
)


@@ -211,7 +137,10 @@ def _root(args):


def _list(args):
sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
sources = spack.config.get(
'bootstrap:sources', default=None, scope=args.scope
)

if not sources:
llnl.util.tty.msg(
"No method available for bootstrapping Spack's dependencies"
@@ -320,121 +249,6 @@ def _status(args):
print()


def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]

# If the name is already used error out
if args.name in names:
msg = 'a source named "{0}" already exists. Please choose a different name'
raise RuntimeError(msg.format(args.name))

# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
raise RuntimeError(
'the directory "{0}" does not exist'.format(args.metadata_dir)
)

file = os.path.join(metadata_dir, 'metadata.yaml')
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))

# Insert the new source as the highest priority one
write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
sources = [
{'name': args.name, 'metadata': args.metadata_dir}
] + sources
spack.config.set('bootstrap:sources', sources, scope=write_scope)

msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_trust(args)
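
Usage matches the instructions printed by the mirror subcommand below (final path hypothetical):

$ spack bootstrap add --trust local-sources <final-path>/metadata/sources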


def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]
if args.name not in names:
msg = ('cannot find any bootstrapping source named "{0}". '
'Run `spack bootstrap list` to see available sources.')
raise RuntimeError(msg.format(args.name))

for current_scope in spack.config.scopes():
sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
if args.name in [s['name'] for s in sources]:
sources = [s for s in sources if s['name'] != args.name]
spack.config.set('bootstrap:sources', sources, scope=current_scope)
msg = ('Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.')
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))


def _mirror(args):
mirror_dir = spack.util.path.canonicalize_path(
os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
)

# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
# Suppress tty from the call below for terser messages
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)

if args.binary_packages:
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
llnl.util.tty.set_msg_enabled(False)
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
stage.create()
stage.fetch()
stage.expand_archive()
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join('metadata', subdir)
metadata_yaml = os.path.join(
args.root_dir, metadata_rel_dir, 'metadata.yaml'
)
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode='w') as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

instructions = ('\nTo register the mirror on the platform where it\'s supposed '
'to be used, move "{0}" to its final location and run the '
'following command(s):\n\n').format(args.root_dir)
cmd = ' % spack bootstrap add --trust {0} <final-path>/{1}\n'
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
instructions += cmd.format('local-sources', rel_directory)
if args.binary_packages:
abs_directory, rel_directory = write_metadata(
subdir='binaries', metadata=BINARY_METADATA
)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
instructions += cmd.format('local-binaries', rel_directory)
print(instructions)


def bootstrap(parser, args):
callbacks = {
'status': _status,
@@ -444,9 +258,6 @@ def bootstrap(parser, args):
'root': _root,
'list': _list,
'trust': _trust,
'untrust': _untrust,
'add': _add,
'remove': _remove,
'mirror': _mirror
'untrust': _untrust
}
callbacks[args.subcommand](args)

@@ -161,6 +161,11 @@ def setup_parser(subparser):
help=('Check single spec from json or yaml file instead of release ' +
'specs file'))

check.add_argument(
'--rebuild-on-error', default=False, action='store_true',
help="Default to rebuilding packages if errors are encountered " +
"during the process of checking whether rebuilding is needed")

check.set_defaults(func=check_fn)

# Download tarball and specfile
@@ -356,7 +361,7 @@ def list_fn(args):
try:
specs = bindist.update_cache_and_get_specs()
except bindist.FetchCacheError as e:
tty.die(e)
tty.error(e)

if not args.allarch:
arch = spack.spec.Spec.default_arch()
@@ -425,7 +430,7 @@ def check_fn(args):
sys.exit(0)

sys.exit(bindist.check_specs_against_mirrors(
configured_mirrors, specs, args.output_file))
configured_mirrors, specs, args.output_file, args.rebuild_on_error))


def download_fn(args):
@@ -478,12 +483,11 @@ def save_specfile_fn(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'json'
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)

@@ -697,7 +701,7 @@ def update_index(mirror_url, update_keys=False):

def update_index_fn(args):
"""Update a buildcache index."""
outdir = 'file://.'
outdir = '.'
if args.mirror_url:
outdir = args.mirror_url

@@ -14,7 +14,7 @@
import spack.repo
import spack.stage
import spack.util.crypto
from spack.package_base import preferred_version
from spack.package import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version, ver

@@ -64,11 +64,6 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.add_argument(
'--buildcache-destination', default=None,
help="Override the mirror configured in the environment (spack.yaml) " +
"in order to push binaries from the generated pipeline to a " +
"different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',
@@ -132,7 +127,6 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination

if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
@@ -146,8 +140,7 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination)
use_dependencies=use_dependencies, artifacts_root=artifacts_root)

if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
@@ -174,7 +167,8 @@ def ci_reindex(args):

def ci_rebuild(args):
"""Check a single spec against the remote mirror, and rebuild it from
source if the mirror does not contain the hash. """
source if the mirror does not contain the full hash match of the spec
as computed locally. """
env = spack.cmd.require_active_env(cmd_name='ci rebuild')

# Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -187,9 +181,6 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))

# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@@ -206,7 +197,7 @@ def ci_rebuild(args):
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

# Construct absolute paths relative to current $CI_PROJECT_DIR
@@ -254,10 +245,6 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))

# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url

# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
@@ -293,8 +280,8 @@ def ci_rebuild(args):
env, root_spec, job_spec_pkg_name, compiler_action)
job_spec = spec_map[job_spec_pkg_name]

job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)

# To provide logs, cdash reports, etc for developer download/perusal,
# these things have to be put into artifacts. This means downstream
@@ -348,23 +335,23 @@ def ci_rebuild(args):
# using a compiler already installed on the target system).
spack_ci.configure_compilers(compiler_action)

# Write this job's spec json into the reproduction directory, and it will
# Write this job's spec yaml into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
with open(job_spec_json_path, 'w') as fd:
fd.write(job_spec.to_json(hash=ht.dag_hash))
tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
with open(job_spec_yaml_path, 'w') as fd:
fd.write(job_spec.to_yaml(hash=ht.build_hash))

# Write the concrete root spec json into the reproduction directory
root_spec_json_path = os.path.join(repro_dir, 'root.json')
with open(root_spec_json_path, 'w') as fd:
fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
# Write the concrete root spec yaml into the reproduction directory
root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
with open(root_spec_yaml_path, 'w') as fd:
fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

# Write some other details to aid in reproduction into an artifact
repro_file = os.path.join(repro_dir, 'repro.json')
repro_details = {
'job_name': ci_job_name,
'job_spec_json': job_spec_json_file,
'root_spec_json': 'root.json',
'job_spec_yaml': job_spec_yaml_file,
'root_spec_yaml': 'root.yaml',
'ci_project_dir': ci_project_dir
}
with open(repro_file, 'w') as fd:
@@ -379,41 +366,25 @@ def ci_rebuild(args):
fd.write(b'\n')

# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# mirror now so it's used when we check for a full hash match already
# built for this spec.
if pipeline_mirror_url:
spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
pipeline_mirror_url,
cfg.default_modify_scope())

# Check configured mirrors for a built spec with a matching hash
mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {
'override': remote_mirror_override
}

# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add('mirror_override',
remote_mirror_override,
cfg.default_modify_scope())

# Check configured mirrors for a built spec with a matching full hash
matches = bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
job_spec, full_hash_match=True, index_only=False)

if matches:
# Got a hash match on at least one configured mirror. All
# Got a full hash match on at least one configured mirror. All
# matches represent the fully up-to-date spec, so should all be
# equivalent. If artifacts mirror is enabled, we just pick one
# of the matches and download the buildcache files from there to
# the artifacts, so they're available to be used by dependent
# jobs in subsequent stages.
tty.msg('No need to rebuild {0}, found hash match at: '.format(
tty.msg('No need to rebuild {0}, found full hash match at: '.format(
job_spec_pkg_name))
for match in matches:
tty.msg(' {0}'.format(match['mirror_url']))
@@ -432,7 +403,7 @@ def ci_rebuild(args):
# Now we are done and successful
sys.exit(0)

# No hash match anywhere means we need to rebuild spec
# No full hash match anywhere means we need to rebuild spec

# Start with spack arguments
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
@@ -444,6 +415,7 @@ def ci_rebuild(args):
install_args.extend([
'install',
'--keep-stage',
'--require-full-hash-match',
])

can_verify = spack_ci.can_verify_binaries()
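
Putting the pieces above together, the assembled command ends up roughly as (directory and package names illustrative; the spec file is .json on one side of this diff and .yaml on the other):

$ spack install --keep-stage --require-full-hash-match -f <repro_dir>/<pkg>.yaml
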
@@ -471,8 +443,8 @@ def ci_rebuild(args):

# TODO: once we have the concrete spec registry, use the DAG hash
# to identify the spec to install, rather than the concrete spec
# json file.
install_args.extend(['-f', job_spec_json_path])
# yaml file.
install_args.extend(['-f', job_spec_yaml_path])

tty.debug('Installing {0} from source'.format(job_spec.name))
tty.debug('spack install arguments: {0}'.format(
@@ -505,13 +477,13 @@ def ci_rebuild(args):
tty.debug('spack install exited {0}'.format(install_exit_code))

# If a spec fails to build in a spack develop pipeline, we add it to a
# list of known broken hashes. This allows spack PR pipelines to
# list of known broken full hashes. This allows spack PR pipelines to
# avoid wasting compute cycles attempting to build those hashes.
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
tty.debug('Install failed on develop')
if 'broken-specs-url' in gitlab_ci:
broken_specs_url = gitlab_ci['broken-specs-url']
dev_fail_hash = job_spec.dag_hash()
dev_fail_hash = job_spec.full_hash()
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
tty.msg('Reporting broken develop build as: {0}'.format(
broken_spec_path))
@@ -522,7 +494,7 @@ def ci_rebuild(args):
'broken-spec': {
'job-url': get_env_var('CI_JOB_URL'),
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
}
}

@@ -548,6 +520,13 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Create buildcache on remote mirror, either on pr-specific mirror or
# on the main mirror defined in the gitlab-enabled spack environment
if spack_is_pr_pipeline:
buildcache_mirror_url = pr_mirror_url
else:
buildcache_mirror_url = remote_mirror_url

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
@@ -560,7 +539,7 @@ def ci_rebuild(args):
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
spack_ci.push_mirror_contents(
env, job_spec_json_path, buildcache_mirror_url, sign_binaries
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
)

# Create another copy of that buildcache in the per-pipeline
@@ -569,14 +548,14 @@ def ci_rebuild(args):
# prefix is set)
if pipeline_mirror_url:
spack_ci.push_mirror_contents(
env, job_spec_json_path, pipeline_mirror_url, sign_binaries
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
)

# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
broken_specs_url = gitlab_ci['broken-specs-url']
just_built_hash = job_spec.dag_hash()
just_built_hash = job_spec.full_hash()
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
if web_util.url_exists(broken_spec_path):
tty.msg('Removing {0} from the list of broken specs'.format(

@@ -18,8 +18,6 @@

def setup_parser(subparser):
arguments.add_common_arguments(subparser, ['clean', 'dirty'])
arguments.add_concretizer_args(subparser)

subparser.add_argument(
'--dump', metavar="FILE",
help="dump a source-able environment to FILE"

@@ -22,9 +22,6 @@ def setup_parser(subparser):
help="""Concretize with test dependencies. When 'root' is chosen, test
dependencies are only added for the environment's root specs. When 'all' is
chosen, test dependencies are enabled for all packages in the environment.""")
subparser.add_argument(
'-q', '--quiet', action='store_true',
help="Don't print concretized specs")

spack.cmd.common.arguments.add_concretizer_args(subparser)

@@ -41,6 +38,5 @@ def concretize(parser, args):

with env.write_transaction():
concretized_specs = env.concretize(force=args.force, tests=tests)
if not args.quiet:
ev.display_specs(concretized_specs)
ev.display_specs(concretized_specs)
env.write()

@@ -57,7 +57,7 @@
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------

from spack.package import *
from spack import *


class {class_name}({base_class_name}):
@@ -187,27 +187,6 @@ def cmake_args(self):
return args"""


class LuaPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for LuaRocks-based packages"""

base_class_name = 'LuaPackage'

body_def = """\
def luarocks_args(self):
# FIXME: Add arguments to `luarocks make` other than rockspec path
# FIXME: If not needed delete this function
args = []
return args"""

def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
if not name.startswith('lua-'):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
name = 'lua-{0}'.format(name)
super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)


class MesonPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for meson-based packages"""

@@ -601,7 +580,6 @@ def __init__(self, name, *args, **kwargs):
'makefile': MakefilePackageTemplate,
'intel': IntelPackageTemplate,
'meson': MesonPackageTemplate,
'lua': LuaPackageTemplate,
'sip': SIPPackageTemplate,
'generic': PackageTemplate,
}
@@ -666,9 +644,6 @@ def __call__(self, stage, url):
if url.endswith('.whl') or '.whl#' in url:
self.build_system = 'python'
return
if url.endswith('.rock'):
self.build_system = 'lua'
return

# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
@@ -693,7 +668,6 @@ def __call__(self, stage, url):
(r'/Rakefile$', 'ruby'),
(r'/setup\.rb$', 'ruby'),
(r'/.*\.pro$', 'qmake'),
(r'/.*\.rockspec$', 'lua'),
(r'/(GNU)?[Mm]akefile$', 'makefile'),
(r'/DESCRIPTION$', 'octave'),
(r'/meson\.build$', 'meson'),
@@ -826,7 +800,7 @@ def get_versions(args, name):
spack.util.url.require_url_format(args.url)
if args.url.startswith('file://'):
valid_url = False  # No point in spidering these
except (ValueError, TypeError):
except ValueError:
valid_url = False

if args.url is not None and args.template != 'bundle' and valid_url:
@@ -11,7 +11,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package_base
import spack.package
import spack.repo
import spack.store

@@ -57,7 +57,7 @@ def dependencies(parser, args):

else:
spec = specs[0]
dependencies = spack.package_base.possible_dependencies(
dependencies = spack.package.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,

@@ -91,8 +91,8 @@ def dev_build(self, args):
spec.concretize()
package = spack.repo.get(spec)

if spec.installed:
tty.error("Already installed in %s" % spec.prefix)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this dev build.")
sys.exit(1)


@@ -68,14 +68,8 @@ def compare_specs(a, b, to_string=False, color=None):
# Prepare a solver setup to parse differences
setup = asp.SpackSolverSetup()

# get facts for specs, making sure to include build dependencies of concrete
# specs and to descend into dependency hashes so we include all facts.
a_facts = set(t for t in setup.spec_clauses(
a, body=True, expand_hashes=True, concrete_build_deps=True,
))
b_facts = set(t for t in setup.spec_clauses(
b, body=True, expand_hashes=True, concrete_build_deps=True,
))
a_facts = set(t for t in setup.spec_clauses(a, body=True, expand_hashes=True))
b_facts = set(t for t in setup.spec_clauses(b, body=True, expand_hashes=True))

# We want to present them to the user as simple key: values
intersect = sorted(a_facts.intersection(b_facts))

@@ -8,8 +8,6 @@
import sys
import tempfile

import six

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -43,8 +41,7 @@
'loads',
'view',
'update',
'revert',
'depfile'
'revert'
]


@@ -526,154 +523,6 @@ def env_revert(args):
tty.msg(msg.format(manifest_file))


def env_depfile_setup_parser(subparser):
"""generate a depfile from the concrete environment specs"""
subparser.add_argument(
'--make-target-prefix', default=None, metavar='TARGET',
help='prefix Makefile targets with <TARGET>/<name>. By default the absolute '
'path to the directory makedeps under the environment metadata dir is '
'used. Can be set to an empty string --make-target-prefix \'\'.')
subparser.add_argument(
'--make-disable-jobserver', default=True, action='store_false',
dest='jobserver', help='disable POSIX jobserver support.')
subparser.add_argument(
'-o', '--output', default=None, metavar='FILE',
help='write the depfile to FILE rather than to stdout')
subparser.add_argument(
'-G', '--generator', default='make', choices=('make',),
help='specify the depfile type. Currently only make is supported.')
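
A usage sketch (environment name hypothetical):

$ spack -e myenv env depfile -o Makefile   # generate the depfile
$ make -j16                                # fetch and install with jobserver support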


def env_depfile(args):
# Currently only make is supported.
spack.cmd.require_active_env(cmd_name='env depfile')
env = ev.active_environment()

# Maps each hash in the environment to a string of install prereqs
hash_to_prereqs = {}
hash_to_spec = {}

if args.make_target_prefix is None:
target_prefix = os.path.join(env.env_subdir_path, 'makedeps')
else:
target_prefix = args.make_target_prefix

def get_target(name):
# The `all`, `fetch` and `clean` targets are phony. It doesn't make sense to
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
# sense to have a prefix like `env/all`, `env/fetch`, `env/clean` when they are
# supposed to be included
if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
return name
else:
return os.path.join(target_prefix, name)

def get_install_target(name):
return os.path.join(target_prefix, '.install', name)

def get_fetch_target(name):
return os.path.join(target_prefix, '.fetch', name)
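
Concretely, with the default absolute prefix these helpers yield names like (path and hash illustrative):

# target_prefix = '/path/to/env/.spack-env/makedeps'
get_target('all')               # -> 'all' (phony targets keep their bare names)
get_target('env')               # -> '/path/to/env/.spack-env/makedeps/env'
get_install_target('abc1234')   # -> '/path/to/env/.spack-env/makedeps/.install/abc1234'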

for _, spec in env.concretized_specs():
for s in spec.traverse(root=True):
hash_to_spec[s.dag_hash()] = s
hash_to_prereqs[s.dag_hash()] = [
get_install_target(dep.dag_hash()) for dep in s.dependencies()]

root_dags = [s.dag_hash() for _, s in env.concretized_specs()]

# Root specs without deps are the prereqs for the environment target
root_install_targets = [get_install_target(h) for h in root_dags]

# All package install targets, not just roots.
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]

# Fetch targets for all packages in the environment, not just roots.
all_fetch_targets = [get_fetch_target(h) for h in hash_to_spec.keys()]

buf = six.StringIO()

buf.write("""SPACK ?= spack

.PHONY: {} {} {}

{}: {}

{}: {}

{}: {}
\t@touch $@

{}: {}
\t@touch $@

{}:
\t@mkdir -p {} {}

{}: | {}
\t$(info Fetching $(SPEC))
\t$(SPACK) -e '{}' fetch $(SPACK_FETCH_FLAGS) /$(notdir $@) && touch $@

{}: {}
\t$(info Installing $(SPEC))
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
--no-add /$(notdir $@) && touch $@

""".format(get_target('all'), get_target('fetch-all'), get_target('clean'),
get_target('all'), get_target('env'),
get_target('fetch-all'), get_target('fetch'),
get_target('env'), ' '.join(root_install_targets),
get_target('fetch'), ' '.join(all_fetch_targets),
get_target('dirs'), get_target('.fetch'), get_target('.install'),
get_target('.fetch/%'), get_target('dirs'),
env.path,
get_target('.install/%'), get_target('.fetch/%'),
'+' if args.jobserver else '', env.path))
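
Rendered with --make-target-prefix '' and a single root spec, the generated file begins roughly like this (hash shortened, recipe lines tab-indented):

SPACK ?= spack

.PHONY: all fetch-all clean

all: env

fetch-all: fetch

env: .install/abc1234
	@touch $@

.install/%: .fetch/%
	$(info Installing $(SPEC))
	+$(SPACK) -e '/path/to/env' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
--no-add /$(notdir $@) && touch $@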

# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
# The prefix can be an empty string, in that case we don't add the `/`.
# The name is currently the dag hash of the spec. In principle it
# could be the package name in case of `concretization: together` so
# it can be more easily referred to, but for now we don't special case
# this.
fmt = '{name}{@version}{%compiler}{variants}{arch=architecture}'

# Set SPEC for each hash
buf.write('# Set the human-readable spec for each target\n')
for dag_hash in hash_to_prereqs.keys():
formatted_spec = hash_to_spec[dag_hash].format(fmt)
buf.write("{}: SPEC = {}\n".format(get_target('%/' + dag_hash), formatted_spec))
buf.write('\n')

# Set install dependencies
buf.write('# Install dependencies\n')
for parent, children in hash_to_prereqs.items():
if not children:
continue
buf.write('{}: {}\n'.format(get_install_target(parent), ' '.join(children)))
buf.write('\n')

# Clean target: remove target files but not their folders, cause
# --make-target-prefix can be any existing directory we do not control,
# including empty string (which means deleting the containing folder
# would delete the folder with the Makefile)
buf.write("{}:\n\trm -f -- {} {} {} {}\n".format(
get_target('clean'),
get_target('env'),
get_target('fetch'),
' '.join(all_fetch_targets),
' '.join(all_install_targets)))

makefile = buf.getvalue()

# Finally write to stdout/file.
if args.output:
with open(args.output, 'w') as f:
f.write(makefile)
else:
sys.stdout.write(makefile)


#: Dictionary mapping subcommand names and aliases to functions
subcommand_functions = {}

@@ -5,7 +5,6 @@
from __future__ import print_function

import argparse
import os
import sys

import llnl.util.tty as tty
@@ -14,7 +13,6 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.util.environment
@@ -37,9 +35,6 @@ def setup_parser(subparser):
find_parser.add_argument(
'--not-buildable', action='store_true', default=False,
help="packages with detected externals won't be built with Spack")
find_parser.add_argument(
'-p', '--path', default=None, action='append',
help="Alternative search paths for finding externals. May be repeated")
find_parser.add_argument(
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope('packages'),
@@ -60,40 +55,8 @@ def setup_parser(subparser):
'list', help='list detectable packages, by repository and name'
)

read_cray_manifest = sp.add_parser(
'read-cray-manifest', help=(
"consume a Spack-compatible description of externally-installed "
"packages, including dependency relationships"
)
)
read_cray_manifest.add_argument(
'--file', default=None,
help="specify a location other than the default")
read_cray_manifest.add_argument(
'--directory', default=None,
help="specify a directory storing a group of manifest files")
read_cray_manifest.add_argument(
'--dry-run', action='store_true', default=False,
help="don't modify DB with files that are read")
read_cray_manifest.add_argument(
'--fail-on-error', action='store_true',
help=("if a manifest file cannot be parsed, fail and report the "
"full stack trace")
)


def external_find(args):
if args.all or not (args.tags or args.packages):
# If the user calls 'spack external find' with no arguments, and
# this system has a description of installed packages, then we should
# consume it automatically.
try:
_collect_and_consume_cray_manifest_files()
except NoManifestFileError:
# It's fine to not find any manifest file if we are doing the
# search implicitly (i.e. as part of 'spack external find')
pass

# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
args.tags = ['core-packages', 'build-tools']
@@ -125,12 +88,10 @@ def external_find(args):

# If the list of packages is empty, search for every possible package
if not args.tags and not packages_to_check:
packages_to_check = list(spack.repo.path.all_packages())
packages_to_check = spack.repo.path.all_packages()

detected_packages = spack.detection.by_executable(
packages_to_check, path_hints=args.path)
detected_packages.update(spack.detection.by_library(
packages_to_check, path_hints=args.path))
detected_packages = spack.detection.by_executable(packages_to_check)
detected_packages.update(spack.detection.by_library(packages_to_check))

new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -145,75 +106,16 @@ def external_find(args):
tty.msg('No new external packages detected')
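
For example, the new find options combine as (search paths hypothetical):

$ spack external find --not-buildable -p /opt/local -p /usr/local cmake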


def external_read_cray_manifest(args):
_collect_and_consume_cray_manifest_files(
manifest_file=args.file,
manifest_directory=args.directory,
dry_run=args.dry_run,
fail_on_error=args.fail_on_error
)


def _collect_and_consume_cray_manifest_files(
manifest_file=None, manifest_directory=None, dry_run=False,
fail_on_error=False):

manifest_files = []
if manifest_file:
manifest_files.append(manifest_file)

manifest_dirs = []
if manifest_directory:
manifest_dirs.append(manifest_directory)

if os.path.isdir(cray_manifest.default_path):
tty.debug(
"Cray manifest path {0} exists: collecting all files to read."
.format(cray_manifest.default_path))
manifest_dirs.append(cray_manifest.default_path)
else:
tty.debug("Default Cray manifest directory {0} does not exist."
.format(cray_manifest.default_path))

for directory in manifest_dirs:
for fname in os.listdir(directory):
if fname.endswith('.json'):
fpath = os.path.join(directory, fname)
tty.debug("Adding manifest file: {0}".format(fpath))
manifest_files.append(os.path.join(directory, fpath))

if not manifest_files:
raise NoManifestFileError(
"--file/--directory not specified, and no manifest found at {0}"
.format(cray_manifest.default_path))

for path in manifest_files:
tty.debug("Reading manifest file: " + path)
try:
cray_manifest.read(path, not dry_run)
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
if fail_on_error:
raise
else:
tty.warn("Failure reading manifest file: {0}"
"\n\t{1}".format(path, str(e)))


def external_list(args):
# Trigger a read of all packages, might take a long time.
list(spack.repo.path.all_packages())
# Print all the detectable packages
tty.msg("Detectable packages per repository")
for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
for namespace, pkgs in sorted(spack.package.detectable_packages.items()):
print("Repository:", namespace)
colify.colify(pkgs, indent=4, output=sys.stdout)


def external(parser, args):
action = {'find': external_find, 'list': external_list,
'read-cray-manifest': external_read_cray_manifest}
action = {'find': external_find, 'list': external_list}
action[args.external_command](args)


class NoManifestFileError(spack.error.SpackError):
pass

@@ -69,10 +69,14 @@ def fetch(parser, args):

for spec in specs:
if args.missing or args.dependencies:
for s in spec.traverse(root=False):
for s in spec.traverse():
package = spack.repo.get(s)

# Skip already-installed packages with --missing
if args.missing and s.installed:
if args.missing and package.installed:
continue

s.package.do_fetch()
spec.package.do_fetch()
package.do_fetch()

package = spack.repo.get(spec)
package.do_fetch()

@@ -18,7 +18,7 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
from spack.package_base import has_test_method, preferred_version
from spack.package import has_test_method, preferred_version

description = 'get detailed information on a particular package'
section = 'basic'
@@ -184,9 +184,8 @@ def print_detectable(pkg):
color.cprint('')
color.cprint(section_title('Externally Detectable: '))

# If the package has an 'executables' or 'libraries' field, it
# can detect an installation
if hasattr(pkg, 'executables') or hasattr(pkg, 'libraries'):
# If the package has an 'executables' field, it can detect an installation
if hasattr(pkg, 'executables'):
find_attributes = []
if hasattr(pkg, 'determine_version'):
find_attributes.append('version')
@@ -269,14 +268,14 @@ def print_tests(pkg):
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
pkg_base = spack.package_base.PackageBase
pkg_base = spack.package.PackageBase
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])

# TODO Refactor START
# Use code from package_base.py's test_process IF this functionality is
# Use code from package.py's test_process IF this functionality is
# accepted.
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))

|
@@ -47,6 +47,7 @@ def update_kwargs_from_args(args, kwargs):
|
||||
'explicit': True, # Always true for install command
|
||||
'stop_at': args.until,
|
||||
'unsigned': args.unsigned,
|
||||
'full_hash_match': args.full_hash_match,
|
||||
})
|
||||
|
||||
kwargs.update({
|
||||
@@ -116,6 +117,11 @@ def setup_parser(subparser):
|
||||
'--no-check-signature', action='store_true',
|
||||
dest='unsigned', default=False,
|
||||
help="do not check signatures of binary packages")
|
||||
subparser.add_argument(
|
||||
'--require-full-hash-match', action='store_true',
|
||||
dest='full_hash_match', default=False, help="""when installing from
|
||||
binary mirrors, do not install binary package unless the full hash of the
|
||||
remote spec matches that of the local spec""")
|
||||
subparser.add_argument(
|
||||
'--show-log-on-error', action='store_true',
|
||||
help="print full build log to stderr if build fails")
|
||||
@@ -153,6 +159,10 @@ def setup_parser(subparser):
|
||||
if 'all' is chosen, run package tests during installation for all
|
||||
packages. If neither are chosen, don't run tests for any packages."""
|
||||
)
|
||||
testing.add_argument(
|
||||
'--run-tests', action='store_true',
|
||||
help='run package tests during installation (same as --test=all)'
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--log-format',
|
||||
default=None,
|
||||
@@ -302,12 +312,15 @@ def install(parser, args, **kwargs):
|
||||
)
|
||||
|
||||
reporter = spack.report.collect_info(
|
||||
spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
|
||||
spack.package.PackageInstaller, '_install_task', args.log_format, args)
|
||||
if args.log_file:
|
||||
reporter.filename = args.log_file
|
||||
|
||||
if args.run_tests:
|
||||
tty.warn("Deprecated option: --run-tests: use --test=all instead")
|
||||
|
||||
def get_tests(specs):
|
||||
if args.test == 'all':
|
||||
if args.test == 'all' or args.run_tests:
|
||||
return True
|
||||
elif args.test == 'root':
|
||||
return [spec.name for spec in specs]
|
||||
@@ -464,7 +477,7 @@ def get_tests(specs):
|
||||
})
|
||||
|
||||
# If we are using the monitor, we send configs. and create build
|
||||
# The dag_hash is the main package id
|
||||
# The full_hash is the main package id, the build_hash for others
|
||||
if args.use_monitor and specs:
|
||||
monitor.new_configuration(specs)
|
||||
install_specs(args, kwargs, zip(abstract_specs, specs))
|
||||
|
@@ -12,7 +12,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.package_base
import spack.package
import spack.repo
import spack.store
from spack.database import InstallStatuses

@@ -273,7 +273,7 @@ def refresh(module_type, specs, args):
return

if not args.upstream_modules:
specs = list(s for s in specs if not s.installed_upstream)
specs = list(s for s in specs if not s.package.installed_upstream)

if not args.yes_to_all:
msg = 'You are about to regenerate {types} module files for:\n'

@@ -15,10 +15,8 @@
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment
import spack.hash_types as ht
import spack.package_base
import spack.package
import spack.solver.asp as asp

description = "concretize specs using an ASP solver"
@@ -76,51 +74,6 @@ def setup_parser(subparser):
spack.cmd.common.arguments.add_concretizer_args(subparser)


def _process_result(result, show, required_format, kwargs):
|
||||
result.raise_if_unsat()
|
||||
opt, _, _ = min(result.answers)
|
||||
if ("opt" in show) and (not required_format):
|
||||
tty.msg("Best of %d considered solutions." % result.nmodels)
|
||||
tty.msg("Optimization Criteria:")
|
||||
|
||||
maxlen = max(len(s[2]) for s in result.criteria)
|
||||
color.cprint(
|
||||
"@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
|
||||
)
|
||||
|
||||
fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
|
||||
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
|
||||
color.cprint(
|
||||
fmt % (
|
||||
i,
|
||||
name,
|
||||
"-" if build_cost is None else installed_cost,
|
||||
installed_cost if build_cost is None else build_cost,
|
||||
)
|
||||
)
|
||||
print()
|
||||
|
||||
# dump the solutions as concretized specs
|
||||
if 'solutions' in show:
|
||||
for spec in result.specs:
|
||||
# With -y, just print YAML to output.
|
||||
if required_format == 'yaml':
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||
elif required_format == 'json':
|
||||
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
sys.stdout.write(
|
||||
spec.tree(color=sys.stdout.isatty(), **kwargs))
|
||||
print()
|
||||
|
||||
if result.unsolved_specs and "solutions" in show:
|
||||
tty.msg("Unsolved specs")
|
||||
for spec in result.unsolved_specs:
|
||||
print(spec)
|
||||
print()
|
||||
|
||||
|
||||
def solve(parser, args):
|
||||
# these are the same options as `spack spec`
|
||||
name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
|
||||
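Worth noting in `_process_result`: `opt, _, _ = min(result.answers)` relies on Python's lexicographic sequence comparison, so `min()` returns the answer with the lowest optimization cost. A self-contained illustration with made-up answer tuples:

    # Each answer is (cost_vector, index, model); sequences compare
    # element-wise, so min() picks the lowest cost vector first.
    answers = [
        ([3, 1], 0, "model-a"),
        ([2, 9], 1, "model-b"),
        ([2, 4], 2, "model-c"),
    ]
    opt, _, best = min(answers)
    print(opt, best)  # [2, 4] model-c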
@@ -149,42 +102,58 @@ def solve(parser, args):
    if models < 0:
        tty.die("model count must be non-negative: %d")

    # Format required for the output (JSON, YAML or None)
    required_format = args.format

    # If we have an active environment, pick the specs from there
    env = spack.environment.active_environment()
    if env and args.specs:
        msg = "cannot give explicit specs when an environment is active"
        raise RuntimeError(msg)

    specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)
    specs = spack.cmd.parse_specs(args.specs)

    # set up solver parameters
    # Note: reuse and other concretizer prefs are passed as configuration
    solver = asp.Solver()
    output = sys.stdout if "asp" in show else None
    setup_only = set(show) == {'asp'}
    unify = spack.config.get('concretizer:unify')
    if unify != 'when_possible':
        # set up solver parameters
        # Note: reuse and other concretizer prefs are passed as configuration
        result = solver.solve(
            specs,
            out=output,
            models=models,
            timers=args.timers,
            stats=args.stats,
            setup_only=setup_only
        )
        if not setup_only:
            _process_result(result, show, required_format, kwargs)
    else:
        for idx, result in enumerate(solver.solve_in_rounds(
            specs, out=output, models=models, timers=args.timers, stats=args.stats
        )):
            if "solutions" in show:
                tty.msg("ROUND {0}".format(idx))
                tty.msg("")
    result = solver.solve(
        specs,
        out=output,
        models=models,
        timers=args.timers,
        stats=args.stats,
        setup_only=(set(show) == {'asp'})
    )
    if 'solutions' not in show:
        return

    # die if no solution was found
    result.raise_if_unsat()

    # show the solutions as concretized specs
    if 'solutions' in show:
        opt, _, _ = min(result.answers)

        if ("opt" in show) and (not args.format):
            tty.msg("Best of %d considered solutions." % result.nmodels)
            tty.msg("Optimization Criteria:")

            maxlen = max(len(s[2]) for s in result.criteria)
            color.cprint(
                "@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
            )

            fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
            for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
                color.cprint(
                    fmt % (
                        i,
                        name,
                        "-" if build_idx is None else opt[idx],
                        opt[idx] if build_idx is None else opt[build_idx],
                    )
                )
            print()

        for spec in result.specs:
            # With -y, just print YAML to output.
            if args.format == 'yaml':
                # use write because to_yaml already has a newline.
                sys.stdout.write(spec.to_yaml(hash=ht.build_hash))
            elif args.format == 'json':
                sys.stdout.write(spec.to_json(hash=ht.build_hash))
            else:
            print("% END ROUND {0}\n".format(idx))
            if not setup_only:
                _process_result(result, show, required_format, kwargs)
                sys.stdout.write(
                    spec.tree(color=sys.stdout.isatty(), **kwargs))
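The criteria table in both code paths uses a two-stage %-format: the first interpolation bakes the computed column width (`maxlen`) into the format string, and the second fills in each row. The same mechanics, sketched without Spack's `@K{...}` color markup and with made-up criteria:

    criteria = [(0, None, "number of packages to build (vs. reuse)"),
                (3, 7, "version weight")]
    maxlen = max(len(name) for _, _, name in criteria)
    fmt = " %%-8d %%-%ds%%9s %%7s" % maxlen    # pass 1: embed the width
    for i, (installed_cost, build_cost, name) in enumerate(criteria, 1):
        print(fmt % (                          # pass 2: fill the row
            i,
            name,
            "-" if build_cost is None else installed_cost,
            installed_cost if build_cost is None else build_cost,
        ))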
@@ -34,16 +34,12 @@ def setup_parser(subparser):
    arguments.add_common_arguments(
        subparser, ['long', 'very_long', 'install_status']
    )
    format_group = subparser.add_mutually_exclusive_group()
    format_group.add_argument(
    subparser.add_argument(
        '-y', '--yaml', action='store_const', dest='format', default=None,
        const='yaml', help='print concrete spec as YAML')
    format_group.add_argument(
    subparser.add_argument(
        '-j', '--json', action='store_const', dest='format', default=None,
        const='json', help='print concrete spec as JSON')
    format_group.add_argument(
        '--format', action='store', default=None,
        help='print concrete spec with the specified format string')
    subparser.add_argument(
        '-c', '--cover', action='store',
        default='nodes', choices=['nodes', 'edges', 'paths'],
@@ -51,6 +47,10 @@ def setup_parser(subparser):
    subparser.add_argument(
        '-N', '--namespaces', action='store_true', default=False,
        help='show fully qualified package names')
    subparser.add_argument(
        '--hash-type', default="build_hash",
        choices=['build_hash', 'full_hash', 'dag_hash'],
        help='generate spec with a particular hash type.')
    subparser.add_argument(
        '-t', '--types', action='store_true', default=False,
        help='show dependency types')
@@ -80,8 +80,7 @@ def spec(parser, args):
    # Use command line specified specs, otherwise try to use environment specs.
    if args.specs:
        input_specs = spack.cmd.parse_specs(args.specs)
        concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
        specs = list(zip(input_specs, concretized_specs))
        specs = [(s, s.concretized()) for s in input_specs]
    else:
        env = ev.active_environment()
        if env:
@@ -93,13 +92,14 @@ def spec(parser, args):
    for (input, output) in specs:
        # With -y, just print YAML to output.
        if args.format:
            # The user can specify the hash type to use
            hash_type = getattr(ht, args.hash_type)

            if args.format == 'yaml':
                # use write because to_yaml already has a newline.
                sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
            elif args.format == 'json':
                print(output.to_json(hash=ht.dag_hash))
                sys.stdout.write(output.to_yaml(hash=hash_type))
            else:
                print(output.format(args.format))
                print(output.to_json(hash=hash_type))
            continue

        with tree_context():
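The practical difference between the two `setup_parser` variants above: with a mutually exclusive group (the removed lines), argparse itself rejects combining `-y` with `--format`, while plain `add_argument` (the v0.17 form) lets them mix and leaves disambiguation to the command. A quick demonstration of the group behavior, using only stdlib argparse (names are illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog='spec-demo')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-y', '--yaml', action='store_const', dest='format',
                       const='yaml', help='print concrete spec as YAML')
    group.add_argument('--format', action='store', default=None,
                       help='print spec with a format string')

    try:
        parser.parse_args(['-y', '--format', '{name}'])
    except SystemExit:
        print("argparse refused: -y not allowed with --format")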
@@ -27,6 +27,12 @@ def setup_parser(subparser):


def stage(parser, args):
    # We temporarily modify the working directory when setting up a stage, so we need to
    # convert this to an absolute path here in order for it to remain valid later.
    custom_path = os.path.abspath(args.path) if args.path else None
    if custom_path:
        spack.stage.create_stage_root(custom_path)

    if not args.specs:
        env = ev.active_environment()
        if env:
@@ -48,10 +54,6 @@ def stage(parser, args):

    specs = spack.cmd.parse_specs(args.specs, concretize=False)

    # We temporarily modify the working directory when setting up a stage, so we need to
    # convert this to an absolute path here in order for it to remain valid later.
    custom_path = os.path.abspath(args.path) if args.path else None

    # prevent multiple specs from extracting in the same folder
    if len(specs) > 1 and custom_path:
        tty.die("`--path` requires a single spec, but multiple were provided")
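The motivation for hoisting the `os.path.abspath` call to the top of `stage()` is that relative paths silently re-resolve after any `os.chdir`. A small demonstration using only the stdlib (the directory name is arbitrary):

    import os
    import tempfile

    relative = "stage-dir"
    absolute = os.path.abspath(relative)   # capture before changing directory

    os.chdir(tempfile.gettempdir())
    # The relative name now resolves against the new cwd; the captured
    # absolute path still points at the original location.
    print(os.path.abspath(relative))  # somewhere under the temp dir
    print(absolute)                   # unchanged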
@@ -65,7 +65,7 @@ def is_package(f):
    packages, since we allow `from spack import *` and poking globals
    into packages.
    """
    return f.startswith("var/spack/repos/") and f.endswith('package.py')
    return f.startswith("var/spack/repos/")


#: decorator for adding tools to the list
@@ -94,16 +94,16 @@ def changed_files(base="develop", untracked=True, all_files=False, root=None):
    git = which("git", required=True)

    # ensure base is in the repo
    base_sha = git("rev-parse", "--quiet", "--verify", "--revs-only", base,
                   fail_on_error=False, output=str)
    git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
        fail_on_error=False)
    if git.returncode != 0:
        tty.die(
            "This repository does not have a '%s' revision." % base,
            "This repository does not have a '%s' branch." % base,
            "spack style needs this branch to determine which files changed.",
            "Ensure that '%s' exists, or specify files to check explicitly." % base
        )

    range = "{0}...".format(base_sha.strip())
    range = "{0}...".format(base)

    git_args = [
        # Add changed files committed since branching off of develop
|
||||
continue
|
||||
if not args.root_relative and re_obj:
|
||||
line = re_obj.sub(translate, line)
|
||||
print(line)
|
||||
print(" " + line)
|
||||
|
||||
|
||||
def print_style_header(file_list, args):
|
||||
@@ -290,26 +290,20 @@ def run_flake8(flake8_cmd, file_list, args):
|
||||
@tool("mypy")
|
||||
def run_mypy(mypy_cmd, file_list, args):
|
||||
# always run with config from running spack prefix
|
||||
common_mypy_args = [
|
||||
mypy_args = [
|
||||
"--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
|
||||
"--show-error-codes",
|
||||
]
|
||||
mypy_arg_sets = [common_mypy_args + [
|
||||
"--package", "spack",
|
||||
"--package", "llnl",
|
||||
]]
|
||||
if 'SPACK_MYPY_CHECK_PACKAGES' in os.environ:
|
||||
mypy_arg_sets.append(common_mypy_args + [
|
||||
'--package', 'packages',
|
||||
'--disable-error-code', 'no-redef',
|
||||
])
|
||||
"--show-error-codes",
|
||||
]
|
||||
# not yet, need other updates to enable this
|
||||
# if any([is_package(f) for f in file_list]):
|
||||
# mypy_args.extend(["--package", "packages"])
|
||||
|
||||
returncode = 0
|
||||
for mypy_args in mypy_arg_sets:
|
||||
output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
|
||||
returncode |= mypy_cmd.returncode
|
||||
output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
|
||||
returncode = mypy_cmd.returncode
|
||||
|
||||
rewrite_and_print_output(output, args)
|
||||
rewrite_and_print_output(output, args)
|
||||
|
||||
print_tool_result("mypy", returncode)
|
||||
return returncode
|
||||
@@ -324,29 +318,16 @@ def run_isort(isort_cmd, file_list, args):
|
||||
|
||||
pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
|
||||
replacement = "ERROR: {0} Imports are incorrectly sorted"
|
||||
returncode = [0]
|
||||
returncode = 0
|
||||
for chunk in grouper(file_list, 100):
|
||||
packed_args = isort_args + tuple(chunk)
|
||||
output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
|
||||
returncode |= isort_cmd.returncode
|
||||
|
||||
def process_files(file_list, is_args):
|
||||
for chunk in grouper(file_list, 100):
|
||||
packed_args = is_args + tuple(chunk)
|
||||
output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
|
||||
returncode[0] |= isort_cmd.returncode
|
||||
rewrite_and_print_output(output, args, pat, replacement)
|
||||
|
||||
rewrite_and_print_output(output, args, pat, replacement)
|
||||
|
||||
packages_isort_args = ('--rm', 'spack', '--rm', 'spack.pkgkit', '--rm',
|
||||
'spack.package_defs', '-a', 'from spack.package import *')
|
||||
packages_isort_args = packages_isort_args + isort_args
|
||||
|
||||
# packages
|
||||
process_files(filter(is_package, file_list),
|
||||
packages_isort_args)
|
||||
# non-packages
|
||||
process_files(filter(lambda f: not is_package(f), file_list),
|
||||
isort_args)
|
||||
|
||||
print_tool_result("isort", returncode[0])
|
||||
return returncode[0]
|
||||
print_tool_result("isort", returncode)
|
||||
return returncode
|
||||
|
||||
|
||||
@tool("black")
|
||||
|
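The `returncode = [0]` / `returncode[0] |= ...` pattern in the removed `run_isort` code is the classic workaround for rebinding an outer variable from a nested function on Python 2, where `nonlocal` does not exist: a closure may mutate an object it closes over, but cannot rebind the name itself. A minimal sketch:

    def make_runner():
        returncode = [0]           # a mutable cell the closure can update

        def record(code):
            returncode[0] |= code  # mutation works; `returncode |= code`
                                   # would raise UnboundLocalError here

        record(0)
        record(2)
        return returncode[0]

    print(make_runner())  # 2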
@@ -20,7 +20,7 @@
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.install_test
import spack.package_base
import spack.package
import spack.repo
import spack.report
@@ -189,7 +189,7 @@ def test_run(args):
    # Set up reporter
    setattr(args, 'package', [s.format() for s in test_suite.specs])
    reporter = spack.report.collect_info(
        spack.package_base.PackageBase, 'do_test', args.log_format, args)
        spack.package.PackageBase, 'do_test', args.log_format, args)
    if not reporter.filename:
        if args.log_file:
            if os.path.isabs(args.log_file):
@@ -217,7 +217,7 @@ def test_list(args):
        else set()

    def has_test_and_tags(pkg_class):
        return spack.package_base.has_test_method(pkg_class) and \
        return spack.package.has_test_method(pkg_class) and \
            (not args.tag or pkg_class.name in tagged)

    if args.list_all:
@@ -337,8 +337,6 @@ def _report_suite_results(test_suite, args, constraints):
            pkg_id, status = line.split()
            results[pkg_id] = status

        tty.msg('test specs:')

        failed, skipped, untested = 0, 0, 0
        for pkg_id in test_specs:
            if pkg_id in results:
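A hedged sketch of the tallying in `_report_suite_results`, assuming each results line has the form "<pkg_id> <STATUS>" (the sample ids and statuses below are made up):

    lines = ["zlib-abc123 PASSED", "cmake-def456 FAILED", "mpich-789abc PASSED"]

    results = {}
    for line in lines:
        pkg_id, status = line.split()
        results[pkg_id] = status

    failed = sum(1 for status in results.values() if status == 'FAILED')
    print('%d failed' % failed)  # 1 failed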
@@ -24,7 +24,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.18"
tutorial_branch = "releases/v%d.%d" % spack.spack_version_info[:2]
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
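The computed branch name uses %-formatting against a tuple slice, so the tutorial branch tracks whatever Spack version is running instead of a hard-coded release. The same mechanics, with a stand-in for `spack.spack_version_info`:

    version_info = (0, 17, 2)   # stand-in for spack.spack_version_info
    tutorial_branch = "releases/v%d.%d" % version_info[:2]
    print(tutorial_branch)      # releases/v0.17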
Some files were not shown because too many files have changed in this diff.