Compare commits: v0.18.0 ... versions/g (1 commit)

Commit: dd668047fb
.github/workflows/bootstrap.yml (134 lines changed, vendored)

@@ -24,7 +24,6 @@ jobs:
fedora-clingo-sources:
runs-on: ubuntu-latest
container: "fedora:latest"
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |

@@ -32,25 +31,19 @@ jobs:
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- name: Setup repo and non-root user
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -58,7 +51,6 @@ jobs:
ubuntu-clingo-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:

@@ -69,25 +61,19 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- name: Setup repo and non-root user
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -95,7 +81,6 @@ jobs:
ubuntu-clingo-binaries-and-patchelf:
runs-on: ubuntu-latest
container: "ubuntu:latest"
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:

@@ -105,20 +90,14 @@ jobs:
apt-get install -y \
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- name: Setup repo and non-root user
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |

@@ -126,10 +105,10 @@ jobs:
spack -d solve zlib
tree ~/.spack/bootstrap/store/

opensuse-clingo-sources:
runs-on: ubuntu-latest
container: "opensuse/leap:latest"
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |

@@ -139,37 +118,32 @@ jobs:
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup repo
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- name: Setup repo and non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

macos-clingo-sources:
runs-on: macos-latest
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find --not-buildable cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -178,15 +152,13 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
if: github.repository == 'spack/spack'
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
steps:
- name: Install dependencies
run: |
brew install tree
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Bootstrap clingo

@@ -200,15 +172,13 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
if: github.repository == 'spack/spack'
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
steps:
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Setup repo
- name: Setup repo and non-root user
run: |
git --version
git fetch --unshallow

@@ -223,7 +193,6 @@ jobs:
ubuntu-gnupg-binaries:
runs-on: ubuntu-latest
container: "ubuntu:latest"
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:

@@ -233,20 +202,14 @@ jobs:
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- name: Setup repo and non-root user
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |

@@ -258,7 +221,6 @@ jobs:
ubuntu-gnupg-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:

@@ -269,40 +231,32 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- name: Setup repo and non-root user
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack -d gpg list
tree ~/.spack/bootstrap/store/

macos-gnupg-binaries:
runs-on: macos-latest
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh

@@ -312,27 +266,17 @@ jobs:

macos-gnupg-sources:
runs-on: macos-latest
if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
brew install gawk tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack -d gpg list
tree ~/.spack/bootstrap/store/
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
# introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
# See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
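Every clingo and GnuPG job above performs the same basic check: distrust the binary bootstrap sources, then run a command that forces Spack to bootstrap its dependencies from sources. A minimal local reproduction might look like the following sketch (assumes a fresh clone, so the bootstrap store starts empty; the exact source name, `github-actions` vs. `github-actions-v0.2`, depends on which side of this diff your checkout matches):

```console
$ git clone https://github.com/spack/spack.git && cd spack
$ source share/spack/setup-env.sh
$ spack bootstrap untrust github-actions    # disable the binary buildcache source
$ spack -d solve zlib                       # any solve now bootstraps clingo from sources
$ tree ~/.spack/bootstrap/store/            # inspect what was built
```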
.github/workflows/build-containers.yml (61 lines changed, vendored)

@@ -13,8 +13,6 @@ on:
paths:
- '.github/workflows/build-containers.yml'
- 'share/spack/docker/*'
- 'share/templates/container/*'
- 'lib/spack/spack/container/*'
# Let's also build & tag Spack containers on releases.
release:
types: [published]

@@ -31,22 +29,15 @@ jobs:
# A matrix of Dockerfile paths, associated tags, and which architectures
# they support.
matrix:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:18.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
[centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
[leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
[ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
[ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2

- name: Set Container Tag Normal (Nightly)
run: |

@@ -61,54 +52,40 @@ jobs:
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
echo "versioned=${versioned}" >> $GITHUB_ENV

- name: Generate the Dockerfile
env:
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
- name: Check ${{ matrix.dockerfile[1] }} Exists
run: |
.github/workflows/generate_spack_yaml_containerize.sh
. share/spack/setup-env.sh
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
exit 1;
fi

- name: Upload Dockerfile
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
with:
name: dockerfiles
path: dockerfiles

- name: Set up QEMU
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1

- name: Log in to GitHub Container Registry
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
- name: Build & Deploy ${{ matrix.dockerfile[1] }}
uses: docker/build-push-action@7f9d37fa544684fb73bfe4835ed7214c255ce02b # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
file: share/spack/docker/${{matrix.dockerfile[1]}}
platforms: ${{ matrix.dockerfile[2] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: |
spack/${{ env.container }}
spack/${{ env.versioned }}
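The functional change above is that images are now built from static Dockerfiles under `share/spack/docker/` instead of being generated on the fly with `spack containerize`. The removed generation step amounted to the following sketch (run from a Spack checkout, with `SPACK_YAML_OS` and `GITHUB_REF` set as the workflow does; the generator script itself is shown below):

```console
$ . share/spack/setup-env.sh
$ .github/workflows/generate_spack_yaml_containerize.sh      # writes spack.yaml
$ spack containerize --last-stage=bootstrap | tee Dockerfile
```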
.github/workflows/execute_installer.ps1 (7 lines changed, vendored)

@@ -1,7 +0,0 @@
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();

if ($proc.ExitCode -ne 0) {
    Write-Warning "$_ exited with status code $($proc.ExitCode)"
}
.github/workflows/generate_spack_yaml_containerize.sh (deleted; name inferred from the workflow step above that invokes it)

@@ -1,9 +0,0 @@
#!/bin/bash
(echo "spack:" \
&& echo "  specs: []" \
&& echo "  container:" \
&& echo "    format: docker" \
&& echo "    images:" \
&& echo "      os: \"${SPACK_YAML_OS}\"" \
&& echo "      spack:" \
&& echo "        ref: ${GITHUB_REF}") > spack.yaml
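For reference, the deleted script simply emits a minimal container recipe. A sketch of its output for `SPACK_YAML_OS=ubuntu:18.04` (one of the base images in the matrix above; the ref value is illustrative):

```console
$ export SPACK_YAML_OS="ubuntu:18.04" GITHUB_REF="refs/heads/develop"
$ .github/workflows/generate_spack_yaml_containerize.sh && cat spack.yaml
spack:
  specs: []
  container:
    format: docker
    images:
      os: "ubuntu:18.04"
      spack:
        ref: refs/heads/develop
```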
.github/workflows/macos_python.yml (15 lines changed, vendored)

@@ -22,11 +22,10 @@ on:
jobs:
install_gcc:
name: gcc with clang
if: github.repository == 'spack/spack'
runs-on: macos-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: 3.9
- name: spack install

@@ -37,12 +36,11 @@ jobs:

install_jupyter_clang:
name: jupyter
if: github.repository == 'spack/spack'
runs-on: macos-latest
timeout-minutes: 700
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: 3.9
- name: spack install

@@ -52,11 +50,10 @@ jobs:

install_scipy_clang:
name: scipy, mpl, pd
if: github.repository == 'spack/spack'
runs-on: macos-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: 3.9
- name: spack install
.github/workflows/setup_git.ps1 (12 lines changed, vendored)

@@ -1,12 +0,0 @@
# (c) 2021 Lawrence Livermore National Laboratory

Set-Location spack

git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git config --global core.longpaths true

if ($(git branch --show-current) -ne "develop")
{
    git branch develop origin/develop
}
.github/workflows/system_shortcut_check.ps1 (4 lines changed, vendored)

@@ -1,4 +0,0 @@
param ($systemFolder, $shortcut)

$start = [System.Environment]::GetFolderPath("$systemFolder")
Invoke-Item "$start\Programs\Spack\$shortcut"
.github/workflows/unit_tests.yaml (54 lines changed, vendored)

@@ -15,10 +15,10 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install Python Packages
run: |
pip install --upgrade pip

@@ -31,12 +31,12 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools types-six

@@ -57,7 +57,7 @@ jobs:
packages: ${{ steps.filter.outputs.packages }}
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

@@ -96,7 +96,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
concretizer: ['clingo']
include:
- python-version: 2.7

@@ -106,10 +106,10 @@ jobs:
- python-version: 3.9
concretizer: original
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages

@@ -162,7 +162,7 @@ jobs:
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,${{ matrix.concretizer }}

@@ -171,12 +171,12 @@ jobs:
needs: [ validate, style, changes ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install System packages
run: |
sudo apt-get -y update

@@ -200,7 +200,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: shelltests,linux

@@ -218,7 +218,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- name: Setup repo and non-root user
run: |
git --version

@@ -237,12 +237,12 @@ jobs:
needs: [ validate, style, changes ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install System packages
run: |
sudo apt-get -y update

@@ -274,7 +274,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,clingo

@@ -286,10 +286,10 @@ jobs:
matrix:
python-version: [3.8]
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages

@@ -320,7 +320,7 @@ jobs:
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
fi
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
files: ./coverage.xml

@@ -331,10 +331,10 @@ jobs:
needs: [ validate, style, changes ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2

@@ -350,7 +350,7 @@ jobs:
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,audits
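The test entry points used by these jobs are plain scripts, so the same suites can be run outside CI. A sketch (assumes an unshallowed clone and the Python packages installed above; `SPACK_TEST_SOLVER` selects the concretizer exactly as in the matrix):

```console
$ pip install --upgrade pip six setuptools
$ SPACK_TEST_SOLVER=clingo share/spack/qa/run-unit-tests    # unit test suite
$ share/spack/qa/run-shell-tests                            # shell integration suite
```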
.github/workflows/windows_python.yml (188 lines changed, vendored)

@@ -1,188 +0,0 @@
name: windows tests

on:
push:
branches:
- develop
- releases/**
pull_request:
branches:
- develop
- releases/**
defaults:
run:
shell:
powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
jobs:
validate:
runs-on: windows-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python Packages
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade vermin
- name: vermin (Spack's Core)
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/lib/spack/spack/ spack/lib/spack/llnl/ spack/bin/
- name: vermin (Repositories)
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: windows-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
- name: Run style tests
run: |
spack style
- name: Verify license headers
run: |
python spack\bin\spack license verify
unittest:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
- name: Unit Test
run: |
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
unittest-cmd:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
- name: Command Unit Test
run: |
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
spack unit-test lib/spack/spack/test/cmd --verbose
buildtest:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Build Test
run: |
spack compiler find
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
spack external find cmake
spack external find ninja
spack install abseil-cpp
generate-installer-test:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- name: Disable Windows Symlinks
run: |
git config --global core.symlinks false
shell:
powershell
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Add Light and Candle to Path
run: |
$env:WIX >> $GITHUB_PATH
- name: Run Installer
run: |
.\spack\share\spack\qa\setup_spack.ps1
spack make-installer -s spack -g SILENT pkg
echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
env:
ProgressPreference: SilentlyContinue
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
with:
name: Windows Spack Installer Bundle
path: ${{ env.installer_root }}\pkg\Spack.exe
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
with:
name: Windows Spack Installer
path: ${{ env.installer_root}}\pkg\Spack.msi
execute-installer:
needs: generate-installer-test
runs-on: windows-latest
defaults:
run:
shell: pwsh
steps:
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Setup installer directory
run: |
mkdir -p spack_installer
echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
- uses: actions/download-artifact@v3
with:
name: Windows Spack Installer Bundle
path: ${{ env.spack_installer }}
- name: Execute Bundled Installer
run: |
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();
$LASTEXITCODE
env:
ProgressPreference: SilentlyContinue
- uses: actions/download-artifact@v3
with:
name: Windows Spack Installer
path: ${{ env.spack_installer }}
- name: Execute MSI
run: |
$proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();
$LASTEXITCODE
CHANGELOG.md (218 lines changed)

@@ -1,221 +1,3 @@
# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

## Major features in this release

1. **Concretizer now reuses by default**

   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
   is now the default concretization mode. Spack will try hard to
   resolve dependencies using installed packages or binaries (#30396).

   To avoid reuse and to use the latest package configurations (the
   old default), you can use `spack install --fresh`, or add
   configuration like this to your environment or `concretizer.yaml`:

   ```yaml
   concretizer:
     reuse: false
   ```
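Opting out of reuse for a single command, rather than globally via the configuration above, uses the flag mentioned earlier (a sketch; `zlib` is an arbitrary example spec):

```console
$ spack install --fresh zlib
```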
2. **Finer-grained hashes**

   Spack hashes now include `link`, `run`, *and* `build` dependencies,
   as well as a canonical hash of package recipes. Previously, hashes
   only included `link` and `run` dependencies (though `build`
   dependencies were stored by environments). We coarsened the hash to
   reduce churn in user installations, but the new default concretizer
   behavior mitigates this concern and gets us reuse *and* provenance.
   You will be able to see the build dependencies of new installations
   with `spack find`. Old installations will not change and their
   hashes will not be affected. (#28156, #28504, #30717, #30861)

3. **Improved error messages**

   Error handling with the new concretizer is now done with
   optimization criteria rather than with unsatisfiable cores, and
   Spack reports many more details about conflicting constraints.
   (#30669)

4. **Unify environments when possible**

   Environments have thus far supported `concretization: together` or
   `concretization: separately`. These have been replaced by a new
   preference in `concretizer.yaml`:

   ```yaml
   concretizer:
     unify: [true|false|when_possible]
   ```

   `concretizer:unify:when_possible` will *try* to resolve a fully
   unified environment, but if it cannot, it will create multiple
   configurations of some packages where it has to. For large
   environments that previously had to be concretized separately, this
   can result in a huge speedup (40-50x). (#28941)

5. **Automatically find externals on Cray machines**

   Spack can now automatically discover installed packages in the Cray
   Programming Environment by running `spack external find` (or `spack
   external read-cray-manifest` to *only* query the PE). Packages from
   the PE (e.g., `cray-mpich`) are added to the database with full
   dependency information, and compilers from the PE are added to
   `compilers.yaml`. Available with the June 2022 release of the Cray
   Programming Environment. (#24894, #30428)
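The two discovery commands named above differ only in scope; a sketch of their use on a Cray system:

```console
$ spack external read-cray-manifest    # query only the Cray Programming Environment
$ spack external find                  # general detection, which also picks up the PE
```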
6. **New binary format and hardened signing**

   Spack now has an updated binary format, with improvements for
   security. The new format has a detached signature file, and Spack
   verifies the signature before untarring or decompressing the binary
   package. The previous format embedded the signature in a `tar`
   file, which required the client to run `tar` *before* verifying
   (#30750). Spack can still install from build caches using the old
   format, but we encourage users to switch to the new format going
   forward.

   Production GitLab pipelines have been hardened to securely sign
   binaries. There is now a separate signing stage so that signing
   keys are never exposed to build system code, and signing keys are
   ephemeral and only live as long as the signing pipeline stage.
   (#30753)

7. **Bootstrap mirror generation**

   The `spack bootstrap mirror` command can automatically create a
   mirror for bootstrapping the concretizer and other needed
   dependencies in an air-gapped environment. (#28556)

8. **Nascent Windows support**

   Spack now has initial support for Windows. Spack core has been
   refactored to run in the Windows environment, and a small number of
   packages can now build for Windows. More details are
   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
   (#27021, #28385, many more)

9. **Makefile generation**

   `spack env depfile` can be used to generate a `Makefile` from an
   environment, which can be used to build packages in the environment
   in parallel on a single node, e.g.:

   ```console
   spack -e myenv env depfile > Makefile
   make
   ```

   Spack propagates `gmake` jobserver information to builds so that
   their jobs can share cores. (#30039, #30254, #30302, #30526)

10. **New variant features**

    In addition to being conditional themselves, variants can now have
    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
    that are only possible for certain configurations of a package. (#29530)

    Variants can be
    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
    which prevents them from being enabled or disabled by the
    concretizer. Sticky variants must be set explicitly by users
    on the command line or in `packages.yaml`. (#28630)

    * Allow conditional possible values in variants
    * Add a "sticky" property to variants


## Other new features of note

* Environment views can optionally link only `run` dependencies
  with `link:run` (#29336)
* `spack external find --all` finds library-only packages in
  addition to build dependencies (#28005)
* Customizable `config:license_dir` option (#30135)
* `spack external find --path PATH` takes a custom search path (#30479)
* `spack spec` has a new `--format` argument like `spack find` (#27908)
* `spack concretize --quiet` skips printing concretized specs (#30272)
* `spack info` now has cleaner output and displays test info (#22097)
* Package-level submodule option for git commit versions (#30085, #30037)
* Using `/hash` syntax to refer to concrete specs in an environment
  now works even if `/hash` is not installed. (#30276)
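A few of the options listed above in command form (a sketch: flag spellings are taken from the notes; the spec names and format string are illustrative):

```console
$ spack external find --all                  # include library-only packages
$ spack external find --path /opt/mylibs     # search a custom path
$ spack spec --format "{name}@{version}" zlib
$ spack concretize --quiet
```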
## Major internal refactors

* full hash (see above)
* new develop versioning scheme `0.19.0-dev0`
* Allow for multiple dependencies/dependents from the same package (#28673)
* Splice differing virtual packages (#27919)

## Performance Improvements

* Concretization of large environments with `unify: when_possible` is
  much faster than concretizing separately (#28941, see above)
* Single-pass view generation algorithm is 2.6x faster (#29443)

## Archspec improvements

* `oneapi` and `dpcpp` flag support (#30783)
* better support for `M1` and `a64fx` (#30683)

## Removals and Deprecations

* Spack no longer supports Python `2.6` (#27256)
* Removed deprecated `--run-tests` option of `spack install`;
  use `spack test` (#30461)
* Removed deprecated `spack flake8`; use `spack style` (#27290)

* Deprecate `spack:concretization` config option; use
  `concretizer:unify` (#30038)
* Deprecate top-level module configuration; use module sets (#28659)
* `spack activate` and `spack deactivate` are deprecated in favor of
  environments; will be removed in `0.19.0` (#29430; see also `link:run`
  in #29336 above)

## Notable Bugfixes

* Fix bug that broke locks with many parallel builds (#27846)
* Many bugfixes and consistency improvements for the new concretizer
  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)

## Packages

* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
* Refactored `lua` support: `lua-lang` virtual supports both
  `lua` and `luajit` via new `LuaPackage` build system (#28854)
* PythonPackage: now installs packages with `pip` (#27798)
* Python: improve site_packages_dir handling (#28346)
* Extends: support spec, not just package name (#27754)
* `find_libraries`: search for both .so and .dylib on macOS (#28924)
* Use stable URLs and `?full_index=1` for all github patches (#29239)

## Spack community stats

* 6,416 total packages, 458 new since `v0.17.0`
* 219 new Python packages
* 60 new R packages
* 377 people contributed to this release
* 337 committers to packages
* 85 committers to core


# v0.17.2 (2022-04-13)

### Spack bugfixes

* Fix --reuse with upstreams set in an environment (#29680)
* config add: fix parsing of validator error to infer type from oneOf (#29475)
* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
* Use Spec.constrain to construct spec lists for stacks (#28783)
* Fix bug occurring when searching for inherited patches in packages (#29574)
* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)

### Spack Bugfixes
bin/haspywin.py (deleted; name inferred from the spack_cmd.bat reference below)

@@ -1,20 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import subprocess
import sys


def getpywin():
    try:
        import win32con  # noqa
    except ImportError:
        subprocess.check_call(
            [sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
        subprocess.check_call(
            [sys.executable, "-m", "pip", "-q", "install", "pywin32"])


if __name__ == '__main__':
    getpywin()
bin/spack.bat (223 lines changed)

@@ -1,223 +0,0 @@
:: Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
:: Spack Project Developers. See the top-level COPYRIGHT file for details.
::
:: SPDX-License-Identifier: (Apache-2.0 OR MIT)
::#######################################################################
::
:: This file is part of Spack and sets up the spack environment for batch,
:: This includes environment modules and lmod support,
:: and it also puts spack in your path. The script also checks that at least
:: module support exists, and provides suggestions if it doesn't. Source
:: it like this:
::
::    . /path/to/spack/install/spack_cmd.bat
::
@echo off

set spack=%SPACK_ROOT%\bin\spack

::#######################################################################
:: This is a wrapper around the spack command that forwards calls to
:: 'spack load' and 'spack unload' to shell functions. This in turn
:: allows them to be used to invoke environment modules functions.
::
:: 'spack load' is smarter than just 'load' because it converts its
:: arguments into a unique Spack spec that is then passed to module
:: commands. This allows the user to use packages without knowing all
:: their installation details.
::
:: e.g., rather than requiring a full spec for libelf, the user can type:
::
::    spack load libelf
::
:: This will first find the available libelf module file and use a
:: matching one. If there are two versions of libelf, the user would
:: need to be more specific, e.g.:
::
::    spack load libelf@0.8.13
::
:: This is very similar to how regular spack commands work and it
:: avoids the need to come up with a user-friendly naming scheme for
:: spack module files.
::#######################################################################

:_sp_shell_wrapper
set "_sp_flags="
set "_sp_args="
set "_sp_subcommand="
setlocal enabledelayedexpansion
:: commands have the form '[flags] [subcommand] [args]'
:: flags will always start with '-', e.g. --help or -V
:: subcommands will never start with '-'
:: everything after the subcommand is an arg
for %%x in (%*) do (
  set t="%%~x"
  if "!t:~0,1!" == "-" (
    if defined _sp_subcommand (
      :: We already have a subcommand, processing args now
      set "_sp_args=!_sp_args! !t!"
    ) else (
      set "_sp_flags=!_sp_flags! !t!"
      shift
    )
  ) else if not defined _sp_subcommand (
    set "_sp_subcommand=!t!"
    shift
  ) else (
    set "_sp_args=!_sp_args! !t!"
    shift
  )
)

:: --help, -h and -V flags don't require further output parsing.
:: If we encounter, execute and exit
if defined _sp_flags (
  if NOT "%_sp_flags%"=="%_sp_flags:-h=%" (
    python "%spack%" %_sp_flags%
    exit /B 0
  ) else if NOT "%_sp_flags%"=="%_sp_flags:--help=%" (
    python "%spack%" %_sp_flags%
    exit /B 0
  ) else if NOT "%_sp_flags%"=="%_sp_flags:-V=%" (
    python "%spack%" %_sp_flags%
    exit /B 0
  )
)
:: pass parsed variables outside of local scope. Need to do
:: this because delayedexpansion can only be set by setlocal
echo %_sp_flags%>flags
echo %_sp_args%>args
echo %_sp_subcommand%>subcmd
endlocal
set /p _sp_subcommand=<subcmd
set /p _sp_flags=<flags
set /p _sp_args=<args
set str_subcommand=%_sp_subcommand:"='%
set str_flags=%_sp_flags:"='%
set str_args=%_sp_args:"='%
if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
if "%str_args%"=="ECHO is off." (set "_sp_args=")
del subcmd
del flags
del args

:: Filter out some commands. For any others, just run the command.
if "%_sp_subcommand%" == "cd" (
  goto :case_cd
) else if "%_sp_subcommand%" == "env" (
  goto :case_env
) else if "%_sp_subcommand%" == "load" (
  goto :case_load
) else if "%_sp_subcommand%" == "unload" (
  goto :case_load
) else (
  goto :default_case
)

::#######################################################################

:case_cd
:: Check for --help or -h
:: TODO: This is not exactly the same as setup-env.
:: In setup-env, '--help' or '-h' must follow the cd
:: Here, they may be anywhere in the args
if defined _sp_args (
  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
    python "%spack%" cd -h
    goto :end_switch
  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
    python "%spack%" cd -h
    goto :end_switch
  )
)

for /F "tokens=* USEBACKQ" %%F in (
  `python "%spack%" location %_sp_args%`) do (
  set "LOC=%%F"
)
for %%Z in ("%LOC%") do if EXIST %%~sZ\NUL (cd /d "%LOC%")
goto :end_switch

:case_env
:: If no args or args contain --bat or -h/--help: just execute.
if NOT defined _sp_args (
  goto :default_case
)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
  goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
  goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
  goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
  for /f "tokens=* USEBACKQ" %%I in (
    `call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
  ) do %%I
) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
  for /f "tokens=* USEBACKQ" %%I in (
    `call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
  ) do %%I
) else (
  goto :default_case
)
goto :end_switch

:case_load
:: If args contain --sh, --csh, or -h/--help: just execute.
if defined _sp_args (
  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
    goto :default_case
  ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
    goto :default_case
  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
    goto :default_case
  )
)

for /f "tokens=* USEBACKQ" %%I in (
  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
)
goto :end_switch

:case_unload
goto :case_load

:default_case
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
goto :end_switch

:end_switch
exit /B %ERRORLEVEL%


::########################################################################
:: Prepends directories to path, if they exist.
::     pathadd /path/to/dir            # add to PATH
:: or  pathadd OTHERPATH /path/to/dir  # add to OTHERPATH
::########################################################################

:_spack_pathadd
set "_pa_varname=PATH"
set "_pa_new_path=%~1"
if NOT "%~2" == "" (
  set "_pa_varname=%~1"
  set "_pa_new_path=%~2"
)
set "_pa_oldvalue=%_pa_varname%"
for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
  if defined %_pa_oldvalue% (
    set "_pa_varname=%_pa_new_path%:%_pa_oldvalue%"
  ) else (
    set "_pa_varname=%_pa_new_path%"
  )
)
exit /b 0

:: set module system roots
:_sp_multi_pathadd
for %%I in (%~2) do (
  for %%Z in (%_sp_compatible_sys_types%) do (
    :pathadd "%~1" "%%I\%%Z"
  )
)
exit /B %ERRORLEVEL%
bin/spack_cmd.bat (deleted; identified by the script's own header comment)

@@ -1,72 +0,0 @@
@ECHO OFF
setlocal EnableDelayedExpansion
:: (c) 2021 Lawrence Livermore National Laboratory
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
::
:: source_dir/spack/bin/spack_cmd.bat
::
pushd %~dp0..
set SPACK_ROOT=%CD%
pushd %CD%\..
set spackinstdir=%CD%
popd


:: Check if Python is on the PATH
if not defined python_pf_ver (
  (for /f "delims=" %%F in ('where python.exe') do (
    set "python_pf_ver=%%F"
    goto :found_python
  ) ) 2> NUL
)
:found_python
if not defined python_pf_ver (
  :: If not, look for Python from the Spack installer
  :get_builtin
  (for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
    set "python_ver=%%g")) 2> NUL

  if not defined python_ver (
    echo Python was not found on your system.
    echo Please install Python or add Python to your PATH.
  ) else (
    set "py_path=!spackinstdir!\!python_ver!"
    set "py_exe=!py_path!\python.exe"
  )
  goto :exitpoint
) else (
  :: Python is already on the path
  set "py_exe=!python_pf_ver!"
  (for /F "tokens=* USEBACKQ" %%F in (
    `"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
  if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
  goto :exitpoint
)
:exitpoint

set "PATH=%SPACK_ROOT%\bin\;%PATH%"
if defined py_path (
  set "PATH=%py_path%;%PATH%"
)

if defined py_exe (
  "%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
  "%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
)

set "EDITOR=notepad"

DOSKEY spacktivate=spack env activate $*

@echo **********************************************************************
@echo ** Spack Package Manager
@echo **********************************************************************

IF "%1"=="" GOTO CONTINUE
set
GOTO:EOF

:continue
set PROMPT=[spack] %PROMPT%
%comspec% /k
@@ -1,10 +0,0 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
$Env:SPACK_PS1_PATH="$PSScriptRoot\..\share\spack\setup-env.ps1"
|
||||
& (Get-Process -Id $pid).Path -NoExit {
|
||||
. $Env:SPACK_PS1_PATH ;
|
||||
Push-Location $ENV:SPACK_ROOT
|
||||
}
@@ -6,17 +6,27 @@ bootstrap:
  # by Spack is installed in a "store" subfolder of this root directory
  root: $user_cache_path/bootstrap
  # Methods that can be used to bootstrap software. Each method may or
  # may not be able to bootstrap all the software that Spack needs,
  # may not be able to bootstrap all of the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions-v0.2'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
  - name: 'github-actions-v0.1'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
  - name: 'spack-install'
    metadata: $spack/share/spack/bootstrap/spack-install
  - name: 'github-actions'
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
      homepage: https://github.com/alalazo/spack-bootstrap-mirrors
      releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
  # This method is just Spack bootstrapping the software it needs from sources.
  # It has been added here so that users can selectively disable bootstrapping
  # from sources by "untrusting" it.
  - name: spack-install
    type: install
    description: |
      Specs built from sources by Spack. May take a long time.
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
    github-actions-v0.2: true
    github-actions: true
    spack-install: true
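# A sketch of disabling one method (hypothetical choice): to stop
# bootstrapping from sources, untrust the corresponding entry, either by
# editing this file:
#
#   trusted:
#     spack-install: false
#
# or by running `spack bootstrap untrust spack-install`.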
@@ -14,23 +14,4 @@ concretizer:
  # concretizing specs. If `true`, we'll try to use as many installs/binaries
  # as possible, rather than building. If `false`, we'll always give you a fresh
  # concretization.
  reuse: true
  # Options that tune which targets are considered for concretization. The
  # concretization process is very sensitive to the number of targets, and the time
  # needed to reach a solution increases noticeably with the number of targets
  # considered.
  targets:
    # Determine whether we want to target specific or generic microarchitectures.
    # An example of the first kind might be for instance "skylake" or "bulldozer",
    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
    granularity: microarchitectures
    # If "false" allow targets that are incompatible with the current host (for
    # instance concretize with target "icelake" while running on "haswell").
    # If "true" only allow targets that are compatible with the host.
    host_compatible: true
  # When "true" concretize root specs of environments together, so that each unique
  # package in an environment corresponds to one concrete spec. This ensures
  # environments can always be activated. When "false" perform concretization separately
  # on each root spec, allowing different versions and variants of the same package in
  # an environment.
  unify: false
  reuse: false
@@ -33,9 +33,6 @@ config:
  template_dirs:
    - $spack/share/spack/templates

  # Directory where licenses should be located
  license_dir: $spack/etc/spack/licenses

  # Temporary locations Spack can try to use for builds.
  #
  # Recommended options are given below.
@@ -35,10 +35,13 @@ modules:

  # These are configurations for the module set named "default"
  default:
    # These values are defaulted in the code. They are not defaulted here so
    # that we can enable backwards compatibility with the old syntax more
    # easily (old value is in the config yaml, config:module_roots)
    # Where to install modules
    roots:
      tcl: $spack/share/spack/modules
      lmod: $spack/share/spack/lmod
    # roots:
    #  tcl: $spack/share/spack/modules
    #  lmod: $spack/share/spack/lmod
    # What type of modules to use
    enable:
      - tcl
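# A sketch of overriding the new-style roots (hypothetical site paths),
# e.g. in a higher-precedence modules.yaml:
#
#   modules:
#     default:
#       roots:
#         tcl: /opt/modules/tcl
#         lmod: /opt/modules/lmod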
@@ -35,8 +35,7 @@ packages:
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
    libllvm: [llvm, llvm-amdgpu]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]
    lua-lang: [lua, lua-luajit]
    mariadb-client: [mariadb-c-client, mariadb]
    mkl: [intel-mkl]
    mpe: [mpe2]
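# A sketch of how a user would reorder one of these provider lists
# (hypothetical preference) in their own packages.yaml:
#
#   packages:
#     all:
#       providers:
#         lua-lang: [lua-luajit-openresty]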
@@ -1,5 +0,0 @@
config:
  locks: false
  concretizer: original
  build_stage::
    - '$spack/.staging'
@@ -192,32 +192,32 @@ you can use them to customize an installation in :ref:`sec-specs`.
Reusing installed dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

By default, when you run ``spack install``, Spack tries hard to reuse existing installations
as dependencies, either from a local store or from remote buildcaches if configured.
This minimizes unwanted rebuilds of common dependencies, in particular if
you update Spack frequently.
.. warning::

In case you want the latest versions and configurations to be installed instead,
you can add the ``--fresh`` option:
   The ``--reuse`` option described here will become the default installation
   method in the next Spack version, and you will be able to get the current
   behavior by using ``spack install --fresh``.

By default, when you run ``spack install``, Spack tries to build a new
version of the package you asked for, along with updated versions of
its dependencies. This gets you the latest versions and configurations,
but it can result in unwanted rebuilds if you update Spack frequently.

If you want Spack to try hard to reuse existing installations as dependencies,
you can add the ``--reuse`` option:

.. code-block:: console

   $ spack install --fresh mpich
   $ spack install --reuse mpich

Reusing installations in this mode is "accidental", and happens only if
there's a match between existing installations and what Spack would have installed
anyhow.

You can use the ``spack spec -I mpich`` command to see what
This will not do anything if ``mpich`` is already installed. If ``mpich``
is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
then ``mpich`` will be built with the installed versions, if possible.
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
will be reused and what will be built before you install.
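As a minimal sketch (output elided), you would inspect install status before
committing to a build:

.. code-block:: console

   $ spack spec -I mpich

Already-installed specs are marked in the first column of the output, so you
can tell at a glance which dependencies would be reused.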

You can configure Spack to use the ``--fresh`` behavior by default in
``concretizer.yaml``:

.. code-block:: yaml

   concretizer:
     reuse: false

You can configure Spack to use the ``--reuse`` behavior by default in
``concretizer.yaml``.
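A minimal sketch of that setting, mirroring the block above:

.. code-block:: yaml

   concretizer:
     reuse: true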

.. _cmd-spack-uninstall:

@@ -1723,8 +1723,8 @@ Activating Extensions in a View

Another way to use extensions is to create a view, which merges the
python installation along with the extensions into a single prefix.
See :ref:`configuring_environment_views` for a more in-depth description
of views.
See :ref:`filesystem-views` for a more in-depth description of views and
:ref:`cmd-spack-view` for usage of the ``spack view`` command.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions Globally
@@ -50,13 +50,6 @@ build cache files for the "ninja" spec:
Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.

.. warning::

   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
   will be able to verify and install binary caches both in the new and in the old format.
   Support for using the old format is expected to end in v0.19, so we advise users to
   recreate relevant buildcaches using Spack v0.18 or higher.

---------------------------------------
Finding or installing build cache files
---------------------------------------
@@ -1,160 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` Section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
     [B] MISSING "gpg2": required to sign/verify buildcaches

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing, and it gives detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretizes a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack  libassuan@2.5.5  libgpg-error@1.42  libksba@1.5.1  pinentry@1.1.1  zlib@1.2.11
   gnupg@2.3.1             libgcrypt@1.9.3  libiconv@1.16      npth@1.6       python@3.8

If needed, you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default Spack is
configured to try bootstrapping from pre-built binaries first, and to
fall back to bootstrapping from sources if that fails.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes, though, Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping.

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
@@ -5,9 +5,9 @@

.. _build-settings:

================================
Package Settings (packages.yaml)
================================
===================
Build Customization
===================

Spack allows you to customize how your software is built through the
``packages.yaml`` file. Using it, you can make Spack prefer particular
@@ -219,65 +219,33 @@ Concretizer options
but you can also use ``concretizer.yaml`` to customize aspects of the
algorithm it uses to select the dependencies you install:

.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
   :language: yaml
.. code-block:: yaml

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Reuse already installed packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   concretizer:
     # Whether to consider installed packages or packages from buildcaches when
     # concretizing specs. If `true`, we'll try to use as many installs/binaries
     # as possible, rather than building. If `false`, we'll always give you a fresh
     # concretization.
     reuse: false

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
^^^^^^^^^
``reuse``
^^^^^^^^^

This controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:
``package.py`` files and ``packages.yaml`` (``false``).

.. code-block:: console
You can use ``spack install --reuse`` to enable reuse for a single installation,
and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
enabled by default.

   % spack install --reuse <spec>
.. note::

to enable reuse for a single installation, and you can use:
   ``reuse: false`` is the current default, but ``reuse: true`` will be the default
   in the next Spack release. You will still be able to use ``spack install --fresh``
   to get the old behavior.

.. code-block:: console

   spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.

The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
If set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: microarchitectures

Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
compatibility. If instead the option is set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic

Spack will consider only generic microarchitectures. For instance, when running on a
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.

The ``host_compatible`` option is a Boolean option that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.
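Putting the two options together, a sketch of a cross-concretization setup
(hypothetical values; both keys are the ones described above):

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic
       host_compatible: false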

.. _package-preferences:

@@ -39,7 +39,6 @@ on these ideas for each distinct build system that Spack supports:

   build_systems/autotoolspackage
   build_systems/cmakepackage
   build_systems/cachedcmakepackage
   build_systems/mesonpackage
   build_systems/qmakepackage
   build_systems/sippackage
@@ -48,12 +47,10 @@ on these ideas for each distinct build system that Spack supports:
   :maxdepth: 1
   :caption: Language-specific

   build_systems/luapackage
   build_systems/octavepackage
   build_systems/perlpackage
   build_systems/pythonpackage
   build_systems/rpackage
   build_systems/racketpackage
   build_systems/rubypackage

.. toctree::
@@ -433,7 +433,7 @@ For example:
.. code-block:: python

   variant('profiler', when='@2.0:')
   config_args += self.with_or_without('profiler')
   config_args += self.with_or_without('profiler')

will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
below ``2.0``.
@@ -1,123 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   EOF
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, and accepts all of the same options and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
you may need to add a few arguments yourself, and the
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible cmake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.

A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
       return entries

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
       return entries

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
@@ -159,85 +159,6 @@ and CMake simply ignores the empty command line argument. For example the follow
will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CMake arguments provided by Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The following default arguments are controlled by Spack:


``CMAKE_INSTALL_PREFIX``
------------------------

Is set to the package's install directory.


``CMAKE_PREFIX_PATH``
---------------------

CMake finds dependencies through calls to ``find_package()``, ``find_program()``,
``find_library()``, ``find_file()``, and ``find_path()``, which use a list of search
paths from ``CMAKE_PREFIX_PATH``. Spack sets this variable to a list of prefixes of the
spec's transitive dependencies.

For troubleshooting cases where CMake fails to find a dependency, add the
``--debug-find`` flag to ``cmake_args``.
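A minimal sketch of that flag in a hypothetical package (assuming a CMake new
enough to support ``--debug-find``):

.. code-block:: python

   def cmake_args(self):
       # Ask CMake to trace why each find_package()/find_library() call
       # succeeds or fails
       return ['--debug-find']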

``CMAKE_BUILD_TYPE``
--------------------

Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
dictate which level of optimization to use. In order to ensure
uniformity across packages, the ``CMakePackage`` base class adds
a variant to control this:

.. code-block:: python

   variant('build_type', default='RelWithDebInfo',
           description='CMake build type',
           values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
package overrides the default variant with:

.. code-block:: python

   variant('build_type', default='DebugRelease',
           description='The build type to build',
           values=('Debug', 'Release', 'DebugRelease'))

For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html


``CMAKE_INSTALL_RPATH`` and ``CMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``
--------------------------------------------------------------------

CMake uses different RPATHs during the build and after installation, so that executables
can locate the libraries they're linked to during the build, and installed executables
do not have RPATHs to build directories. In Spack, we have to make sure that RPATHs are
set properly after installation.

Spack sets ``CMAKE_INSTALL_RPATH`` to a list of ``<prefix>/lib`` or ``<prefix>/lib64``
directories of the spec's link-type dependencies. Apart from that, it sets
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, which should add RPATHs for directories of
linked libraries not in the directories covered by ``CMAKE_INSTALL_RPATH``.

Usually it's enough to set only ``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, but the
reason to provide both options is that packages may dynamically open shared libraries,
which CMake cannot detect. In those cases, the RPATHs from ``CMAKE_INSTALL_RPATH`` are
used as search paths.

.. note::

   Some packages provide stub libraries, which contain an interface for linking without
   an implementation. When using such libraries, it's best to override the option
   ``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=OFF`` in ``cmake_args``, so that stub libraries
   are not used at runtime.
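A sketch of that override in a hypothetical package (``self.define`` is the
helper that formats the ``-D`` flag):

.. code-block:: python

   def cmake_args(self):
       # Keep stub-library directories out of the install RPATH
       return [self.define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False)]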

^^^^^^^^^^
Generators
@@ -275,6 +196,36 @@ generators, but it should be simple to add support for alternative
generators. For more information on CMake generators, see:
https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html

^^^^^^^^^^^^^^^^
CMAKE_BUILD_TYPE
^^^^^^^^^^^^^^^^

Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
dictate which level of optimization to use. In order to ensure
uniformity across packages, the ``CMakePackage`` base class adds
a variant to control this:

.. code-block:: python

   variant('build_type', default='RelWithDebInfo',
           description='CMake build type',
           values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
package overrides the default variant with:

.. code-block:: python

   variant('build_type', default='DebugRelease',
           description='The build type to build',
           values=('Debug', 'Release', 'DebugRelease'))

For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CMakeLists.txt in a sub-directory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1,105 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _luapackage:

------------
LuaPackage
------------

LuaPackage is a helper for the common case of Lua packages that provide
a rockspec file. This is not meant to take a rock archive, but to build
a source archive or repository that provides a rockspec, which should cover
most Lua packages. If a Lua package builds with Make rather than
LuaRocks, prefer MakefilePackage.

^^^^^^
Phases
^^^^^^

The ``LuaPackage`` base class comes with the following phases:

#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
#. ``preprocess`` - adjust sources or rockspec to fix build
#. ``install`` - install the project

By default, these phases run:

.. code-block:: console

   # If the archive is a source rock
   $ luarocks unpack <archive>.src.rock
   $ # preprocess is a noop by default
   $ luarocks make <name>.rockspec

Any of these phases can be overridden in your package as necessary.

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^

Packages that use the Lua/LuaRocks build system can be identified by the
presence of a ``*.rockspec`` file in their source tree, or can be fetched as
a source rock archive (``.src.rock``). This file declares things like build
instructions and dependencies; the ``.src.rock`` also contains all code.

It is common for the rockspec file to list the Lua version required in
a dependency. The LuaPackage class adds appropriate dependencies on a Lua
implementation, but it is a good idea to specify the version required with
a ``depends_on`` statement. The block normally will be a table definition like
this:

.. code-block:: lua

   dependencies = {
      "lua >= 5.1",
   }

The LuaPackage class supports source repositories and archives containing
a rockspec and directly downloading source rock files. It *does not* support
downloading dependencies listed inside a rockspec, and thus does not support
directly downloading a rockspec as an archive.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

All base dependencies are added by the build system, but LuaRocks is run in a
way that avoids downloading extra Lua dependencies during the build. If the package needs
Lua libraries outside the standard set, they should be added as dependencies.

To specify a Lua version constraint but allow all Lua implementations, prefer
to use ``depends_on("lua-lang@5.1:5.1.99")`` to express any 5.1 compatible
version. If the package requires LuaJit rather than Lua,
a ``depends_on("luajit")`` should be used to ensure a LuaJit distribution is
used instead of the Lua interpreter. Alternately, if only interpreted Lua will
work ``depends_on("lua")`` will express that.
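A sketch of those constraints in a hypothetical package:

.. code-block:: python

   class LuaExample(LuaPackage):
       """Hypothetical Lua package."""

       # Any Lua 5.1-compatible implementation, interpreter or JIT
       depends_on('lua-lang@5.1:5.1.99')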

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to luarocks make
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you need to pass any arguments to the ``luarocks make`` call, you can
override the ``luarocks_args`` method like so:

.. code-block:: python

   def luarocks_args(self):
       return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:

.. code-block:: python

   def luarocks_args(self):
       return ["CFLAGS='-Wno-error=implicit-function-declaration'"]

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on the LuaRocks build system, see:
https://luarocks.org/
@@ -645,7 +645,8 @@ are not yet in Spack, and Spack contains many Python packages that are
not yet in Anaconda. The main advantage of Spack over Anaconda is its
ability to choose a specific compiler and BLAS/LAPACK or MPI library.
Spack also has better platform support for supercomputers, and can build
optimized binaries for your specific microarchitecture.
optimized binaries for your specific microarchitecture. On the other hand,
Anaconda offers Windows support.

^^^^^^^^^^^^^^^^^^^^^^
External documentation
@@ -1,46 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _racketpackage:

-------------
RacketPackage
-------------

Much like Python, Racket packages and modules have their own special build system.
To learn more about the specifics of the Racket package system, please refer to the
`Racket Docs <https://docs.racket-lang.org/pkg/cmdline.html>`_.

^^^^^^
Phases
^^^^^^

The ``RacketPackage`` base class provides an ``install`` phase that
can be overridden, corresponding to the use of:

.. code-block:: console

   $ raco pkg install

^^^^^^^
Caveats
^^^^^^^

In principle, ``raco`` supports a second, ``setup`` phase; however, we have not
implemented this separately, as in normal circumstances, ``install`` also handles
running ``setup`` automatically.

Unlike Python, Racket currently only supports two installation scopes for packages, user
or system, and keeps a registry of installed packages at each scope in its configuration files.
This means we can't simply compose a "``RACKET_PATH``" environment variable listing all of the
places packages are installed, and update this at will.

Unfortunately this means that all currently installed packages which extend Racket via ``raco pkg install``
are accessible whenever Racket is accessible.

Additionally, because Spack does not implement uninstall hooks, uninstalling a Spack ``rkt-`` package
will have no effect on the ``raco`` installed packages visible to your Racket installation.
Instead, you must manually run ``raco pkg remove`` to keep the two package managers in a mutually
consistent state.
@@ -95,7 +95,7 @@ class of your package. For example, you can add it to your
       # Set up the hip macros needed by the build
       args.extend([
           '-DENABLE_HIP=ON',
           '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
           '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
       rocm_archs = spec.variants['amdgpu_target'].value
       if 'none' not in rocm_archs:
           args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
@@ -23,10 +23,7 @@
import sys
from glob import glob

from docutils.statemachine import StringList
from sphinx.domains.python import PythonDomain
from sphinx.ext.apidoc import main as sphinx_apidoc
from sphinx.parsers import RSTParser

# -- Spack customizations -----------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
@@ -85,6 +82,9 @@
#
# Disable duplicate cross-reference warnings.
#
from sphinx.domains.python import PythonDomain


class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if 'refspecific' in node:
@@ -92,20 +92,8 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        return super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode)

#
# Disable tabs to space expansion in code blocks
# since Makefiles require tabs.
#
class NoTabExpansionRSTParser(RSTParser):
    def parse(self, inputstring, document):
        if isinstance(inputstring, str):
            lines = inputstring.splitlines()
            inputstring = StringList(lines, document.current_source)
        super().parse(inputstring, document)

def setup(sphinx):
    sphinx.add_domain(PatchedPythonDomain, override=True)
    sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)

# -- General configuration -----------------------------------------------------

@@ -192,7 +180,6 @@ def setup(sphinx):
    ('py:class', '_frozen_importlib_external.SourceFileLoader'),
    # Spack classes that are private and we don't want to expose
    ('py:class', 'spack.provider_index._IndexBase'),
    ('py:class', 'spack.repo._PrependFileLoader'),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -5,9 +5,9 @@

.. _config-yaml:

============================
Spack Settings (config.yaml)
============================
==============
Basic Settings
==============

Spack's basic configuration options are set in ``config.yaml``. You can
see the default settings by looking at
@@ -72,6 +72,21 @@ used to configure module names.
   packages have been installed will prevent Spack from being
   able to find the old installation directories.

--------------------
``module_roots``
--------------------

Controls where Spack installs generated module files. You can customize
the location for each type of module, e.g.:

.. code-block:: yaml

   module_roots:
     tcl: $spack/share/spack/modules
     lmod: $spack/share/spack/lmod

See :ref:`modules` for details.

--------------------
``build_stage``
--------------------
@@ -37,6 +37,8 @@ Here is an example ``config.yaml`` file:

   config:
     install_tree: $spack/opt/spack
     module_roots:
       lmod: $spack/share/spack/lmod
     build_stage:
       - $tempdir/$user/spack-stage
       - ~/.spack/stage
@@ -251,6 +253,8 @@ your configurations look like this:

   config:
     install_tree: $spack/opt/spack
     module_roots:
       lmod: $spack/share/spack/lmod
     build_stage:
       - $tempdir/$user/spack-stage
       - ~/.spack/stage
@@ -274,6 +278,8 @@ command:

   $ spack config get config
   config:
     install_tree: /some/other/directory
     module_roots:
       lmod: $spack/share/spack/lmod
     build_stage:
       - $tempdir/$user/spack-stage
       - ~/.spack/stage
@@ -339,11 +345,13 @@ higher-precedence scope is *prepended* to the defaults. ``spack config
get config`` shows the result:

.. code-block:: console
   :emphasize-lines: 5-8
   :emphasize-lines: 7-10

   $ spack config get config
   config:
     install_tree: /some/other/directory
     module_roots:
       lmod: $spack/share/spack/lmod
     build_stage:
       - /lustre-scratch/$user/spack
       - ~/mystage
@@ -367,11 +375,13 @@ user config looked like this:
The merged configuration would look like this:

.. code-block:: console
   :emphasize-lines: 5-6
   :emphasize-lines: 7-8

   $ spack config get config
   config:
     install_tree: /some/other/directory
     module_roots:
       lmod: $spack/share/spack/lmod
     build_stage:
       - /lustre-scratch/$user/spack
       - ~/mystage
@@ -492,6 +502,9 @@ account all scopes. For example, to see the fully merged
     template_dirs:
       - $spack/templates
     directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
     module_roots:
       tcl: $spack/share/spack/modules
       lmod: $spack/share/spack/lmod
     build_stage:
       - $tempdir/$user/spack-stage
       - ~/.spack/stage
@@ -539,6 +552,9 @@ down the problem:
   /home/myuser/spack/etc/spack/defaults/config.yaml:23  template_dirs:
   /home/myuser/spack/etc/spack/defaults/config.yaml:24    - $spack/templates
   /home/myuser/spack/etc/spack/defaults/config.yaml:28  directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
   /home/myuser/spack/etc/spack/defaults/config.yaml:32  module_roots:
   /home/myuser/spack/etc/spack/defaults/config.yaml:33    tcl: $spack/share/spack/modules
   /home/myuser/spack/etc/spack/defaults/config.yaml:34    lmod: $spack/share/spack/lmod
   /home/myuser/spack/etc/spack/defaults/config.yaml:49  build_stage:
   /home/myuser/spack/etc/spack/defaults/config.yaml:50    - $tempdir/$user/spack-stage
   /home/myuser/spack/etc/spack/defaults/config.yaml:51    - ~/.spack/stage
@@ -59,8 +59,7 @@ other techniques to minimize the size of the final image:
   && echo "  specs:" \
   && echo "  - gromacs+mpi" \
   && echo "  - mpich" \
   && echo "  concretizer: together" \
   && echo "    unify: true" \
   && echo "  concretization: together" \
   && echo "  config:" \
   && echo "    install_tree: /opt/software" \
   && echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -246,8 +245,7 @@ software is respectively built and installed:
   && echo "  specs:" \
   && echo "  - gromacs+mpi" \
   && echo "  - mpich" \
   && echo "  concretizer:" \
   && echo "    unify: true" \
   && echo "  concretization: together" \
   && echo "  config:" \
   && echo "    install_tree: /opt/software" \
   && echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -368,8 +366,7 @@ produces, for instance, the following ``Dockerfile``:
   && echo "  externals:" \
   && echo "  - spec: cuda%gcc" \
   && echo "    prefix: /usr/local/cuda" \
   && echo "  concretizer:" \
   && echo "    unify: true" \
   && echo "  concretization: together" \
   && echo "  config:" \
   && echo "    install_tree: /opt/software" \
   && echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -1057,39 +1057,39 @@ Release branches
^^^^^^^^^^^^^^^^

There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
diagram of how Spack release branches work::

    o    branch: develop  (latest version, v0.19.0.dev0)
    o    branch: develop  (latest version)
    |
    o
    | o  branch: releases/v0.18, tag: v0.18.1
    o |
    | o  tag: v0.18.0
    o |
    | o
    o    merge v0.14.1 into develop
    |\
    | o  branch: releases/v0.14, tag: v0.14.1
    o |  merge v0.14.0 into develop
    |\|
    | o  tag: v0.14.0
    |/
    o
    |
    o
    | o  branch: releases/v0.17, tag: v0.17.2
    o |
    | o  tag: v0.17.1
    o |
    | o  tag: v0.17.0
    o    merge v0.13.2 into develop
    |\
    | o  branch: releases/v0.13, tag: v0.13.2
    o |  merge v0.13.1 into develop
    |\|
    | o  tag: v0.13.1
    o |  merge v0.13.0 into develop
    |\|
    | o  tag: v0.13.0
    o |
    | o
    |/
    o

The ``develop`` branch has the latest contributions, and nearly all pull
requests target ``develop``. The ``develop`` branch will report that its
version is that of the next **major** release with a ``.dev0`` suffix.
requests target ``develop``.

Each Spack release series also has a corresponding branch, e.g.
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
@@ -1100,20 +1100,12 @@ packages. They should generally only contain fixes to the Spack core.
However, sometimes priorities are such that new functionality needs to
be added to a minor release.

Both major and minor releases are tagged. As a convenience, we also tag
the latest release as ``releases/latest``, so that users can easily check
it out to get the latest stable version. See :ref:`updating-latest-release`
for more details.

.. note::

   Older spack releases were merged **back** into develop so that we could
   do fancy things with tags, but since tarballs and many git checkouts do
   not have tags, this proved overly complex and confusing.

   We have since converted to using `PEP 440 <https://peps.python.org/pep-0440/>`_
   compliant versions. `See here <https://github.com/spack/spack/pull/25267>`_ for
   details.
Both major and minor releases are tagged. After each release, we merge
the release branch back into ``develop`` so that the version bump and any
other release-specific changes are visible in the mainline. As a
convenience, we also tag the latest release as ``releases/latest``,
so that users can easily check it out to get the latest
stable version. See :ref:`merging-releases` for more details.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Scheduling work for releases
@@ -1171,11 +1163,10 @@ completed, the steps to make the major release are:
   ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
   branch if you are preparing the ``X.Y.0`` release.

#. Remove the ``dev0`` development release segment from the version tuple in
   ``lib/spack/spack/__init__.py``.
#. Bump the version in ``lib/spack/spack/__init__.py``.

   The version number itself should already be correct and should not be
   modified.
   See `this example from 0.13.0
   <https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_

#. Update ``CHANGELOG.md`` with major highlights in bullet form.

@@ -1197,16 +1188,9 @@ completed, the steps to make the major release are:
   is outdated submit pull requests to ``develop`` as normal
   and keep rebasing the release branch on ``develop``.

#. Bump the major version in the ``develop`` branch.

   Create a pull request targeting the ``develop`` branch, bumping the major
   version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
   For instance when you have just released ``v0.15.0``, set the version
   to ``(0, 16, 0, 'dev0')`` on ``develop``.

#. Follow the steps in :ref:`publishing-releases`.

#. Follow the steps in :ref:`updating-latest-release`.
#. Follow the steps in :ref:`merging-releases`.

#. Follow the steps in :ref:`announcing-releases`.

@@ -1282,6 +1266,9 @@ completed, the steps to make the point release are:

#. Bump the version in ``lib/spack/spack/__init__.py``.

   See `this example from 0.14.1
   <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Update ``CHANGELOG.md`` with a list of the changes.

   This is typically a summary of the commits you cherry-picked onto the
@@ -1303,7 +1290,7 @@ completed, the steps to make the point release are:

#. Follow the steps in :ref:`publishing-releases`.

#. Follow the steps in :ref:`updating-latest-release`.
#. Follow the steps in :ref:`merging-releases`.

#. Follow the steps in :ref:`announcing-releases`.
@@ -1364,11 +1351,11 @@ Publishing a release on GitHub
   selectable in the versions menu.


.. _updating-latest-release:
.. _merging-releases:

^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest`
^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest` and `develop`
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
@@ -1392,6 +1379,40 @@ To tag ``releases/latest``, do this:

The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
``releases/latest`` tag with the new one.

We also merge each release that we tag as ``releases/latest`` into ``develop``.
Make sure to do this with a merge commit:

.. code-block:: console

   $ git checkout develop
   $ git merge --no-ff -s ours vX.Y.Z   # vX.Y.Z is the new release's tag
   $ git push

We merge back to ``develop`` because it:

* updates the version and ``CHANGELOG.md`` on ``develop``; and
* ensures that your release tag is reachable from the head of
  ``develop``.

We *must* use a real merge commit (via the ``--no-ff`` option) to
ensure that the release tag is reachable from the tip of ``develop``.
This is necessary for ``spack -V`` to work properly -- it uses ``git
describe --tags`` to find the last reachable tag in the repository and
reports how far we are from it. For example:

.. code-block:: console

   $ spack -V
   0.14.2-1486-b80d5e74e5

This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
ahead of the ``0.14.2`` release.

We put this step last in the process because it's best to do it only once
the release is complete and tagged. If you do it before you've tagged the
release and later decide you want to tag some later commit, you'll need
to merge again.


.. _announcing-releases:

@@ -5,9 +5,9 @@

.. _environments:

=========================
Environments (spack.yaml)
=========================
============
Environments
============

An environment is used to group together a set of specs for the
purpose of building, rebuilding and deploying in a coherent fashion.
@@ -273,9 +273,19 @@ or
Concretizing
^^^^^^^^^^^^

Once some user specs have been added to an environment, they can be concretized.
There are at the moment three different modes of operation to concretize an environment,
which are explained in detail in :ref:`environments_concretization_config`.
Once some user specs have been added to an environment, they can be
concretized. *By default specs are concretized separately*, one after
the other. This mode of operation makes it possible to deploy a full
software stack where multiple configurations of the same package
need to be installed alongside each other. Central installations done
at HPC centers by system administrators or user support groups
are a common case that fits this behavior.
Environments *can also be configured to concretize all
the root specs in a self-consistent way* to ensure that
each package in the environment comes with a single configuration. This
mode of operation is usually what is required by software developers that
want to deploy their development environment.
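A sketch of the unified mode in a ``spack.yaml`` manifest (hypothetical specs,
reusing the ``concretizer: unify`` key shown in the container examples above):

.. code-block:: yaml

   spack:
     specs:
     - gromacs+mpi
     - mpich
     concretizer:
       unify: true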

Regardless of which mode of operation has been chosen, the following
command will ensure all the root specs are concretized according to the
constraints that are prescribed in the configuration:
@@ -339,24 +349,6 @@ If the Environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will first concretize
the Environment and then install the concretized specs.

.. note::

   Every ``spack install`` process builds one package at a time with multiple build
   jobs, controlled by the ``-j`` flag and the ``config:build_jobs`` option
   (see :ref:`build-jobs`). To speed up environment builds further, independent
   packages can be installed in parallel by launching more Spack instances. For
   example, the following will build at most four packages in parallel using
   three background jobs:

   .. code-block:: console

      [myenv]$ spack install & spack install & spack install & spack install

   Another option is to generate a ``Makefile`` and run ``make -j<N>`` to control
   the number of parallel install processes. See :ref:`env-generate-depfile`
   for details.
|
||||
|
||||
|
||||
As it installs, ``spack install`` creates symbolic links in the
|
||||
``logs/`` directory in the Environment, allowing for easy inspection
|
||||
of build logs related to that environment. The ``spack install``
|
||||

@@ -392,11 +384,18 @@ Sourcing that file in Bash will make the environment available to the
user, and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.

----------
spack.yaml
----------

Spack environments can be customized at a finer granularity by editing
the ``spack.yaml`` manifest file directly.

.. _environment-configuration:

------------------------
^^^^^^^^^^^^^^^^^^^^^^^^
Configuring Environments
------------------------
^^^^^^^^^^^^^^^^^^^^^^^^

A variety of Spack behaviors are changed through Spack configuration
files, covered in more detail in the :ref:`configuration`
@@ -418,9 +417,9 @@ environment can be specified by ``env:NAME`` (to affect environment
``foo``, set ``--scope env:foo``). These commands will automatically
manipulate configuration inline in the ``spack.yaml`` file.

^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""
Inline configurations
^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""

Inline Environment-scope configuration is done using the same yaml
format as standard Spack configuration scopes, covered in the
@@ -441,9 +440,9 @@ a ``packages.yaml`` file) could contain:
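
.. code-block:: yaml

   spack:
     # Inline scope; the `packages` preference below is a sketch of the
     # snippet the surrounding prose describes.
     packages:
       all:
         compiler: [intel]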

This configuration sets the default compiler for all packages to
``intel``.

^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""
Included configurations
^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""

Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
@@ -463,9 +462,9 @@ to make small changes to an individual Environment. Included configs
listed earlier will have higher precedence, as the included configs are
applied in reverse order.
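
For illustration (the paths here are placeholders), an environment with an
``include`` heading might look like:

.. code-block:: yaml

   spack:
     include:
     - /absolute/path/to/packages.yaml
     - relative/path/to/config.yaml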

-------------------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Manually Editing the Specs List
-------------------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The list of abstract/root specs in the Environment is maintained in
the ``spack.yaml`` manifest under the heading ``specs``.
@@ -483,81 +482,37 @@ Appending to this list in the yaml is identical to using the ``spack
add`` command from the command line. However, there is more power
available from the yaml file.

.. _environments_concretization_config:

^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""
Spec concretization
^^^^^^^^^^^^^^^^^^^
An environment can be concretized in three different modes, and the behavior active under any environment
is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:
"""""""""""""""""""

Specs can be concretized separately or together, as already
explained in :ref:`environments_concretization`. The behavior active
under any environment is determined by the ``concretization`` property:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: false
     - ncview
     - netcdf
     - nco
     - py-sphinx
     concretization: together

This mode of operation makes it possible to deploy a full software stack where multiple configurations of the same package
need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
then it will be used for both ``hdf5`` installations.

If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
i.e. trying to maximize reuse of dependencies across different specs:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: when_possible

Also in this case Spack allows having multiple configurations of the same package, but favors the reuse of
specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
Central installations done at HPC centers by system administrators or user support groups are a common case
that fits either of these two modes.

Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
ensure that each package in the environment comes with a single configuration:

.. code-block:: yaml

   spack:
     specs:
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: true

This mode of operation is usually what is required by software developers who want to deploy their development
environment and have a single view of it in the filesystem.

.. note::

   The ``concretizer:unify`` config option was introduced in Spack 0.18 to
   replace the ``concretization`` property. For reference,
   ``concretization: separately`` is replaced by ``concretizer:unify:false``,
   and ``concretization: together`` is replaced by ``concretizer:unify:true``.
which can currently take either one of the two allowed values ``together`` or ``separately``
(the default).

.. admonition:: Re-concretization of user specs

   When concretizing specs *together* or *together where possible* the entire set of specs will be
   When concretizing specs together the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent / minimal. When instead the specs are concretized
   the environment remains consistent. When instead the specs are concretized
   separately only the new specs will be re-concretized after any addition.

^^^^^^^^^^^^^
"""""""""""""
Spec Matrices
^^^^^^^^^^^^^
"""""""""""""

Entries in the ``specs`` list can be individual abstract specs or a
spec matrix.
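
As a sketch (package and compiler names are arbitrary), a matrix expands to
the cross-product of its rows, so the following produces four root specs:

.. code-block:: yaml

   spack:
     specs:
     - matrix:
       - [zlib, libelf]
       - ['%gcc', '%intel']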

@@ -617,9 +572,9 @@ This allows one to create toolchains out of combinations of
constraints and apply them somewhat indiscriminately to packages,
without regard for the applicability of the constraint.

^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""
Spec List References
^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""

The last type of possible entry in the specs list is a reference.
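
A reference names a list from the ``definitions`` section using a ``$``
sigil; a minimal sketch (the list name and its members are arbitrary) is:

.. code-block:: yaml

   spack:
     definitions:
     - my_packages: [zlib, hdf5]
     specs:
     - $my_packages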

@@ -719,9 +674,9 @@ The valid variables for a ``when`` clause are:

#. ``hostname``. The hostname of the system (if ``hostname`` is an
   executable in the user's PATH).

^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""
SpecLists as Constraints
^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""

Dependencies and compilers in Spack can be both packages in an
environment and constraints on other packages. References to SpecLists
@@ -753,41 +708,41 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages
and constraints.
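
A sketch of that pattern (assuming the ``$^`` and ``$%`` sigils, which apply
a named list as dependency and compiler constraints respectively) is:

.. code-block:: yaml

   spack:
     definitions:
     - compilers: ['%gcc', '%clang']
     - mpis: [mvapich2, openmpi]
     - packages: [hdf5, libelf, zlib]
     specs:
     - matrix:
       - [$packages]
       - [$^mpis]
       - [$%compilers]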

----------------
Filesystem Views
----------------
^^^^^^^^^^^^^^^^^^^^^^^^^
Environment-managed Views
^^^^^^^^^^^^^^^^^^^^^^^^^

Spack Environments can define filesystem views, which provide a direct access point
for software similar to the directory hierarchy that might exist under ``/usr/local``.
Filesystem views are updated every time the environment is written out to the lock
file ``spack.lock``, so the concrete environment and the view are always compatible.
The files of the view's installed packages are brought into the view by symbolic or
hard links, referencing the original Spack installation, or by copy.
Spack Environments can define filesystem views of their software,
which are maintained as packages and can be installed and uninstalled from
the Environment. Filesystem views provide an access point for packages
from the filesystem for users who want to access those packages
directly. For more information on filesystem views, see the section
:ref:`filesystem-views`.

Spack Environment managed views are updated every time the environment
is written out to the lock file ``spack.lock``, so the concrete
environment and the view are always compatible.

.. _configuring_environment_views:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration in ``spack.yaml``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""""""
Configuring environment views
"""""""""""""""""""""""""""""

The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a **view descriptor**, headed
by a name. Any number of views may be defined under the ``view`` heading.
The view descriptor contains the root of the view, and
``view``. Each entry under that heading is a view descriptor, headed
by a name. The view descriptor contains the root of the view, and
optionally the projections for the view, ``select`` and
``exclude`` lists for the view, and link information via ``link`` and
``link_type``.

For example, in the following manifest
``link_type``. For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view``, in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the PGI compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install
directories.
All the dependencies of each root spec in the environment will be linked
in the view due to the ``link: all`` option, and the files in the view will
be symlinks to the spack install directories.

.. code-block:: yaml

@@ -803,26 +758,16 @@ directories.
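   # The descriptor fields below are a sketch reconstructed from the prose
   # above; the root path and excluded compiler spec are illustrative.
   spack:
     view:
       mpis:
         root: /path/to/view
         select: [^mpi]
         exclude: ['%pgi@18.5']
         projections:
           all: '{name}/{version}-{compiler.name}'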
         link: all
         link_type: symlink

The default for the ``select`` and
For more information on using view projections, see the section on
:ref:`adding_projections_to_views`. The default for the ``select`` and
``exclude`` values is to select everything and exclude nothing. The
default projection is the default view projection (``{}``). The ``link``
attribute allows the following values:

#. ``link: all`` include root specs with their transitive run and link type
   dependencies (default);
#. ``link: run`` include root specs with their transitive run type dependencies;
#. ``link: roots`` include root specs without their dependencies.

The ``link_type`` defaults to ``symlink`` but can also take the value
of ``hardlink`` or ``copy``.

.. tip::

   The option ``link: run`` can be used to create small environment views for
   Python packages. Python will be able to import packages *inside* of the view even
   when the environment is not activated, and linked libraries will be located
   *outside* of the view thanks to rpaths.
defaults to ``all`` but can also be ``roots`` when only the root specs
in the environment are desired in the view. The ``link_type`` defaults
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.

Any number of views may be defined under the ``view`` heading in a
Spack Environment.

There are two shorthands for environments with a single view. If the
environment at ``/path/to/env`` has a single view, with a root at
@@ -888,47 +833,9 @@ regenerate`` will regenerate the views for the environment. This will
apply any updates in the environment configuration that have not yet
been applied.

.. _view_projections:

""""""""""""""""
View Projections
""""""""""""""""

The default projection into a view is to link every package into the
root of the view. The projections attribute is a mapping of partial specs to
spec format strings, defined by the :meth:`~spack.spec.Spec.format`
function, as shown in the example below:

.. code-block:: yaml

   projections:
     zlib: {name}-{version}
     ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
     all: {name}-{version}/{compiler.name}-{compiler.version}

The entries in the projections configuration file must all be either
specs or the keyword ``all``. For each spec, the projection used will
be the first non-``all`` entry that the spec satisfies, or ``all`` if
there is an entry for ``all`` and no other entry is satisfied by the
spec. Where the keyword ``all`` appears in the file does not
matter.

Given the example above, the spec ``zlib@1.2.8``
will be linked into ``/my/view/zlib-1.2.8/``, the spec
``hdf5@1.8.10+mpi %gcc@4.9.3 ^mvapich2@2.2`` will be linked into
``/my/view/hdf5-1.8.10/mvapich2-2.2-gcc-4.9.3``, and the spec
``hdf5@1.8.10~mpi %gcc@4.9.3`` will be linked into
``/my/view/hdf5-1.8.10/gcc-4.9.3``.

If the keyword ``all`` does not appear in the projections
configuration file, any spec that does not satisfy any entry in the
file will be linked into the root of the view as in a single-prefix
view. Any entries that appear below the keyword ``all`` in the
projections configuration file will not be used, as all specs will use
the projection under ``all`` before reaching those entries.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""
Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""

The ``spack env activate`` command will put the default view for the
environment into the user's path, in addition to activating the
@@ -962,93 +869,3 @@ environment.

The ``spack env deactivate`` command will remove the default view of
the environment from the user's path.
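
For example (``myenv`` is a placeholder environment name):

.. code-block:: console

   $ spack env activate myenv
   $ spack env deactivate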

.. _env-generate-depfile:


------------------------------------------
Generating Depfiles from Environments
------------------------------------------

Spack can generate ``Makefile``\s to make it easier to build multiple
packages in an environment in parallel. Generated ``Makefile``\s expose
targets that can be included in existing ``Makefile``\s, to allow
other targets to depend on the environment installation.

A typical workflow is as follows:

.. code:: console

   spack env create -d .
   spack -e . add perl
   spack -e . concretize
   spack -e . env depfile > Makefile
   make -j64

This generates a ``Makefile`` from a concretized environment in the
current working directory, and ``make -j64`` installs the environment,
exploiting parallelism across packages as much as possible. Spack
respects the Make jobserver and forwards it to the build environment
of packages, meaning that a single ``-j`` flag is enough to control the
load, even when packages are built in parallel.

By default the following phony convenience targets are available:

- ``make all``: installs the environment (default target);
- ``make fetch-all``: only fetch sources of all packages;
- ``make clean``: cleans files used by make, but does not uninstall packages.

.. tip::

   GNU Make version 4.3 and above have great support for output synchronization
   through the ``-O`` and ``--output-sync`` flags, which ensure that output is
   printed orderly per package install. To get synchronized output with colors,
   use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.

The following advanced example shows how generated targets can be used in a
``Makefile``:

.. code:: Makefile

   SPACK ?= spack

   .PHONY: all clean fetch env

   all: env

   spack.lock: spack.yaml
   	$(SPACK) -e . concretize -f

   env.mk: spack.lock
   	$(SPACK) -e . env depfile -o $@ --make-target-prefix spack

   fetch: spack/fetch
   	$(info Environment fetched!)

   env: spack/env
   	$(info Environment installed!)

   clean:
   	rm -rf spack.lock env.mk spack/

   ifeq (,$(filter clean,$(MAKECMDGOALS)))
   include env.mk
   endif

When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
from its rule, which triggers concretization. When done, the generated targets
``spack/fetch`` and ``spack/env`` are available. In the above
example, the ``env`` target uses the latter as a prerequisite, meaning
that it can make use of the installed packages in its commands.

As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
the include is conditional.

.. note::

   When including generated ``Makefile``\s, it is important to use
   the ``--make-target-prefix`` flag and use the non-phony targets
   ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
   prerequisites, instead of the phony targets ``<target-prefix>/all``
   and ``<target-prefix>/fetch-all`` respectively.

@@ -149,28 +149,27 @@ Spack fall back to bootstrapping from sources:

.. code-block:: console

   $ spack bootstrap untrust github-actions-v0.2
   ==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping
   $ spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

You can verify that the new settings are effective with:

.. code-block:: console

   $ spack bootstrap list
   Name: github-actions-v0.2 UNTRUSTED
   Name: github-actions UNTRUSTED

   Type: buildcache

   Info:
     url: https://mirror.spack.io/bootstrap/github-actions/v0.2
     homepage: https://github.com/spack/spack-bootstrap-mirrors
     releases: https://github.com/spack/spack-bootstrap-mirrors/releases
     url: https://mirror.spack.io/bootstrap/github-actions/v0.1
     homepage: https://github.com/alalazo/spack-bootstrap-mirrors
     releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases

   Description:
     Buildcache generated from a public workflow using Github Actions.
     The sha256 checksum of binaries is checked before installation.

   [ ... ]

   Name: spack-install TRUSTED

@@ -1517,238 +1516,3 @@ To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a linux container on a Cray system as a base
linux distro.

.. _windows_support:

----------------
Spack On Windows
----------------

Windows support for Spack is currently under development. While this work is still in an early stage,
it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
you through the steps needed to install Spack and start running it on a fresh Windows machine.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 1: Install prerequisites
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To use Spack on Windows, you will need the following packages:

Required:
* Microsoft Visual Studio
* Python
* Git

Optional:
* Intel Fortran (needed for some packages)

.. note::

   Currently, MSVC is the only compiler tested for C/C++ projects. Intel OneAPI provides Fortran support.
"""""""""""""""""""""""
|
||||
Microsoft Visual Studio
|
||||
"""""""""""""""""""""""
|
||||
|
||||
Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
|
||||
|
||||
We require several specific components to be included in the Visual Studio installation.
|
||||
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
|
||||
depending on installation type (Professional, Build Tools, etc.) The other required component is
|
||||
"C++ CMake tools for Windows," which can be selected from among the optional packages.
|
||||
This provides CMake and Ninja for use during Spack configuration.
|
||||
|
||||
If you already have Visual Studio installed, you can make sure these components are installed by
|
||||
rerunning the installer. Next to your installation, select "Modify" and look at the
|
||||
"Installation details" pane on the right.
|
||||
|
||||
"""""""""""""
|
||||
Intel Fortran
|
||||
"""""""""""""
|
||||
|
||||
For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
|
||||
The suite is free to download from Intel's website, located at
|
||||
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
|
||||
The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
|
||||
compiler's (ifort's) frontend and runtime libraries by using LLVM.
|
||||
|
||||
""""""
|
||||
Python
|
||||
""""""
|
||||
|
||||
As Spack is a Python-based package, an installation of Python will be needed to run it.
|
||||
Python 3 can be downloaded and installed from the Windows Store, and will be automatically added
|
||||
to your ``PATH`` in this case.
|
||||
|
||||
.. note::
|
||||
Spack currently supports Python versions later than 3.2 inclusive.
|
||||
|
||||
"""
|
||||
Git
|
||||
"""
|
||||
|
||||
A bash console and GUI can be downloaded from https://git-scm.com/downloads.
|
||||
If you are unfamiliar with Git, there are a myriad of resources online to help
|
||||
guide you through checking out repositories and switching development branches.
|
||||
|
||||
When given the option of adjusting your ``PATH``, choose the ``Git from the
|
||||
command line and also from 3rd-party software`` option. This will automatically
|
||||
update your ``PATH`` variable to include the ``git`` command.
|
||||
|
||||
Spack support on Windows is currently dependent on installing the Git for Windows project
|
||||
as the project providing Git support on Windows. This is additionally the recommended method
|
||||
for installing Git on Windows, a link to which can be found above. Spack requires the
|
||||
utilities vendored by this project.
|
||||
|
||||

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 2: Install and setup Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We are now ready to get the Spack environment set up on our machine. We
begin by using Git to clone the Spack repo, hosted at https://github.com/spack/spack.git,
into a desired directory, for our purposes today called ``spack_install``.

In order to install Spack with Windows support, run the following one-liner
in a Windows CMD prompt:

.. code-block:: console

   git clone https://github.com/spack/spack.git

.. note::

   If you chose to install Spack into a directory on Windows that is set up to require administrative
   privileges, Spack will require elevated privileges to run.
   Administrative privileges can be denoted either by default, such as
   ``C:\Program Files``, or by administrator-applied restrictions
   on a directory that Spack installs files to, such as ``C:\Users``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 3: Run and configure Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
directory. This will provide a Windows command prompt with an environment properly set up with Spack
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
(which may happen if you installed Python from the website and not the Windows Store), add the location
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
by using the ``Edit the system environment variables`` utility in Windows Control Panel.

.. note::

   Alternatively, PowerShell can be used in place of CMD.

To configure Spack, first run the following command inside the Spack console:

.. code-block:: console

   spack compiler find

This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
installed, this command should only detect Microsoft Visual Studio, and the Intel Fortran
compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
output.

Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
This file is located at ``etc\spack\defaults\windows\config.yaml``. You can read more on how to
override it and write your own configuration files in the :ref:`Configuration Files<configuration>` section of our
documentation. If you do this, pay particular attention to the ``build_stage`` block of the file,
as this specifies the directory that will temporarily hold the source code for the packages to
be installed. This path name must be sufficiently short for compliance with cmd; otherwise you
will see build errors during installation (particularly with CMake) tied to long path names.

To allow Spack to use external tools and dependencies already on your system, the
external pieces of software must be described in the ``packages.yaml`` file.
There are two methods to populate this file:

The first and easiest choice is to use Spack to find installations on your system. In
the Spack terminal, run the following commands:

.. code-block:: console

   spack external find cmake
   spack external find ninja

The ``spack external find <name>`` command will find executables on your system
with the given name. The command will store the items found in
``packages.yaml`` in the ``.staging\`` directory.

Assuming that the command found CMake and Ninja executables in the previous
step, continue to Step 4. If no executables were found, we may need to manually direct Spack towards the CMake
and Ninja installations we set up with Visual Studio. Therefore, your ``packages.yaml`` file will look something
like this, with possibly slight variants in the paths to CMake and Ninja:

.. code-block:: yaml

   packages:
     cmake:
       externals:
       - spec: cmake@3.19
         prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake'
       buildable: False
     ninja:
       externals:
       - spec: ninja@1.8.2
         prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\Ninja'
       buildable: False

You can also use a separate installation of CMake if you have one and prefer
to use it. If you don't have a path to Ninja analogous to the above, then you can
obtain it by running the Visual Studio Installer and following the instructions
at the start of this section. Also note that .yaml files use spaces for indentation
and not tabs, so ensure that this is the case when editing one directly.


.. note:: Cygwin

   The use of Cygwin is not officially supported by Spack and is not tested.
   However, Spack will not throw an error, so if you choose to use Spack
   with Cygwin, know that no functionality is guaranteed.

^^^^^^^^^^^^^^^^^
Step 4: Use Spack
^^^^^^^^^^^^^^^^^

Once the configuration is complete, it is time to give the installation a test. Install a basic package through the
Spack console via:

.. code-block:: console

   spack install cpuinfo

If, in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages.
"""""""""""""""""""""""""""
|
||||
Windows Compatible Packages
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
Many Spack packages are not currently compatible with Windows, due to Unix
|
||||
dependencies or incompatible build tools like autoconf. Here are several
|
||||
packages known to work on Windows:
|
||||
|
||||
* abseil-cpp
|
||||
* clingo
|
||||
* cpuinfo
|
||||
* cmake
|
||||
* glm
|
||||
* nasm
|
||||
* netlib-lapack (requires Intel Fortran)
|
||||
* ninja
|
||||
* openssl
|
||||
* perl
|
||||
* python
|
||||
* ruby
|
||||
* wrf
|
||||
* zlib
|
||||
|
||||
.. note::
|
||||
This is by no means a comprehensive list
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
For developers
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
The intent is to provide a Windows installer that will automatically set up
|
||||
Python, Git, and Spack, instead of requiring the user to do so manually.
|
||||
Instructions for creating the installer are at
|
||||
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
|
||||
|
||||
Alternatively a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI
|
||||
|
@@ -54,8 +54,9 @@ or refer to the full manual below.
   features
   getting_started
   basic_usage
   workflows
   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
   replace_conda_homebrew
   known_issues

.. toctree::
   :maxdepth: 2
@@ -63,7 +64,6 @@ or refer to the full manual below.

   configuration
   config_yaml
   bootstrapping
   build_settings
   environments
   containers
77
lib/spack/docs/known_issues.rst
Normal file
@@ -0,0 +1,77 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

============
Known Issues
============

This is a list of known bugs in Spack. It provides ways of getting around these
problems if you encounter them.

---------------------------------------------------
Variants are not properly forwarded to dependencies
---------------------------------------------------

**Status:** Expected to be fixed by Spack's new concretizer

Sometimes, a variant of a package can also affect how its dependencies are
built. For example, in order to build MPI support for a package, it may
require that its dependencies are also built with MPI support. In the
``package.py``, this looks like:

.. code-block:: python

   depends_on('hdf5~mpi', when='~mpi')
   depends_on('hdf5+mpi', when='+mpi')

Spack handles this situation properly for *immediate* dependencies, and
builds ``hdf5`` with the same variant you used for the package that
depends on it. However, for *indirect* dependencies (dependencies of
dependencies), Spack does not backtrack up the DAG far enough to handle
this. Users commonly run into this situation when trying to build R with
X11 support:

.. code-block:: console

   $ spack install r+X
   ...
   ==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
   Package cairo requires variant ~X, but spec asked for +X

A workaround is to explicitly activate the variants of dependencies as well:

.. code-block:: console

   $ spack install r+X ^cairo+X ^pango+X

See https://github.com/spack/spack/issues/267 and
https://github.com/spack/spack/issues/2546 for further details.

-----------------------------------------------
depends_on cannot handle recursive dependencies
-----------------------------------------------

**Status:** Not yet a work in progress

Although ``depends_on`` can handle any aspect of Spack's spec syntax,
it currently cannot handle recursive dependencies. If the ``^`` sigil
appears in a ``depends_on`` statement, the concretizer will hang.
For example, something like:

.. code-block:: python

   depends_on('mfem+cuda ^hypre+cuda', when='+cuda')

should be rewritten as:

.. code-block:: python

   depends_on('mfem+cuda', when='+cuda')
   depends_on('hypre+cuda', when='+cuda')

See https://github.com/spack/spack/issues/17660 and
https://github.com/spack/spack/issues/11160 for more details.
@@ -5,9 +5,9 @@

.. _mirrors:

======================
Mirrors (mirrors.yaml)
======================
=======
Mirrors
=======

Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they
@@ -5,9 +5,9 @@

.. _modules:

======================
Modules (modules.yaml)
======================
=======
Modules
=======

The use of module systems to manage the user environment in a controlled way
is a common practice at HPC centers that is often embraced also by
@@ -181,7 +181,10 @@ to the environment variables listed below the folder name.
Spack modules can be configured for multiple module sets. The default
module set is named ``default``. All Spack commands which operate on
modules default to applying the ``default`` module set, but can be
applied to any module set in the configuration.
applied to any module set in the configuration. Settings applied at
the root of the configuration (e.g. ``modules:enable`` rather than
``modules:default:enable``) are applied to the default module set for
backwards compatibility.

"""""""""""""""""""""""""
Changing the modules root
@@ -375,7 +378,7 @@ most likely via the ``+blas`` variant specification.

The most heavyweight solution to module naming is to change the entire
naming convention for module files. This uses the projections format
covered in :ref:`view_projections`.
covered in :ref:`adding_projections_to_views`.

.. code-block:: yaml

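   # A sketch of the projections format for module naming; the module set,
   # module system, and format string below are illustrative assumptions,
   # not the exact snippet from the original page.
   modules:
     default:
       tcl:
         projections:
           all: '{name}/{version}-{compiler.name}-{compiler.version}'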
@@ -537,7 +540,8 @@ configuration:

#. The configuration is for an :ref:`environment <environments>` and
   will never be applied outside the environment,
#. The environment in question is configured to use a view,
#. The environment in question is configured to use a :ref:`view
   <filesystem-views>`,
#. The :ref:`environment view is configured
   <configuring_environment_views>` with a projection that ensures
   every package is linked to a unique directory,
@@ -1423,37 +1423,6 @@ other similar operations:

       ).with_default('auto').with_non_feature_values('auto'),
   )

"""""""""""""""""""""""""""
Conditional Possible Values
"""""""""""""""""""""""""""

There are cases where a variant may take multiple values, and the list of allowed values
expands over time. Consider, for instance, the C++ standard with which we might compile
Boost, which can take one of multiple possible values, with the latest standards
only available from a certain version on.

To model such a situation we can use *conditional possible values* in the variant declaration:

.. code-block:: python

   variant(
       'cxxstd', default='98',
       values=(
           '98', '11', '14',
           # C++17 is not supported by Boost < 1.63.0.
           conditional('17', when='@1.63.0:'),
           # C++20/2a is not supported by Boost < 1.73.0
           conditional('2a', '2b', when='@1.73.0:')
       ),
       multi=False,
       description='Use the specified C++ standard when building.',
   )

The snippet above allows ``98``, ``11`` and ``14`` as unconditional possible values for the
``cxxstd`` variant, while ``17`` requires a version greater than or equal to ``1.63.0``
and both ``2a`` and ``2b`` require a version greater than or equal to ``1.73.0``.

^^^^^^^^^^^^^^^^^^^^
Conditional Variants
^^^^^^^^^^^^^^^^^^^^
@@ -2574,7 +2543,7 @@ from being linked in at activation time.
Views
-----

The ``spack view`` command can be
As covered in :ref:`filesystem-views`, the ``spack view`` command can be
used to symlink a number of packages into a merged prefix. The methods of
``PackageViewMixin`` can be overridden to customize how packages are added
to views. Generally this can be used to create copies of specific files rather
@@ -115,8 +115,7 @@ And here's the spack environment built by the pipeline represented as a

   spack:
     view: false
     concretizer:
       unify: false
     concretization: separately

     definitions:
     - pkgs:
@@ -1,207 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

=====================================
Using Spack to Replace Homebrew/Conda
=====================================

Spack is an incredibly powerful package manager, designed for supercomputers
where users have diverse installation needs. But Spack can also be used to
handle simple single-user installations on your laptop. Most macOS users are
already familiar with package managers like Homebrew and Conda, where all
installed packages are symlinked to a single central location like ``/usr/local``.
In this section, we will show you how to emulate the behavior of Homebrew/Conda
using :ref:`environments`!

-----
Setup
-----

First, let's create a new environment. We'll assume that Spack is already set up
correctly, and that you've already sourced the setup script for your shell.
To create a new environment, simply run:

.. code-block:: console

   $ spack env create myenv

Here, *myenv* can be anything you want to name your environment. Next, we can add
a list of packages we would like to install into our environment. Let's say we
want a newer version of Bash than the one that comes with macOS, and we want a
few Python libraries. We can run:

.. code-block:: console

   $ spack -e myenv add bash@5 python py-numpy py-scipy py-matplotlib

Each package can be listed on a separate line, or combined into a single line like we did above.
Notice that we're explicitly asking for Bash 5 here. You can use any spec
you would normally use on the command line with other Spack commands.

Next, we want to manually configure a couple of things:

.. code-block:: console

   $ spack -e myenv config edit

.. code-block:: yaml

   # This is a Spack Environment file.
   #
   # It describes a set of packages to be installed, along with
   # configuration settings.
   spack:
     # add package specs to the `specs` list
     specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
     view: true

You can see the packages we added earlier in the ``specs:`` section. If you
ever want to add more packages, you can either use ``spack add`` or manually
edit this file.

We also need to change the ``concretizer:unify`` option. By default, Spack
concretizes each spec *separately*, allowing multiple versions of the same
package to coexist. Since we want a single consistent environment, we want to
concretize all of the specs *together*.

Here is what your ``spack.yaml`` looks like with this new setting:

.. code-block:: yaml

   # This is a Spack Environment file.
   #
   # It describes a set of packages to be installed, along with
   # configuration settings.
   spack:
     # add package specs to the `specs` list
     specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
     view: true
     concretizer:
       unify: true

^^^^^^^^^^^^^^^^
Symlink location
^^^^^^^^^^^^^^^^

Spack symlinks all installations to ``/Users/me/spack/var/spack/environments/myenv/.spack-env/view``,
which is the default when ``view: true``.
You can actually change this to any directory you want. For example, Homebrew
uses ``/usr/local``, while Conda uses ``/Users/me/anaconda``. In order to access
files in these locations, you need to update ``PATH`` and other environment variables
to point to them. Activating the Spack environment does this automatically, but
you can also manually set them in your ``.bashrc``.

.. warning::

   There are several reasons why you shouldn't use ``/usr/local``:

   1. If you are on macOS 10.11+ (El Capitan and newer), Apple makes it hard
      for you. You may notice permissions issues on ``/usr/local`` due to their
      `System Integrity Protection <https://support.apple.com/en-us/HT204899>`_.
      By default, users don't have permissions to install anything in ``/usr/local``,
      and you can't even change this using ``sudo chown`` or ``sudo chmod``.
   2. Other package managers like Homebrew will try to install things to the
      same directory. If you plan on using Homebrew in conjunction with Spack,
      don't symlink things to ``/usr/local``.
   3. If you are on a shared workstation, or don't have sudo privileges, you
      can't do this.

   If you still want to do this anyway, there are several ways around SIP.
   You could disable SIP by booting into recovery mode and running
   ``csrutil disable``, but this is not recommended, as it can open up your OS
   to security vulnerabilities. Another technique is to run ``spack concretize``
   and ``spack install`` using ``sudo``. This is also not recommended.

   The safest way I've found is to create your installation directories using
   sudo, then change ownership back to the user like so:

   .. code-block:: bash

      for directory in .spack bin contrib include lib man share
      do
        sudo mkdir -p /usr/local/$directory
        sudo chown $(id -un):$(id -gn) /usr/local/$directory
      done

   Depending on the packages you install in your environment, the exact list of
   directories you need to create may vary. You may also find some packages
   like Java libraries that install a single file to the installation prefix
   instead of in a subdirectory. In this case, the action is the same, just replace
   ``mkdir -p`` with ``touch`` in the for-loop above.

   But again, it's safer just to use the default symlink location.

------------
Installation
------------

To actually concretize the environment, run:

.. code-block:: console

   $ spack -e myenv concretize

This will tell you which, if any, packages are already installed, and alert you
to any conflicting specs.

To actually install these packages and symlink them to your ``view:``
directory, simply run:

.. code-block:: console

   $ spack -e myenv install
   $ spack env activate myenv

Now, when you type ``which python3``, it should find the one you just installed.

In order to change the default shell to our newer Bash installation, we first
need to add it to the list of acceptable shells. Run:

.. code-block:: console

   $ sudo vim /etc/shells

and add the absolute path to your bash executable. Then run:

.. code-block:: console

   $ chsh -s /path/to/bash

Now, when you log out and log back in, ``echo $SHELL`` should point to the
newer version of Bash.

---------------------------
Updating Installed Packages
---------------------------

Let's say you upgraded to a new version of macOS, or a new version of Python
was released, and you want to rebuild your entire software stack. To do this,
simply run the following commands:

.. code-block:: console

   $ spack env activate myenv
   $ spack concretize --force
   $ spack install

The ``--force`` flag tells Spack to overwrite its previous concretization
decisions, allowing you to choose a new version of Python. If any of the new
packages like Bash are already installed, ``spack install`` won't re-install
them; it will keep the symlinks in place.

--------------
Uninstallation
--------------

If you decide that Spack isn't right for you, uninstallation is simple.
Just run:

.. code-block:: console

   $ spack env activate myenv
   $ spack uninstall --all

This will uninstall all packages in your environment and remove the symlinks.
@@ -5,9 +5,9 @@

.. _repositories:

=================================
Package Repositories (repos.yaml)
=================================
=============================
Package Repositories
=============================

Spack comes with thousands of built-in package recipes in
``var/spack/repos/builtin/``. This is a **package repository** -- a
@@ -25,5 +25,4 @@ spack:
   - subversion
   # Plotting
   - graphviz
   concretizer:
     unify: true
   concretization: together
@@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 2.7/3.5-3.10, , Interpreter for Spack
Python, 2.7/3.5-3.9, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
1193
lib/spack/docs/workflows.rst
Normal file
File diff suppressed because it is too large
14
lib/spack/env/cc
vendored
@@ -1,4 +1,4 @@
#!/bin/sh -f
#!/bin/sh
# shellcheck disable=SC2034  # evals in this script fool shellcheck
#
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
@@ -241,28 +241,28 @@ case "$command" in
        mode=cpp
        debug_flags="-g"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe)
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
        command="$SPACK_CC"
        language="C"
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++)
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang)
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
        command="$SPACK_FC"
        language="Fortran 90"
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        ;;
    f77|xlf|xlf_r|pgf77|amdflang)
    f77|xlf|xlf_r|pgf77)
        command="$SPACK_F77"
        language="Fortran 77"
        comp="F77"
@@ -768,9 +768,7 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    IFS="$lsep"
    echo "[$mode] "$full_command_list >> "$output_log"
    unset IFS
    echo "[$mode] ${full_command_list}" >> "$output_log"
fi

# Execute the full command, preserving spaces with IFS set
1
lib/spack/env/rocmcc/amdclang
vendored
@@ -1 +0,0 @@
../cc
1
lib/spack/env/rocmcc/amdclang++
vendored
@@ -1 +0,0 @@
../cpp
1
lib/spack/env/rocmcc/amdflang
vendored
@@ -1 +0,0 @@
../fc
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.4 (commit 53fc4ac91e9b4c5e4079f15772503a80bece72ad)
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)

argparse
--------
75
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -61,7 +61,7 @@ def proc_cpuinfo():
        ``/proc/cpuinfo``
    """
    info = {}
    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
    with open("/proc/cpuinfo") as file:
        for line in file:
            key, separator, value = line.partition(":")

@@ -80,46 +80,26 @@ def proc_cpuinfo():


def _check_output(args, env):
    output = subprocess.Popen(  # pylint: disable=consider-using-with
        args, stdout=subprocess.PIPE, env=env
    ).communicate()[0]
    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
    return six.text_type(output.decode("utf-8"))


def _machine():
    """ "Return the machine architecture we are on"""
    operating_system = platform.system()

    # If we are not on Darwin, trust what Python tells us
    if operating_system != "Darwin":
        return platform.machine()

    # On Darwin it might happen that we are on M1, but using an interpreter
    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
    # need to fix that.
    #
    # See: https://bugs.python.org/issue42704
    output = _check_output(
        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
    ).strip()

    if "Apple" in output:
        # Note that a native Python interpreter on Apple M1 would return
        # "arm64" instead of "aarch64". Here we normalize to the latter.
        return "aarch64"

    return "x86_64"


@info_dict(operating_system="Darwin")
def sysctl_info_dict():
    """Returns a raw info dictionary parsing the output of sysctl."""
    child_environment = _ensure_bin_usrbin_in_path()
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)

    def sysctl(*args):
        return _check_output(["sysctl"] + list(args), env=child_environment).strip()

    if _machine() == "x86_64":
    if platform.machine() == "x86_64":
        flags = (
            sysctl("-n", "machdep.cpu.features").lower()
            + " "
@@ -145,18 +125,6 @@ def sysctl(*args):
    return info


def _ensure_bin_usrbin_in_path():
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)
    return child_environment


def adjust_raw_flags(info):
    """Adjust the flags detected on the system to homogenize
    slightly different representations.
@@ -216,7 +184,12 @@ def compatible_microarchitectures(info):
    Args:
        info (dict): dictionary containing information on the host cpu
    """
    architecture_family = _machine()
    architecture_family = platform.machine()
    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
    # so we should normalize the name here
    if architecture_family == "arm64":
        architecture_family = "aarch64"

    # If a tester is not registered, be conservative and assume no known
    # target is compatible with the host
    tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
@@ -271,7 +244,12 @@ def compatibility_check(architecture_family):
        architecture_family = (architecture_family,)

    def decorator(func):
        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
        # pylint: disable=fixme
        # TODO: on removal of Python 2.6 support this can be re-written as
        # TODO: an update + a dict comprehension
        for arch_family in architecture_family:
            COMPATIBILITY_CHECKS[arch_family] = func

        return func

    return decorator
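Both sides of this hunk implement the same registry pattern: `compatibility_check` stores one checker function per architecture family in a module-level dict. A self-contained sketch of that pattern (the `check_ppc64` example below is invented for illustration):

COMPATIBILITY_CHECKS = {}


def compatibility_check(architecture_family):
    # Accept a single family name or a tuple of names.
    if isinstance(architecture_family, str):
        architecture_family = (architecture_family,)

    def decorator(func):
        # Register the same checker for every listed family.
        for family in architecture_family:
            COMPATIBILITY_CHECKS[family] = func
        return func

    return decorator


@compatibility_check(architecture_family=("ppc64le", "ppc64"))
def check_ppc64(info, target):
    # A toy check; real implementations compare generations/features.
    return target.startswith("power")


print(COMPATIBILITY_CHECKS["ppc64le"]({}, "power9"))  # True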
@@ -310,7 +288,7 @@ def compatibility_check_for_x86_64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )

@@ -325,9 +303,8 @@ def compatibility_check_for_aarch64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        # On macOS it seems impossible to get all the CPU features with sysctl info
        and (target.features.issubset(features) or platform.system() == "Darwin")
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )
4
lib/spack/external/archspec/cpu/schema.py
vendored
@@ -11,7 +11,7 @@
try:
    from collections.abc import MutableMapping  # novm
except ImportError:
    from collections import MutableMapping  # pylint: disable=deprecated-class
    from collections import MutableMapping


class LazyDictionary(MutableMapping):
@@ -56,7 +56,7 @@ def _load_json_file(json_file):

    def _factory():
        filename = os.path.join(json_dir, json_file)
        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
        with open(filename, "r") as file:
            return json.load(file)

    return _factory
@@ -88,20 +88,6 @@
      "name": "pentium4",
      "flags": "-march={name} -mtune=generic"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "pentium4",
      "flags": "-march={name} -mtune=generic"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "pentium4",
      "flags": "-march={name} -mtune=generic"
    }
  ]
}
},
@@ -305,20 +291,6 @@
      "name": "pentium4",
      "flags": "-march={name} -mtune=generic"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "pentium4",
      "flags": "-march={name} -mtune=generic"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "pentium4",
      "flags": "-march={name} -mtune=generic"
    }
  ]
}
},
@@ -361,18 +333,6 @@
      "versions": "16.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -424,20 +384,6 @@
      "name": "corei7",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "corei7",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "corei7",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -486,20 +432,6 @@
      "name": "corei7",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "corei7",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "corei7",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -558,18 +490,6 @@
      "versions": "18.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -630,18 +550,6 @@
      "versions": "18.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -707,18 +615,6 @@
      "versions": "18.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -776,18 +672,6 @@
      "versions": "18.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -848,18 +732,6 @@
      "versions": "18.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -926,20 +798,6 @@
      "name": "knl",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "knl",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "knl",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1010,20 +868,6 @@
      "name": "skylake-avx512",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "skylake-avx512",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "skylake-avx512",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1093,18 +937,6 @@
      "versions": "18.0:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1172,18 +1004,6 @@
      "versions": "19.0.1:",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1278,20 +1098,6 @@
      "name": "icelake-client",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "name": "icelake-client",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "name": "icelake-client",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1336,20 +1142,6 @@
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse2"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse2"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse2"
    }
  ]
}
},
@@ -1400,20 +1192,6 @@
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse3"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse3"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse3"
    }
  ]
}
},
@@ -1468,20 +1246,6 @@
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse3"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse3"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse3"
    }
  ]
}
},
@@ -1537,20 +1301,6 @@
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse4.2"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse4.2"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "flags": "-msse4.2"
    }
  ]
}
},
@@ -1610,22 +1360,6 @@
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1688,22 +1422,6 @@
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1767,22 +1485,6 @@
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -1841,30 +1543,6 @@
      "name": "znver3",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "intel": [
    {
      "versions": "16.0:",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "oneapi": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ],
  "dpcpp": [
    {
      "versions": ":",
      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
      "name": "core-avx2",
      "flags": "-march={name} -mtune={name}"
    }
  ]
}
},
@@ -2110,6 +1788,7 @@
  "fp",
  "asimd",
  "evtstrm",
  "aes",
  "pmull",
  "sha1",
  "sha2",
@@ -2142,26 +1821,18 @@
        "flags": "-march=armv8.2-a+crc+crypto+fp16"
      },
      {
        "versions": "8:10.2",
        "flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
      },
      {
        "versions": "10.3:",
        "flags": "-mcpu=a64fx -msve-vector-bits=512"
        "versions": "8:",
        "flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
      }
    ],
    "clang": [
      {
        "versions": "3.9:4.9",
        "flags": "-march=armv8.2-a+crc+sha2+fp16"
        "flags": "-march=armv8.2-a+crc+crypto+fp16"
      },
      {
        "versions": "5:10",
        "flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
      },
      {
        "versions": "11:",
        "flags": "-mcpu=a64fx"
        "versions": "5:",
        "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
      }
    ],
    "arm": [
@@ -2283,40 +1954,7 @@
"m1": {
  "from": ["aarch64"],
  "vendor": "Apple",
  "features": [
    "fp",
    "asimd",
    "evtstrm",
    "aes",
    "pmull",
    "sha1",
    "sha2",
    "crc32",
    "atomics",
    "fphp",
    "asimdhp",
    "cpuid",
    "asimdrdm",
    "jscvt",
    "fcma",
    "lrcpc",
    "dcpop",
    "sha3",
    "asimddp",
    "sha512",
    "asimdfhm",
    "dit",
    "uscat",
    "ilrcpc",
    "flagm",
    "ssbs",
    "sb",
    "paca",
    "pacg",
    "dcpodp",
    "flagm2",
    "frint"
  ],
  "features": [],
  "compilers": {
    "gcc": [
      {
@@ -2326,22 +1964,14 @@
    ],
    "clang" : [
      {
        "versions": "9.0:12.0",
        "versions": "9.0:",
        "flags" : "-march=armv8.4-a"
      },
      {
        "versions": "13.0:",
        "flags" : "-mcpu=apple-m1"
      }
    ],
    "apple-clang": [
      {
        "versions": "11.0:12.5",
        "versions": "11.0:",
        "flags" : "-march=armv8.4-a"
      },
      {
        "versions": "13.0:",
        "flags" : "-mcpu=apple-m1"
      }
    ]
  }
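The hunks above strip the `oneapi` and `dpcpp` compiler entries from the vendored microarchitecture database; these entries are what maps a (compiler, version) pair to optimization flags. A hedged sketch of querying that database through archspec's Python API, assuming the `archspec` package is installed (the target name and compiler version below are examples):

import archspec.cpu

# Look up a microarchitecture and render its compiler flags from the
# JSON database edited above.
target = archspec.cpu.TARGETS["broadwell"]
print(target.optimization_flags("gcc", "9.3.0"))
# e.g. "-march=broadwell -mtune=broadwell"

# The host's own microarchitecture is detected with:
print(archspec.cpu.host())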
@@ -5,36 +5,26 @@
import collections
import errno
import glob
import grp
import hashlib
import itertools
import numbers
import os
import pwd
import re
import shutil
import stat
import sys
import tempfile
from contextlib import contextmanager
from sys import platform as _platform

import six

from llnl.util import tty
from llnl.util.compat import Sequence
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import symlink

from spack.util.executable import Executable
from spack.util.path import path_to_os_path, system_path_filter

is_windows = _platform == 'win32'

if not is_windows:
    import grp
    import pwd
else:
    import win32security


__all__ = [
    'FileFilter',
@@ -54,7 +44,6 @@
    'fix_darwin_install_name',
    'force_remove',
    'force_symlink',
    'getuid',
    'chgrp',
    'chmod_x',
    'copy',
@@ -64,7 +53,6 @@
    'is_exe',
    'join_path',
    'last_modification_time_recursive',
    'library_extensions',
    'mkdirp',
    'partition_path',
    'prefixes',
@@ -72,7 +60,6 @@
    'remove_directory_contents',
    'remove_if_dead_link',
    'remove_linked_tree',
    'rename',
    'set_executable',
    'set_install_permissions',
    'touch',
@@ -84,41 +71,18 @@
]


def getuid():
    if is_windows:
        import ctypes
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
            return 1
        return 0
    else:
        return os.getuid()


@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists
    if is_windows:
        if os.path.exists(dst):
            os.remove(dst)
    os.rename(src, dst)


@system_path_filter
def path_contains_subdirectory(path, root):
    norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
    norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
    return norm_path.startswith(norm_root)
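The Windows branch removed above papers over a real POSIX/Windows difference: `os.rename` overwrites an existing destination on POSIX but raises on Windows. A minimal standalone sketch of the same workaround (the platform check is derived locally here):

import os
import sys

is_windows = sys.platform == 'win32'


def rename(src, dst):
    # POSIX os.rename replaces dst atomically; on Windows it raises
    # FileExistsError instead, so remove the destination first.
    if is_windows and os.path.exists(dst):
        os.remove(dst)
    os.rename(src, dst)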
#: This generates the library filenames that may appear on any OS.
library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']


def possible_library_filenames(library_names):
    """Given a collection of library names like 'libfoo', generate the set of
    library filenames that may be found on the system (e.g. libfoo.so).
    library filenames that may be found on the system (e.g. libfoo.so). This
    generates the library filenames that may appear on any OS.
    """
    lib_extensions = library_extensions
    lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
    return set(
        '.'.join((lib, extension)) for lib, extension in
        itertools.product(library_names, lib_extensions))
@@ -131,7 +95,6 @@ def paths_containing_libs(paths, library_names):
    required_lib_fnames = possible_library_filenames(library_names)

    rpaths_to_include = []
    paths = path_to_os_path(*paths)
    for path in paths:
        fnames = set(os.listdir(path))
        if fnames & required_lib_fnames:
@@ -140,7 +103,6 @@ def paths_containing_libs(paths, library_names):
    return rpaths_to_include


@system_path_filter
def same_path(path1, path2):
    norm1 = os.path.abspath(path1).rstrip(os.path.sep)
    norm2 = os.path.abspath(path2).rstrip(os.path.sep)
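As a quick illustration of what `possible_library_filenames` computes, a runnable sketch using the extension list above:

import itertools

library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']


def possible_library_filenames(library_names):
    # Cartesian product of names and extensions, joined with a dot.
    return set(
        '.'.join((lib, ext))
        for lib, ext in itertools.product(library_names, library_extensions)
    )


print(sorted(possible_library_filenames(['libfoo'])))
# ['libfoo.a', 'libfoo.dylib', 'libfoo.la', 'libfoo.so', 'libfoo.tbd']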
@@ -191,7 +153,7 @@ def groupid_to_group(x):

    if string:
        regex = re.escape(regex)
    filenames = path_to_os_path(*filenames)

    for filename in filenames:

        msg = 'FILTER FILE: {0} [replacing "{1}"]'
@@ -301,39 +263,13 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):

    repl = r's@\1@\2@g'
    repl = repl.replace('@', new_delim)
    filenames = path_to_os_path(*filenames)

    for f in filenames:
        filter_file(whole_lines, repl, f)
        filter_file(single_quoted, "'%s'" % repl, f)
        filter_file(double_quoted, '"%s"' % repl, f)


@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):
        mkdirp(path, mode=stat.S_IRWXU)

        p_stat = os.stat(path)
        if p_stat.st_mode & stat.S_IRWXU != stat.S_IRWXU:
            tty.error("Expected {0} to support mode {1}, but it is {2}"
                      .format(path, stat.S_IRWXU, p_stat.st_mode))

            raise OSError(errno.EACCES,
                          err_msg.format(path, path) if err_msg else "")
    else:
        p_stat = os.stat(path)

    if _platform != "win32":
        owner_uid = p_stat.st_uid
    else:
        sid = win32security.GetFileSecurity(
            path, win32security.OWNER_SECURITY_INFORMATION) \
            .GetSecurityDescriptorOwner()
        owner_uid = win32security.LookupAccountSid(None, sid)[0]
    return owner_uid


@system_path_filter
def set_install_permissions(path):
    """Set appropriate permissions on the installed file."""
    # If this points to a file maintained in a Spack prefix, it is assumed that
@@ -356,33 +292,21 @@ def group_ids(uid=None):
    Returns:
        (list of int): gids of groups the user is a member of
    """
    if is_windows:
        tty.warn("Function is not supported on Windows")
        return []

    if uid is None:
        uid = getuid()
        uid = os.getuid()
    user = pwd.getpwuid(uid).pw_name
    return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]


@system_path_filter(arg_slice=slice(1))
def chgrp(path, group, follow_symlinks=True):
def chgrp(path, group):
    """Implement the bash chgrp function on a single path"""
    if is_windows:
        raise OSError("Function 'chgrp' is not supported on Windows")

    if isinstance(group, six.string_types):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    if follow_symlinks:
        os.chown(path, -1, gid)
    else:
        os.lchown(path, -1, gid)
    os.chown(path, -1, gid)
@system_path_filter(arg_slice=slice(1))
def chmod_x(entry, perms):
    """Implements chmod, treating all executable bits as set using the chmod
    utility's `+X` option.
@@ -396,7 +320,6 @@ def chmod_x(entry, perms):
    os.chmod(entry, perms)


@system_path_filter
def copy_mode(src, dest):
    """Set the mode of dest to that of src unless it is a link.
    """
@@ -413,7 +336,6 @@ def copy_mode(src, dest):
    os.chmod(dest, dest_mode)


@system_path_filter
def unset_executable_mode(path):
    mode = os.stat(path).st_mode
    mode &= ~stat.S_IXUSR
@@ -422,7 +344,6 @@ def unset_executable_mode(path):
    os.chmod(path, mode)


@system_path_filter
def copy(src, dest, _permissions=False):
    """Copy the file(s) *src* to the file or directory *dest*.

@@ -467,7 +388,6 @@ def copy(src, dest, _permissions=False):
            copy_mode(src, dst)


@system_path_filter
def install(src, dest):
    """Install the file(s) *src* to the file or directory *dest*.

@@ -486,7 +406,6 @@ def install(src, dest):
    copy(src, dest, _permissions=True)


@system_path_filter
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
@@ -501,7 +420,6 @@ def resolve_link_target_relative_to_the_link(link):
    return os.path.join(link_dir, target)


@system_path_filter
def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
    """Recursively copy an entire directory tree rooted at *src*.

@@ -570,7 +488,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
                            .format(target, new_target))
                    target = new_target

                symlink(target, d)
                os.symlink(target, d)
            elif os.path.isdir(link_target):
                mkdirp(d)
            else:
@@ -586,7 +504,6 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
            copy_mode(s, d)


@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
    """Recursively install an entire directory tree rooted at *src*.

@@ -606,13 +523,11 @@ def install_tree(src, dest, symlinks=True, ignore=None):
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)


@system_path_filter
def is_exe(path):
    """True if path is an executable file."""
    return os.path.isfile(path) and os.access(path, os.X_OK)


@system_path_filter
def get_filetype(path_name):
    """
    Return the output of file path_name as a string to identify file type.
@@ -624,30 +539,6 @@ def get_filetype(path_name):
    return output.strip()


@system_path_filter
def is_nonsymlink_exe_with_shebang(path):
    """
    Returns whether the path is an executable script with a shebang.
    Return False when the path is a *symlink* to an executable script.
    """
    try:
        st = os.lstat(path)
        # Should not be a symlink
        if stat.S_ISLNK(st.st_mode):
            return False

        # Should be executable
        if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
            return False

        # Should start with a shebang
        with open(path, 'rb') as f:
            return f.read(2) == b'#!'
    except (IOError, OSError):
        return False


@system_path_filter(arg_slice=slice(1))
def chgrp_if_not_world_writable(path, group):
    """chgrp path to group if path is not world writable"""
    mode = os.stat(path).st_mode
@@ -677,7 +568,7 @@ def mkdirp(*paths, **kwargs):
    mode = kwargs.get('mode', None)
    group = kwargs.get('group', None)
    default_perms = kwargs.get('default_perms', 'args')
    paths = path_to_os_path(*paths)

    for path in paths:
        if not os.path.exists(path):
            try:
@@ -738,7 +629,6 @@ def mkdirp(*paths, **kwargs):
            raise OSError(errno.EEXIST, "File already exists", path)


@system_path_filter
def force_remove(*paths):
    """Remove files without printing errors. Like ``rm -f``, does NOT
    remove directories."""
@@ -750,7 +640,6 @@ def force_remove(*paths):


@contextmanager
@system_path_filter
def working_dir(dirname, **kwargs):
    if kwargs.get('create', False):
        mkdirp(dirname)
@@ -770,37 +659,39 @@ def __init__(self, inner_exception, outer_exception):


@contextmanager
@system_path_filter
def replace_directory_transaction(directory_name):
    """Temporarily renames a directory in the same parent dir. If the operations
    executed within the context manager don't raise an exception, the renamed directory
    is deleted. If there is an exception, the move is undone.
def replace_directory_transaction(directory_name, tmp_root=None):
    """Moves a directory to a temporary space. If the operations executed
    within the context manager don't raise an exception, the directory is
    deleted. If there is an exception, the move is undone.

    Args:
        directory_name (path): absolute path of the directory name
        tmp_root (path): absolute path of the parent directory where to create
            the temporary

    Returns:
        temporary directory where ``directory_name`` has been moved
    """
    # Check the input is indeed a directory with absolute path.
    # Raise before anything is done to avoid moving the wrong directory
    directory_name = os.path.abspath(directory_name)
    assert os.path.isdir(directory_name), 'Not a directory: ' + directory_name
    assert os.path.isdir(directory_name), \
        'Invalid directory: ' + directory_name
    assert os.path.isabs(directory_name), \
        '"directory_name" must contain an absolute path: ' + directory_name

    # Note: directory_name is normalized here, meaning the trailing slash is dropped,
    # so dirname is the directory's parent not the directory itself.
    tmpdir = tempfile.mkdtemp(
        dir=os.path.dirname(directory_name),
        prefix='.backup')
    directory_basename = os.path.basename(directory_name)

    # We have to jump through hoops to support Windows, since
    # os.rename(directory_name, tmpdir) errors there.
    backup_dir = os.path.join(tmpdir, 'backup')
    os.rename(directory_name, backup_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, backup_dir))
    if tmp_root is not None:
        assert os.path.isabs(tmp_root)

    tmp_dir = tempfile.mkdtemp(dir=tmp_root)
    tty.debug('Temporary directory created [{0}]'.format(tmp_dir))

    shutil.move(src=directory_name, dst=tmp_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))

    try:
        yield backup_dir
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
        # Try to recover the original directory, if this fails, raise a
        # composite exception.
@@ -808,7 +699,10 @@ def replace_directory_transaction(directory_name):
            # Delete what was there, before copying back the original content
            if os.path.exists(directory_name):
                shutil.rmtree(directory_name)
            os.rename(backup_dir, directory_name)
            shutil.move(
                src=os.path.join(tmp_dir, directory_basename),
                dst=os.path.dirname(directory_name)
            )
        except Exception as outer_exception:
            raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)

@@ -816,11 +710,10 @@ def replace_directory_transaction(directory_name):
        raise
    else:
        # Otherwise delete the temporary directory
        shutil.rmtree(tmpdir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmpdir))
        shutil.rmtree(tmp_dir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
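Both versions of `replace_directory_transaction` implement the same transactional pattern: move the directory aside, run the block, then delete the backup on success or restore it on failure. A minimal self-contained sketch of that pattern, not Spack's exact implementation:

import os
import shutil
import tempfile
from contextlib import contextmanager


@contextmanager
def directory_transaction(directory):
    # Move the directory into a fresh temp dir next to it.
    directory = os.path.abspath(directory)
    backup_root = tempfile.mkdtemp(dir=os.path.dirname(directory))
    backup = os.path.join(backup_root, 'backup')
    os.rename(directory, backup)
    try:
        yield backup
    except BaseException:
        # Undo: drop whatever was half-written, restore the backup.
        if os.path.exists(directory):
            shutil.rmtree(directory)
        os.rename(backup, directory)
        raise
    else:
        # Success: the backup is no longer needed.
        shutil.rmtree(backup_root, ignore_errors=True)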
@system_path_filter
def hash_directory(directory, ignore=[]):
    """Hashes recursively the content of a directory.

@@ -849,7 +742,6 @@ def hash_directory(directory, ignore=[]):


@contextmanager
@system_path_filter
def write_tmp_and_move(filename):
    """Write to a temporary file, then move into place."""
    dirname = os.path.dirname(filename)
@@ -861,7 +753,6 @@ def write_tmp_and_move(filename):


@contextmanager
@system_path_filter
def open_if_filename(str_or_file, mode='r'):
    """Takes either a path or a file object, and opens it if it is a path.

@@ -874,13 +765,9 @@ def open_if_filename(str_or_file, mode='r'):
        yield str_or_file


@system_path_filter
def touch(path):
    """Creates an empty file at the specified path."""
    if is_windows:
        perms = (os.O_WRONLY | os.O_CREAT)
    else:
        perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
    perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
    fd = None
    try:
        fd = os.open(path, perms)
@@ -890,7 +777,6 @@ def touch(path):
        os.close(fd)


@system_path_filter
def touchp(path):
    """Like ``touch``, but creates any parent directories needed for the file.
    """
@@ -898,16 +784,14 @@ def touchp(path):
    touch(path)


@system_path_filter
def force_symlink(src, dest):
    try:
        symlink(src, dest)
        os.symlink(src, dest)
    except OSError:
        os.remove(dest)
        symlink(src, dest)
        os.symlink(src, dest)
@system_path_filter
def join_path(prefix, *args):
    path = str(prefix)
    for elt in args:
@@ -915,7 +799,6 @@ def join_path(prefix, *args):
    return path


@system_path_filter
def ancestor(dir, n=1):
    """Get the nth ancestor of a directory."""
    parent = os.path.abspath(dir)
@@ -924,7 +807,6 @@ def ancestor(dir, n=1):
    return parent


@system_path_filter
def get_single_file(directory):
    fnames = os.listdir(directory)
    if len(fnames) != 1:
@@ -944,7 +826,6 @@ def temp_cwd():


@contextmanager
@system_path_filter
def temp_rename(orig_path, temp_path):
    same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
    if not same_path:
@@ -956,13 +837,11 @@ def temp_rename(orig_path, temp_path):
        shutil.move(temp_path, orig_path)


@system_path_filter
def can_access(file_name):
    """True if we have read/write access to the file."""
    return os.access(file_name, os.R_OK | os.W_OK)


@system_path_filter
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
    """Traverse two filesystem trees simultaneously.

@@ -1045,105 +924,6 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
        yield (source_path, dest_path)


def lexists_islink_isdir(path):
    """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
    number of stat calls."""
    # First try to lstat, so we know if it's a link or not.
    try:
        lst = os.lstat(path)
    except (IOError, OSError):
        return False, False, False

    is_link = stat.S_ISLNK(lst.st_mode)

    # Check whether file is a dir.
    if not is_link:
        is_dir = stat.S_ISDIR(lst.st_mode)
        return True, is_link, is_dir

    # Check whether symlink points to a dir.
    try:
        st = os.stat(path)
        is_dir = stat.S_ISDIR(st.st_mode)
    except (IOError, OSError):
        # Dangling symlink (i.e. it lexists but not exists)
        is_dir = False

    return True, is_link, is_dir
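A hypothetical use of the helper above, classifying a path with at most two stat calls instead of three separate os.path checks:

# Sketch: '/tmp' stands in for any path of interest.
exists, is_link, is_dir = lexists_islink_isdir('/tmp')
print(exists, is_link, is_dir)  # e.g. True False True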
def visit_directory_tree(root, visitor, rel_path='', depth=0):
    """
    Recurses the directory root depth-first through a visitor pattern

    The visitor interface is as follows:
    - visit_file(root, rel_path, depth)
    - before_visit_dir(root, rel_path, depth) -> bool
        if True, descends into this directory
    - before_visit_symlinked_dir(root, rel_path, depth) -> bool
        if True, descends into this directory
    - after_visit_dir(root, rel_path, depth) -> void
        only called when before_visit_dir returns True
    - after_visit_symlinked_dir(root, rel_path, depth) -> void
        only called when before_visit_symlinked_dir returns True
    """
    dir = os.path.join(root, rel_path)

    if sys.version_info >= (3, 5, 0):
        dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)  # novermin
    else:
        dir_entries = os.listdir(dir)
        dir_entries.sort()

    for f in dir_entries:
        if sys.version_info >= (3, 5, 0):
            rel_child = os.path.join(rel_path, f.name)
            islink = f.is_symlink()
            # On Windows, symlinks to directories are distinct from
            # symlinks to files, and it is possible to create a
            # broken symlink to a directory (e.g. using os.symlink
            # without `target_is_directory=True`); invoking `isdir`
            # on a symlink on Windows that is broken in this manner
            # will result in an error. In this case we can work around
            # the issue by reading the target and resolving the
            # directory ourselves
            try:
                isdir = f.is_dir()
            except OSError as e:
                if is_windows and hasattr(e, 'winerror')\
                        and e.winerror == 5 and islink:
                    # if path is a symlink, determine destination and
                    # evaluate file vs directory
                    link_target = resolve_link_target_relative_to_the_link(f)
                    # link_target might be relative, but
                    # resolve_link_target_relative_to_the_link
                    # ensures that if so, it is relative to the CWD
                    # and therefore makes sense
                    isdir = os.path.isdir(link_target)
                else:
                    raise e

        else:
            rel_child = os.path.join(rel_path, f)
            lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
            if not lexists:
                continue

        if not isdir:
            # handle files
            visitor.visit_file(root, rel_child, depth)
        elif not islink and visitor.before_visit_dir(root, rel_child, depth):
            # Handle ordinary directories
            visit_directory_tree(root, visitor, rel_child, depth + 1)
            visitor.after_visit_dir(root, rel_child, depth)
        elif islink and visitor.before_visit_symlinked_dir(root, rel_child, depth):
            # Handle symlinked directories
            visit_directory_tree(root, visitor, rel_child, depth + 1)
            visitor.after_visit_symlinked_dir(root, rel_child, depth)
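A minimal visitor satisfying the interface documented above (a sketch that only counts entries):

class CountingVisitor(object):
    # Implements the visitor interface of visit_directory_tree.
    def __init__(self):
        self.files = 0
        self.dirs = 0

    def visit_file(self, root, rel_path, depth):
        self.files += 1

    def before_visit_dir(self, root, rel_path, depth):
        self.dirs += 1
        return True  # descend

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        return False  # don't follow symlinked dirs

    def after_visit_dir(self, root, rel_path, depth):
        pass

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        pass


visitor = CountingVisitor()
visit_directory_tree('/tmp', visitor)
print(visitor.files, visitor.dirs)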
@system_path_filter
def set_executable(path):
    mode = os.stat(path).st_mode
    if mode & stat.S_IRUSR:
@@ -1155,7 +935,6 @@ def set_executable(path):
    os.chmod(path, mode)


@system_path_filter
def last_modification_time_recursive(path):
    path = os.path.abspath(path)
    times = [os.stat(path).st_mtime]
@@ -1165,7 +944,6 @@ def last_modification_time_recursive(path):
    return max(times)


@system_path_filter
def remove_empty_directories(root):
    """Ascend up from the leaves accessible from `root` and remove empty
    directories.
@@ -1182,7 +960,6 @@ def remove_empty_directories(root):
            pass


@system_path_filter
def remove_dead_links(root):
    """Recursively removes any dead link that is present in root.

@@ -1195,7 +972,6 @@ def remove_dead_links(root):
        remove_if_dead_link(path)


@system_path_filter
def remove_if_dead_link(path):
    """Removes the argument if it is a dead link.

@@ -1206,65 +982,24 @@ def remove_if_dead_link(path):
        os.unlink(path)


def readonly_file_handler(ignore_errors=False):
    # TODO: generate stages etc. with write permissions wherever
    # so this callback is no longer required
    """
    Generate callback for shutil.rmtree to handle permissions errors on
    Windows. Some files may unexpectedly lack write permissions even
    though they were generated by Spack on behalf of the user (e.g. the
    stage), so this callback will detect such cases and modify the
    permissions if that is the issue. For other errors, the fallback
    is either to raise (if ignore_errors is False) or ignore (if
    ignore_errors is True). This is only intended for Windows systems
    and will raise a separate error if it is ever invoked (by accident)
    on a non-Windows system.
    """
    def error_remove_readonly(func, path, exc):
        if not is_windows:
            raise RuntimeError("This method should only be invoked on Windows")
        excvalue = exc[1]
        if is_windows and func in (os.rmdir, os.remove, os.unlink) and\
                excvalue.errno == errno.EACCES:
            # change the file to be readable, writable, executable: 0777
            os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
            # retry
            func(path)
        elif not ignore_errors:
            raise
    return error_remove_readonly


@system_path_filter
def remove_linked_tree(path):
    """Removes a directory and its contents.

    If the directory is a symlink, follows the link and removes the real
    directory before removing the link.

    This method will force-delete files on Windows.

    Parameters:
        path (str): Directory to be removed
    """
    kwargs = {'ignore_errors': True}

    # Windows readonly files cannot be removed by Python directly.
    if is_windows:
        kwargs['ignore_errors'] = False
        kwargs['onerror'] = readonly_file_handler(ignore_errors=True)

    if os.path.exists(path):
        if os.path.islink(path):
            shutil.rmtree(os.path.realpath(path), **kwargs)
            shutil.rmtree(os.path.realpath(path), True)
            os.unlink(path)
        else:
            shutil.rmtree(path, **kwargs)
            shutil.rmtree(path, True)
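`readonly_file_handler` builds on `shutil.rmtree`'s `onerror` hook, which is called with the failing function, the path, and the exception info. A minimal sketch of the same chmod-and-retry idea (the 'build-stage' path is hypothetical):

import os
import shutil
import stat


def _chmod_and_retry(func, path, exc_info):
    # Called by shutil.rmtree when a removal fails: make the path
    # writable by the owner and retry the failed operation once.
    os.chmod(path, stat.S_IRWXU)
    func(path)


shutil.rmtree('build-stage', onerror=_chmod_and_retry)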
@contextmanager
@system_path_filter
def safe_remove(*files_or_dirs):
    """Context manager to remove the files passed as input, but restore
    them in case any exception is raised in the context block.
@@ -1311,7 +1046,6 @@ def safe_remove(*files_or_dirs):
        raise


@system_path_filter
def fix_darwin_install_name(path):
    """Fix install name of dynamic libraries on Darwin to have full path.

@@ -1398,7 +1132,6 @@ def find(root, files, recursive=True):
        return _find_non_recursive(root, files)


@system_path_filter
def _find_recursive(root, search_files):

    # The variable here is **on purpose** a defaultdict. The idea is that
@@ -1409,6 +1142,7 @@ def _find_recursive(root, search_files):

    # Make the path absolute to have os.walk also return an absolute path
    root = os.path.abspath(root)

    for path, _, list_files in os.walk(root):
        for search_file in search_files:
            matches = glob.glob(os.path.join(path, search_file))
@@ -1422,7 +1156,6 @@ def _find_recursive(root, search_files):
    return answer


@system_path_filter
def _find_non_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict as os.list_dir
    # can return files in any order (does not preserve stability)
@@ -1554,7 +1287,7 @@ def directories(self, value):
        if isinstance(value, six.string_types):
            value = [value]

        self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
        self._directories = [os.path.normpath(x) for x in value]

    def _default_directories(self):
        """Default computation of directories based on the list of
@@ -1712,7 +1445,6 @@ def find_headers(headers, root, recursive=False):
    return HeaderList(find(root, headers, recursive))


@system_path_filter
def find_all_headers(root):
    """Convenience function that returns the list of all headers found
    in the directory passed as argument.
@@ -1940,7 +1672,6 @@ def find_libraries(libraries, root, shared=True, recursive=False):
    return LibraryList(found_libs)


@system_path_filter
@memoized
def can_access_dir(path):
    """Returns True if the argument is an accessible directory.
@@ -1954,7 +1685,6 @@ def can_access_dir(path):
    return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)


@system_path_filter
@memoized
def can_write_to_dir(path):
    """Return True if the argument is a directory in which we can write.
@@ -1968,7 +1698,6 @@ def can_write_to_dir(path):
    return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)


@system_path_filter
@memoized
def files_in(*search_paths):
    """Returns all the files in paths passed as arguments.
@@ -1990,12 +1719,6 @@ def files_in(*search_paths):
    return files


def is_readable_file(file_path):
    """Return True if the path passed as argument is readable"""
    return os.path.isfile(file_path) and os.access(file_path, os.R_OK)


@system_path_filter
def search_paths_for_executables(*path_hints):
    """Given a list of path hints returns a list of paths where
    to search for an executable.
@@ -2023,39 +1746,6 @@ def search_paths_for_executables(*path_hints):
    return executable_paths


@system_path_filter
def search_paths_for_libraries(*path_hints):
    """Given a list of path hints returns a list of paths where
    to search for a shared library.

    Args:
        *path_hints (list of paths): list of paths taken into
            consideration for a search

    Returns:
        A list containing the real path of every existing directory
        in `path_hints` and its `lib` and `lib64` subdirectory if it exists.
    """
    library_paths = []
    for path in path_hints:
        if not os.path.isdir(path):
            continue

        path = os.path.abspath(path)
        library_paths.append(path)

        lib_dir = os.path.join(path, 'lib')
        if os.path.isdir(lib_dir):
            library_paths.append(lib_dir)

        lib64_dir = os.path.join(path, 'lib64')
        if os.path.isdir(lib64_dir):
            library_paths.append(lib64_dir)

    return library_paths


@system_path_filter
def partition_path(path, entry=None):
    """
    Split the prefixes of the path at the first occurrence of entry and
@@ -2072,11 +1762,7 @@ def partition_path(path, entry=None):
    # Derive the index of entry within paths, which will correspond to
    # the location of the entry within the path.
    try:
        sep = os.sep
        entries = path.split(sep)
        if entries[0].endswith(":"):
            # Handle drive letters e.g. C:/ on Windows
            entries[0] = entries[0] + sep
        entries = path.split(os.sep)
        i = entries.index(entry)
        if '' in entries:
            i -= 1
@@ -2087,7 +1773,6 @@ def partition_path(path, entry=None):
    return paths, '', []


@system_path_filter
def prefixes(path):
    """
    Returns a list containing the path and its ancestors, top-to-bottom.
@@ -2101,9 +1786,6 @@ def prefixes(path):
    For example, path ``./hi/jkl/mn`` results in a list with the following
    paths, in order: ``./hi``, ``./hi/jkl``, and ``./hi/jkl/mn``.

    On Windows, paths will be normalized to use ``/`` and ``/`` will always
    be used as the separator instead of ``os.sep``.

    Parameters:
        path (str): the string used to derive ancestor paths
@@ -2112,17 +1794,14 @@ def prefixes(path):
    """
    if not path:
        return []
    sep = os.sep
    parts = path.strip(sep).split(sep)
    if path.startswith(sep):
        parts.insert(0, sep)
    elif parts[0].endswith(":"):
        # Handle drive letters e.g. C:/ on Windows
        parts[0] = parts[0] + sep

    parts = path.strip(os.sep).split(os.sep)
    if path.startswith(os.sep):
        parts.insert(0, os.sep)
    paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]

    try:
        paths.remove(sep)
        paths.remove(os.sep)
    except ValueError:
        pass
@@ -2134,7 +1813,6 @@ def prefixes(path):
    return paths


@system_path_filter
def md5sum(file):
    """Compute the MD5 sum of a file.

@@ -2150,7 +1828,6 @@ def md5sum(file):
    return md5.digest()


@system_path_filter
def remove_directory_contents(dir):
    """Remove all contents of a directory."""
    if os.path.exists(dir):
@@ -2162,7 +1839,6 @@ def remove_directory_contents(dir):


@contextmanager
@system_path_filter
def keep_modification_time(*filenames):
    """
    Context manager to keep the modification timestamps of the input files.
@@ -11,14 +11,12 @@
import os
import re
import sys
import traceback
from datetime import datetime, timedelta
from typing import List, Tuple

import six
from six import string_types

from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
from llnl.util.compat import MutableMapping, zip_longest

# Ignore emacs backups when listing modules
ignore_modules = [r'^\.#', '~$']
@@ -591,31 +589,20 @@ def match(string):
    return match


def dedupe(sequence, key=None):
    """Yields a stable de-duplication of a hashable sequence by key
def dedupe(sequence):
    """Yields a stable de-duplication of a hashable sequence

    Args:
        sequence: hashable sequence to be de-duplicated
        key: callable applied on values before uniqueness test; identity
            by default.

    Returns:
        stable de-duplication of the sequence

    Examples:

        Dedupe a list of integers:

            [x for x in dedupe([1, 2, 1, 3, 2])] == [1, 2, 3]

            [x for x in llnl.util.lang.dedupe([1,-2,1,3,2], key=abs)] == [1, -2, 3]
    """
    seen = set()
    for x in sequence:
        x_key = x if key is None else key(x)
        if x_key not in seen:
        if x not in seen:
            yield x
            seen.add(x_key)
            seen.add(x)
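The `key` parameter removed in this hunk decides which duplicates survive; a runnable illustration of both forms, following the docstring examples:

def dedupe(sequence, key=None):
    # Stable de-duplication: the first occurrence of each key wins.
    seen = set()
    for x in sequence:
        x_key = x if key is None else key(x)
        if x_key not in seen:
            yield x
            seen.add(x_key)


print(list(dedupe([1, 2, 1, 3, 2])))            # [1, 2, 3]
print(list(dedupe([1, -2, 1, 3, 2], key=abs)))  # [1, -2, 3]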
def pretty_date(time, now=None):
@@ -891,6 +878,11 @@ def load_module_from_file(module_name, module_path):
            except KeyError:
                pass
            raise
    elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
        import importlib.machinery
        loader = importlib.machinery.SourceFileLoader(  # novm
            module_name, module_path)
        module = loader.load_module()
    elif sys.version_info[0] == 2:
        import imp
        module = imp.load_source(module_name, module_path)
@@ -964,111 +956,3 @@ def nullcontext(*args, **kwargs):

class UnhashableArguments(TypeError):
    """Raise when an @memoized function receives unhashable arg or kwarg values."""


def enum(**kwargs):
    """Return an enum-like class.

    Args:
        **kwargs: explicit dictionary of enums
    """
    return type('Enum', (object,), kwargs)


class TypedMutableSequence(MutableSequence):
    """Base class that behaves like a list, just with a different type.

    Client code can inherit from this base class:

        class Foo(TypedMutableSequence):
            pass

    and later perform checks based on types:

        if isinstance(l, Foo):
            # do something
    """
    def __init__(self, iterable):
        self.data = list(iterable)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]

    def __len__(self):
        return len(self.data)

    def insert(self, index, item):
        self.data.insert(index, item)

    def __repr__(self):
        return repr(self.data)

    def __str__(self):
        return str(self.data)


class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]

    def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context):
        # type: (str) -> GroupedExceptionForwarder
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self)

    def _receive_forwarded(self, context, exc, tb):
        # type: (str, Exception, List[str]) -> None
        self.exceptions.append((context, exc, tb))

    def grouped_message(self, with_tracebacks=True):
        # type: (bool) -> str
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            '{0} raised {1}: {2}{3}'.format(
                context,
                exc.__class__.__name__,
                exc,
                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
            )
            for context, exc, tb in self.exceptions
        ]
        return 'due to the following failures:\n{0}'.format(
            '\n'.join(each_exception_message)
        )


class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context, handler):
        # type: (str, GroupedExceptionHandler) -> None
        self._context = context
        self._handler = handler

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            self._handler._receive_forwarded(
                self._context,
                exc_value,
                traceback.format_tb(tb),
            )

        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True
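A short sketch of how the handler/forwarder pair removed above is meant to be used (the stage names and failures below are invented for illustration):

handler = GroupedExceptionHandler()

for name in ('fetch', 'checksum', 'expand'):
    # Each stage runs inside a forwarder; exceptions are recorded
    # with their context instead of aborting the loop.
    with handler.forward(name):
        raise RuntimeError('stage {0} failed'.format(name))

if handler:
    print('Operation failed ' + handler.grouped_message(with_tracebacks=False))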
|
||||
|
@@ -10,11 +10,9 @@
import filecmp
import os
import shutil
from collections import OrderedDict

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, touch, traverse_tree
from llnl.util.symlink import islink, symlink

__all__ = ['LinkTree']

@@ -22,7 +20,7 @@


def remove_link(src, dest):
    if not islink(dest):
    if not os.path.islink(dest):
        raise ValueError("%s is not a link tree!" % dest)
    # remove if dest is a hardlink/symlink to src; this will only
    # be false if two packages are merged into a prefix and have a
@@ -31,246 +29,6 @@ def remove_link(src, dest):
    os.remove(dest)


class MergeConflict:
    """
    The invariant here is that src_a and src_b are both mapped
    to dst:

        project(src_a) == project(src_b) == dst
    """
    def __init__(self, dst, src_a=None, src_b=None):
        self.dst = dst
        self.src_a = src_a
        self.src_b = src_b


class SourceMergeVisitor(object):
    """
    Visitor that produces actions:
    - An ordered list of directories to create in dst
    - A list of files to link in dst
    - A list of merge conflicts in dst/
    """
    def __init__(self, ignore=None):
        self.ignore = ignore if ignore is not None else lambda f: False

        # When mapping <src root> to <dst root>/<projection>, we need
        # to prepend the <projection> bit to the relative path in the
        # destination dir.
        self.projection = ''

        # When a file blocks another file, the conflict can sometimes
        # be resolved / ignored (e.g. <prefix>/LICENSE or
        # <site-packages>/<namespace>/__init__.py conflicts can be
        # ignored).
        self.file_conflicts = []

        # When we have to create a dir where a file is, or a file
        # where a dir is, we have fatal errors, listed here.
        self.fatal_conflicts = []

        # What directories we have to make; this is an ordered set,
        # so that we have a fast lookup and can run mkdir in order.
        self.directories = OrderedDict()

        # Files to link. Maps dst_rel to (src_rel, src_root)
        self.files = OrderedDict()

    def before_visit_dir(self, root, rel_path, depth):
        """
        Register a directory if dst / rel_path is not blocked by a file or ignored.
        """
        proj_rel_path = os.path.join(self.projection, rel_path)

        if self.ignore(rel_path):
            # Don't recurse when dir is ignored.
            return False
        elif proj_rel_path in self.files:
            # Can't create a dir where a file is.
            src_a_root, src_a_relpath = self.files[proj_rel_path]
            self.fatal_conflicts.append(MergeConflict(
                dst=proj_rel_path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join(root, rel_path)))
            return False
        elif proj_rel_path in self.directories:
            # No new directory, carry on.
            return True
        else:
            # Register new directory.
            self.directories[proj_rel_path] = (root, rel_path)
            return True

    def after_visit_dir(self, root, rel_path, depth):
        pass

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        """
        Replace symlinked dirs with actual directories when possible at low depths,
        otherwise handle it as a file (i.e. we link to the symlink).

        Transforming symlinks into dirs makes it more likely we can merge directories,
        e.g. when <prefix>/lib -> <prefix>/subdir/lib.

        We only do this when the symlink is pointing into a subdirectory from the
        symlink's directory, to avoid potential infinite recursion; and only at a
        constant level of nesting, to avoid potential exponential blowups in file
        duplication.
        """
        if self.ignore(rel_path):
            return False

        # Only follow symlinked dirs in <prefix>/**/**/*
        if depth > 1:
            handle_as_dir = False
        else:
            # Only follow symlinked dirs when pointing deeper
            src = os.path.join(root, rel_path)
            real_parent = os.path.realpath(os.path.dirname(src))
            real_child = os.path.realpath(src)
            handle_as_dir = real_child.startswith(real_parent)

        if handle_as_dir:
            return self.before_visit_dir(root, rel_path, depth)

        self.visit_file(root, rel_path, depth)
        return False

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        pass

    def visit_file(self, root, rel_path, depth):
        proj_rel_path = os.path.join(self.projection, rel_path)

        if self.ignore(rel_path):
            pass
        elif proj_rel_path in self.directories:
            # Can't create a file where a dir is; fatal error
            src_a_root, src_a_relpath = self.directories[proj_rel_path]
            self.fatal_conflicts.append(MergeConflict(
                dst=proj_rel_path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join(root, rel_path)))
        elif proj_rel_path in self.files:
            # In some cases we can resolve file-file conflicts
            src_a_root, src_a_relpath = self.files[proj_rel_path]
            self.file_conflicts.append(MergeConflict(
                dst=proj_rel_path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join(root, rel_path)))
        else:
            # Otherwise register this file to be linked.
            self.files[proj_rel_path] = (root, rel_path)

    def set_projection(self, projection):
        self.projection = os.path.normpath(projection)

        # Todo: is this how to check in general for an empty projection?
        if self.projection == '.':
            self.projection = ''
            return

        # If there is a projection, we'll also create the directories
        # it consists of, and check whether that's causing conflicts.
        path = ''
        for part in self.projection.split(os.sep):
            path = os.path.join(path, part)
            if path not in self.files:
                self.directories[path] = ('<projection>', path)
            else:
                # Can't create a dir where a file is.
                src_a_root, src_a_relpath = self.files[path]
                self.fatal_conflicts.append(MergeConflict(
                    dst=path,
                    src_a=os.path.join(src_a_root, src_a_relpath),
                    src_b=os.path.join('<projection>', path)))


class DestinationMergeVisitor(object):
    """DestinationMergeVisitor takes a SourceMergeVisitor and:

    a. registers additional conflicts when merging
       to the destination prefix
    b. removes redundant mkdir operations when
       directories already exist in the destination
       prefix.

    This also makes sure that symlinked directories
    in the target prefix will never be merged with
    directories in the source directories.
    """
    def __init__(self, source_merge_visitor):
        self.src = source_merge_visitor

    def before_visit_dir(self, root, rel_path, depth):
        # If the destination dir is a file in a src dir, add a conflict,
        # and don't traverse deeper.
        if rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))
            return False

        # If the destination dir was also a src dir, remove the mkdir
        # action, and traverse deeper.
        if rel_path in self.src.directories:
            del self.src.directories[rel_path]
            return True

        # If the destination dir does not appear in the src dir,
        # don't descend into it.
        return False

    def after_visit_dir(self, root, rel_path, depth):
        pass

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        """
        Symlinked directories in the destination prefix should
        be seen as files; we should not accidentally merge a
        source dir with a symlinked dest dir.
        """
        # Always conflict
        if rel_path in self.src.directories:
            src_a_root, src_a_relpath = self.src.directories[rel_path]
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))

        if rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))

        # Never descend into symlinked target dirs.
        return False

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        pass

    def visit_file(self, root, rel_path, depth):
        # Can't merge a file if the target already exists
        if rel_path in self.src.directories:
            src_a_root, src_a_relpath = self.src.directories[rel_path]
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))

        elif rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))


class LinkTree(object):
    """Class to create trees of symbolic links from a source directory.

@@ -355,7 +113,7 @@ def unmerge_directories(self, dest_root, ignore):
        os.remove(marker)

    def merge(self, dest_root, ignore_conflicts=False, ignore=None,
              link=symlink, relative=False):
              link=os.symlink, relative=False):
        """Link all files in src into dest, creating directories
        if necessary.

@@ -367,7 +125,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None,
            ignore (callable): callable that returns True if a file is to be
                ignored in the merge (by default ignore nothing)

            link (callable): function to create links with (defaults to llnl.util.symlink)
            link (callable): function to create links with (defaults to os.symlink)

            relative (bool): create all symlinks relative to the target
                (default False)
@@ -379,7 +137,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None,
        conflict = self.find_conflict(
            dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts)
        if conflict:
            raise SingleMergeConflictError(conflict)
            raise MergeConflictError(conflict)

        self.merge_directories(dest_root, ignore)
        existing = []
@@ -411,24 +169,7 @@ def unmerge(self, dest_root, ignore=None, remove_file=remove_link):


class MergeConflictError(Exception):
    pass


class SingleMergeConflictError(MergeConflictError):
    def __init__(self, path):
        super(MergeConflictError, self).__init__(
            "Package merge blocked by file: %s" % path)


class MergeConflictSummary(MergeConflictError):
    def __init__(self, conflicts):
        """
        A human-readable summary of file system view merge conflicts
        (showing only the first three issues).
        """
        msg = "{0} fatal error(s) when merging prefixes:".format(len(conflicts))
        # show the first 3 merge conflicts.
        for conflict in conflicts[:3]:
            msg += "\n    `{0}` and `{1}` both project to `{2}`".format(
                conflict.src_a, conflict.src_b, conflict.dst)
        super(MergeConflictSummary, self).__init__(msg)
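
A short sketch of how the visitors above record conflicts, feeding visit calls by hand instead of going through traverse_tree (the roots and relative paths are made up for illustration):

    v = SourceMergeVisitor()
    v.visit_file('/prefix/pkg-a', 'bin/tool', 2)
    v.visit_file('/prefix/pkg-b', 'bin/tool', 2)  # same dst: file-file conflict

    assert len(v.file_conflicts) == 1
    c = v.file_conflicts[0]
    print('{0} and {1} both project to {2}'.format(c.src_a, c.src_b, c.dst))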
@@ -4,9 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import fcntl
import os
import socket
import sys
import time
from datetime import datetime
from typing import Dict, Tuple  # novm
@@ -15,10 +15,6 @@

import spack.util.string

if sys.platform != 'win32':
    import fcntl


__all__ = [
    'Lock',
    'LockDowngradeError',
@@ -33,6 +29,8 @@
    'CantCreateLockError'
]

#: Mapping of supported locks to description
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}

#: A useful replacement for functions that should return True when not provided
#: for example.
@@ -168,30 +166,6 @@ def _attempts_str(wait_time, nattempts):
    return ' after {0:0.2f}s and {1}'.format(wait_time, attempts)


class LockType(object):
    READ = 0
    WRITE = 1

    @staticmethod
    def to_str(tid):
        ret = "READ"
        if tid == LockType.WRITE:
            ret = "WRITE"
        return ret

    @staticmethod
    def to_module(tid):
        lock = fcntl.LOCK_SH
        if tid == LockType.WRITE:
            lock = fcntl.LOCK_EX
        return lock

    @staticmethod
    def is_valid(op):
        return op == LockType.READ \
            or op == LockType.WRITE


class Lock(object):
    """This is an implementation of a filesystem lock using Python's lockf.

@@ -302,10 +276,9 @@ def _lock(self, op, timeout=None):
        successfully acquired, the total wait time and the number of attempts
        is returned.
        """
        assert LockType.is_valid(op)
        op_str = LockType.to_str(op)
        assert op in lock_type

        self._log_acquiring('{0} LOCK'.format(op_str))
        self._log_acquiring('{0} LOCK'.format(lock_type[op].upper()))
        timeout = timeout or self.default_timeout

        # Create file and parent directories if they don't exist.
@@ -313,13 +286,13 @@ def _lock(self, op, timeout=None):
        self._ensure_parent_directory()
        self._file = file_tracker.get_fh(self.path)

        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == 'r':
        if op == fcntl.LOCK_EX and self._file.mode == 'r':
            # Attempt to upgrade to write lock w/a read-only file.
            # If the file were writable, we'd have opened it 'r+'
            raise LockROFileError(self.path)

        self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
                        .format(op_str.lower(), self._start, self._length,
                        .format(lock_type[op], self._start, self._length,
                                timeout))

        poll_intervals = iter(Lock._poll_interval_generator())
@@ -340,16 +313,17 @@ def _lock(self, op, timeout=None):
            return total_wait_time, num_attempts

        raise LockTimeoutError("Timed out waiting for a {0} lock."
                               .format(op_str.lower()))
                               .format(lock_type[op]))

    def _poll_lock(self, op):
        """Attempt to acquire the lock in a non-blocking manner. Return whether
        the locking attempt succeeds
        """
        module_op = LockType.to_module(op)
        assert op in lock_type

        try:
            # Try to get the lock (will raise if not available.)
            fcntl.lockf(self._file, module_op | fcntl.LOCK_NB,
            fcntl.lockf(self._file, op | fcntl.LOCK_NB,
                        self._length, self._start, os.SEEK_SET)

            # help for debugging distributed locking
@@ -357,11 +331,11 @@ def _poll_lock(self, op):
            # All locks read the owner PID and host
            self._read_log_debug_data()
            self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
                            .format(LockType.to_str(op), self.path,
                            .format(lock_type[op], self.path,
                                    self._start, self._length, self.pid))

            # Exclusive locks write their PID/host
            if module_op == fcntl.LOCK_EX:
            if op == fcntl.LOCK_EX:
                self._write_log_debug_data()

            return True
@@ -446,7 +420,7 @@ def acquire_read(self, timeout=None):

        if self._reads == 0 and self._writes == 0:
            # can raise LockError.
            wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
            wait_time, nattempts = self._lock(fcntl.LOCK_SH, timeout=timeout)
            self._reads += 1
            # Log if acquired, which includes counts when verbose
            self._log_acquired('READ LOCK', wait_time, nattempts)
@@ -471,7 +445,7 @@ def acquire_write(self, timeout=None):

        if self._writes == 0:
            # can raise LockError.
            wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
            wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
            self._writes += 1
            # Log if acquired, which includes counts when verbose
            self._log_acquired('WRITE LOCK', wait_time, nattempts)
@@ -515,7 +489,7 @@ def downgrade_write_to_read(self, timeout=None):
        if self._writes == 1 and self._reads == 0:
            self._log_downgrading()
            # can raise LockError.
            wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
            wait_time, nattempts = self._lock(fcntl.LOCK_SH, timeout=timeout)
            self._reads = 1
            self._writes = 0
            self._log_downgraded(wait_time, nattempts)
@@ -534,7 +508,7 @@ def upgrade_read_to_write(self, timeout=None):
        if self._reads == 1 and self._writes == 0:
            self._log_upgrading()
            # can raise LockError.
            wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
            wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
            self._reads = 0
            self._writes = 1
            self._log_upgraded(wait_time, nattempts)
@@ -618,12 +592,6 @@ def release_write(self, release_fn=None):
        else:
            return False

    def cleanup(self):
        if self._reads == 0 and self._writes == 0:
            os.unlink(self.path)
        else:
            raise LockError("Attempting to cleanup active lock.")

    def _get_counts_desc(self):
        return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
            if tty.is_verbose() else ''
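
A hedged sketch of the primitive Lock builds on: a non-blocking byte-range lock via fcntl.lockf, the same call _poll_lock issues above (the lock file path is illustrative):

    import fcntl
    import os

    fd = os.open('/tmp/example.lock', os.O_RDWR | os.O_CREAT)
    try:
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)  # raises OSError if already held
        print('acquired exclusive lock')
    finally:
        fcntl.lockf(fd, fcntl.LOCK_UN)  # releasing an unheld lock is harmless
        os.close(fd)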
@@ -1,112 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import shutil
import tempfile
from os.path import exists, join
from sys import platform as _platform

from llnl.util import lang

is_windows = _platform == 'win32'

if is_windows:
    from win32file import CreateHardLink


def symlink(real_path, link_path):
    """
    Create a symbolic link.

    On Windows, use junctions if os.symlink fails.
    """
    if not is_windows or _win32_can_symlink():
        os.symlink(real_path, link_path)
    else:
        try:
            # Try to use junctions
            _win32_junction(real_path, link_path)
        except OSError:
            # If all else fails, fall back to copying files
            shutil.copyfile(real_path, link_path)


def islink(path):
    return os.path.islink(path) or _win32_is_junction(path)


# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
    # junctions require absolute paths
    if not os.path.isabs(link):
        link = os.path.abspath(link)

    # os.symlink will fail if link exists, emulate the behavior here
    if exists(link):
        raise OSError(errno.EEXIST, 'File exists: %s -> %s' % (link, path))

    if not os.path.isabs(path):
        parent = os.path.join(link, os.pardir)
        path = os.path.join(parent, path)
        path = os.path.abspath(path)

    CreateHardLink(link, path)


@lang.memoized
def _win32_can_symlink():
    tempdir = tempfile.mkdtemp()

    dpath = join(tempdir, 'dpath')
    fpath = join(tempdir, 'fpath.txt')

    dlink = join(tempdir, 'dlink')
    flink = join(tempdir, 'flink.txt')

    import llnl.util.filesystem as fs
    fs.touchp(fpath)

    try:
        os.symlink(dpath, dlink)
        can_symlink_directories = os.path.islink(dlink)
    except OSError:
        can_symlink_directories = False

    try:
        os.symlink(fpath, flink)
        can_symlink_files = os.path.islink(flink)
    except OSError:
        can_symlink_files = False

    # Cleanup the test directory
    shutil.rmtree(tempdir)

    return can_symlink_directories and can_symlink_files


def _win32_is_junction(path):
    """
    Determines if a path is a win32 junction
    """
    if os.path.islink(path):
        return False

    if is_windows:
        import ctypes.wintypes

        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
        GetFileAttributes.restype = ctypes.wintypes.DWORD

        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
        FILE_ATTRIBUTE_REPARSE_POINT = 0x400

        res = GetFileAttributes(path)
        return res != INVALID_FILE_ATTRIBUTES and \
            bool(res & FILE_ATTRIBUTE_REPARSE_POINT)

    return False
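
A usage sketch for the portable wrapper deleted above: on POSIX it is plain os.symlink, on Windows it may fall back to a junction or a file copy (the paths are illustrative):

    from llnl.util.symlink import islink, symlink

    symlink('/opt/pkg/1.0', '/opt/pkg/current')
    assert islink('/opt/pkg/current')  # True for symlinks and win32 junctions alike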
@@ -6,22 +6,19 @@
from __future__ import unicode_literals

import contextlib
import fcntl
import os
import struct
import sys
import termios
import textwrap
import traceback
from datetime import datetime
from sys import platform as _platform

import six
from six import StringIO
from six.moves import input

if _platform != "win32":
    import fcntl
    import termios

from llnl.util.tty.color import cescape, clen, cprint, cwrite

# Globals
@@ -146,7 +143,7 @@ def process_stacktrace(countback):
    file_list = []
    for frame in st:
        # Check that the file is a spack file
        if frame[0].find(os.path.sep + "spack") >= 0:
        if frame[0].find("/spack") >= 0:
            file_list.append(frame[0])
    # We use commonprefix to find what the spack 'root' directory is.
    root_dir = os.path.commonprefix(file_list)
@@ -373,29 +370,22 @@ def hline(label=None, **kwargs):


def terminal_size():
    """Gets the dimensions of the console: (rows, cols)."""
    if _platform != "win32":
        def ioctl_gwinsz(fd):
            try:
                rc = struct.unpack('hh', fcntl.ioctl(
                    fd, termios.TIOCGWINSZ, '1234'))
            except BaseException:
                return
            return rc
        rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
        if not rc:
            try:
                fd = os.open(os.ctermid(), os.O_RDONLY)
                rc = ioctl_gwinsz(fd)
                os.close(fd)
            except BaseException:
                pass
        if not rc:
            rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))

        return int(rc[0]), int(rc[1])
    else:
        if sys.version_info[0] < 3:
            raise RuntimeError("Terminal size not obtainable on Windows with a\
Python version older than 3")
    def ioctl_gwinsz(fd):
        try:
            rc = struct.unpack('hh', fcntl.ioctl(
                fd, termios.TIOCGWINSZ, '1234'))
        except BaseException:
            return
        return rc
    rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
    if not rc:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            rc = ioctl_gwinsz(fd)
            os.close(fd)
        except BaseException:
            pass
    if not rc:
        rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        return int(rc[0]), int(rc[1])

    return int(rc[0]), int(rc[1])
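
A hedged sketch of the TIOCGWINSZ query that terminal_size() performs above; it works on Unix terminals and returns None when the descriptor is not a tty:

    import fcntl
    import struct
    import termios

    def winsize(fd=0):
        try:
            rows, cols = struct.unpack(
                'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
            return rows, cols
        except OSError:
            return None

    print(winsize())  # e.g. (40, 120) in an interactive shell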
@@ -8,19 +8,15 @@
from __future__ import unicode_literals

import atexit
import ctypes
import errno
import io
import multiprocessing
import os
import re
import select
import signal
import sys
import threading
import traceback
from contextlib import contextmanager
from threading import Thread
from types import ModuleType  # novm
from typing import Optional  # novm

@@ -403,7 +399,7 @@ def replace_environment(env):
    os.environ[name] = val


def log_output(*args, **kwargs):
class log_output(object):
    """Context manager that logs its output to a file.

    In the simplest case, the usage looks like this::

@@ -418,7 +414,6 @@ def log_output(*args, **kwargs):
        with log_output('logfile.txt', echo=True):
            # do things ... output will be logged and printed out

    The following is available on Unix only. No-op on Windows.
    And, if you just want to echo *some* stuff from the parent, use
    ``force_echo``::

@@ -428,20 +423,6 @@ def log_output(*args, **kwargs):
        with logger.force_echo():
            # things here will be echoed *and* logged

    See individual log classes for more information.


    This method is actually a factory serving a per platform
    (unix vs windows) log_output class
    """
    if sys.platform == 'win32':
        return winlog(*args, **kwargs)
    else:
        return nixlog(*args, **kwargs)


class nixlog(object):
    """
    Under the hood, we spawn a daemon and set up a pipe between this
    process and the daemon. The daemon writes our output to both the
    file and to stdout (if echoing). The parent process can communicate
@@ -583,7 +564,7 @@ def __enter__(self):
        sys.stdout.flush()
        sys.stderr.flush()

        # Now do the actual output redirection.
        # Now do the actual output rediction.
        self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
        if self.use_fds:
            # We try first to use OS-level file descriptors, as this
@@ -690,175 +671,6 @@ def force_echo(self):
        sys.stdout.flush()


class StreamWrapper:
    """Wrapper class to handle redirection of io streams."""
    def __init__(self, sys_attr):
        self.sys_attr = sys_attr
        self.saved_stream = None
        if sys.platform.startswith('win32'):
            if sys.version_info < (3, 5):
                libc = ctypes.CDLL(ctypes.util.find_library('c'))
            else:
                if hasattr(sys, 'gettotalrefcount'):  # debug build
                    libc = ctypes.CDLL('ucrtbased')
                else:
                    libc = ctypes.CDLL('api-ms-win-crt-stdio-l1-1-0')

            kernel32 = ctypes.WinDLL('kernel32')

            # https://docs.microsoft.com/en-us/windows/console/getstdhandle
            if self.sys_attr == 'stdout':
                STD_HANDLE = -11
            elif self.sys_attr == 'stderr':
                STD_HANDLE = -12
            else:
                raise KeyError(self.sys_attr)

            c_stdout = kernel32.GetStdHandle(STD_HANDLE)
            self.libc = libc
            self.c_stream = c_stdout
        else:
            self.libc = ctypes.CDLL(None)
            self.c_stream = ctypes.c_void_p.in_dll(self.libc, self.sys_attr)
        self.sys_stream = getattr(sys, self.sys_attr)
        self.orig_stream_fd = self.sys_stream.fileno()
        # Save a copy of the original stdout fd in saved_stream
        self.saved_stream = os.dup(self.orig_stream_fd)

    def redirect_stream(self, to_fd):
        """Redirect stdout to the given file descriptor."""
        # Flush the C-level buffer stream
        if sys.platform.startswith('win32'):
            self.libc.fflush(None)
        else:
            self.libc.fflush(self.c_stream)
        # Flush and close sys_stream - also closes the file descriptor (fd)
        sys_stream = getattr(sys, self.sys_attr)
        sys_stream.flush()
        sys_stream.close()
        # Make orig_stream_fd point to the same file as to_fd
        os.dup2(to_fd, self.orig_stream_fd)
        # Set sys_stream to a new stream that points to the redirected fd
        new_buffer = open(self.orig_stream_fd, 'wb')
        new_stream = io.TextIOWrapper(new_buffer)
        setattr(sys, self.sys_attr, new_stream)
        self.sys_stream = getattr(sys, self.sys_attr)

    def flush(self):
        if sys.platform.startswith('win32'):
            self.libc.fflush(None)
        else:
            self.libc.fflush(self.c_stream)
        self.sys_stream.flush()

    def close(self):
        """Redirect back to the original system stream, and close stream."""
        try:
            if self.saved_stream is not None:
                self.redirect_stream(self.saved_stream)
        finally:
            if self.saved_stream is not None:
                os.close(self.saved_stream)


class winlog(object):
    """
    Similar to nixlog, with underlying
    functionality ported to support Windows.

    Does not support the use of 'v' toggling as nixlog does.
    """
    def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
                 env=None, filter_fn=None):
        self.env = env
        self.debug = debug
        self.echo = echo
        self.logfile = file_like
        self.stdout = StreamWrapper('stdout')
        self.stderr = StreamWrapper('stderr')
        self._active = False
        self._ioflag = False
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr

    def __enter__(self):
        if self._active:
            raise RuntimeError("Can't re-enter the same log_output!")

        if self.logfile is None:
            raise RuntimeError(
                "file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if type(self.logfile) == StringIO:
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
            sys.stderr = self.logfile
        else:
            self.writer = open(self.logfile, mode='wb+')
            self.reader = open(self.logfile, mode='rb+')

            # Dup stdout so we can still write to it after redirection
            self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
            # Redirect stdout and stderr to write to logfile
            self.stderr.redirect_stream(self.writer.fileno())
            self.stdout.redirect_stream(self.writer.fileno())
            self._kill = threading.Event()

            def background_reader(reader, echo_writer, _kill):
                # for each line printed to logfile, read it
                # if echo: write line to user
                try:
                    while True:
                        is_killed = _kill.wait(.1)
                        # Flush buffered build output to file
                        # stdout/err fds refer to log file
                        self.stderr.flush()
                        self.stdout.flush()

                        line = reader.readline()
                        if self.echo and line:
                            echo_writer.write('{0}'.format(line.decode()))
                            echo_writer.flush()

                        if is_killed:
                            break
                finally:
                    reader.close()

            self._active = True
            with replace_environment(self.env):
                self._thread = Thread(target=background_reader,
                                      args=(self.reader, self.echo_writer, self._kill))
                self._thread.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._ioflag:
            sys.stdout = self.old_stdout
            sys.stderr = self.old_stderr
            self._ioflag = False
        else:
            self.writer.close()
            self.echo_writer.flush()
            self.stdout.flush()
            self.stderr.flush()
            self._kill.set()
            self._thread.join()
            self.stdout.close()
            self.stderr.close()
        self._active = False

    @contextmanager
    def force_echo(self):
        """Context manager to force local echo, even if echo is off."""
        if not self._active:
            raise RuntimeError(
                "Can't call force_echo() outside log_output region!")
        yield


def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
                   log_file_wrapper, control_pipe, filter_fn):
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.
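
A hedged sketch of the fd-level redirection trick StreamWrapper relies on: duplicate the real stdout fd, point fd 1 at a log file with os.dup2, then restore (the file name is illustrative):

    import os
    import sys

    saved = os.dup(sys.stdout.fileno())   # keep a handle on the real stdout
    log_fd = os.open('build.log', os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    sys.stdout.flush()
    os.dup2(log_fd, sys.stdout.fileno())  # fd 1 now writes to build.log
    print('this line goes to the log file')
    sys.stdout.flush()
    os.dup2(saved, sys.stdout.fileno())   # restore the terminal
    os.close(saved)
    os.close(log_fd)
    print('back on the terminal')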
@@ -11,7 +11,6 @@
things like timeouts in ``ProcessController.wait()``, which are set to
get tests done quickly, not to avoid high CPU usage.

Note: The functionality in this module is unsupported on Windows
"""
from __future__ import print_function

@@ -20,6 +19,7 @@
import re
import signal
import sys
import termios
import time
import traceback

@@ -27,13 +27,6 @@

from spack.util.executable import which

termios = None
try:
    import termios as term_mod
    termios = term_mod
except ImportError:
    pass


class ProcessController(object):
    """Wrapper around some fundamental process control operations.
@@ -3,11 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 18, 0)
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 17, 1)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = '.'.join(str(s) for s in spack_version_info)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)

__all__ = ['spack_version_info', 'spack_version']
__version__ = spack_version
@@ -41,14 +41,11 @@ def _search_duplicate_compilers(error_cls):

from six.moves.urllib.request import urlopen

import llnl.util.lang
from llnl.util.compat import Sequence
try:
    from collections.abc import Sequence  # novm
except ImportError:
    from collections import Sequence

import spack.config
import spack.patch
import spack.repo
import spack.spec
import spack.variant

#: Map an audit tag to a list of callables implementing checks
CALLBACKS = {}
@@ -183,6 +180,7 @@ def run_check(tag, **kwargs):
@config_compiler
def _search_duplicate_compilers(error_cls):
    """Report compilers with the same spec and two different definitions"""
    import spack.config
    errors = []

    compilers = list(sorted(
@@ -219,6 +217,8 @@ def _search_duplicate_compilers(error_cls):
@config_packages
def _search_duplicate_specs_in_externals(error_cls):
    """Search for duplicate specs declared as externals"""
    import spack.config

    errors, externals = [], collections.defaultdict(list)
    packages_yaml = spack.config.get('packages')

@@ -265,7 +265,6 @@ def _search_duplicate_specs_in_externals(error_cls):
    kwargs=('pkgs',)
)


#: Sanity checks on linting
# This can take some time, so it's run separately from packages
package_https_directives = AuditClass(
@@ -276,58 +275,15 @@ def _search_duplicate_specs_in_externals(error_cls):
)


@package_directives
def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
        test_callbacks = pkg.build_time_test_callbacks

        if test_callbacks and 'test' in test_callbacks:
            msg = ('{0} package contains "test" method in '
                   'build_time_test_callbacks')
            instr = ('Remove "test" from: [{0}]'
                     .format(', '.join(test_callbacks)))
            errors.append(error_cls(msg.format(pkg.name), [instr]))

    return errors


@package_directives
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
        for condition, patches in pkg.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
                    continue

                if not re.match(github_patch_url_re, patch.url):
                    continue

                full_index_arg = "?full_index=1"
                if not patch.url.endswith(full_index_arg):
                    errors.append(error_cls(
                        "patch URL in package {0} must end with {1}".format(
                            pkg.name, full_index_arg,
                        ),
                        [patch.url],
                    ))

    return errors


@package_https_directives
def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
    import llnl.util.lang

    import spack.repo
    import spack.spec

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
@@ -352,6 +308,11 @@ def _linting_package_file(pkgs, error_cls):
@package_directives
def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    import llnl.util.lang

    import spack.repo
    import spack.spec

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
@@ -406,6 +367,9 @@ def _unknown_variants_in_directives(pkgs, error_cls):
@package_directives
def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    import spack.repo
    import spack.spec

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
@@ -453,6 +417,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""
    import spack.repo

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
@@ -489,6 +455,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls


def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    import spack.variant
    variant_exceptions = (
        spack.variant.InconsistentValidationError,
        spack.variant.MultipleValuesInExclusiveVariantError,
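
A small sketch of the _check_patch_urls rule above, reusing its regex; the URL is made up:

    import re

    github_patch_url_re = (
        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    url = 'https://github.com/org/repo/pull/1234.patch'
    if re.match(github_patch_url_re, url) and not url.endswith('?full_index=1'):
        print('patch URL must end with ?full_index=1')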
(File diff suppressed because it is too large)
@@ -5,7 +5,6 @@
from __future__ import print_function

import contextlib
import copy
import fnmatch
import functools
import json
@@ -22,7 +21,6 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config
@@ -38,11 +36,6 @@
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = 'metadata.yaml'

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}
@@ -210,43 +203,12 @@ def _executables_in_store(executables, query_spec, query_info=None):
    return False


class _BootstrapperBase(object):
    """Base class to derive types that can bootstrap software for Spack"""
    config_scope_name = ''

@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(object):
    """Install the software needed during bootstrapping from a buildcache."""
    def __init__(self, conf):
        self.name = conf['name']
        self.url = conf['info']['url']

    @property
    def mirror_url(self):
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's an url if we find it
        if '://' in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
        )


@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(_BootstrapperBase):
    """Install the software needed during bootstrapping from a buildcache."""

    config_scope_name = 'bootstrap_buildcache'

    def __init__(self, conf):
        super(_BuildcacheBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.last_search = None

    @staticmethod
@@ -269,8 +231,9 @@ def _spec_and_platform(abstract_spec_str):
    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = '{0}.json'.format(package_name)
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        json_path = os.path.join(
            spack.paths.share_path, 'bootstrap', self.name, json_filename
        )
        with open(json_path) as f:
            data = json.load(f)
        return data
@@ -344,6 +307,12 @@ def _install_and_test(
            return True
        return False

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
        )

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
@@ -373,13 +342,9 @@ def try_search_path(self, executables, abstract_spec_str):


@_bootstrapper(type='install')
class _SourceBootstrapper(_BootstrapperBase):
class _SourceBootstrapper(object):
    """Install the software needed during bootstrapping from sources."""
    config_scope_name = 'bootstrap_source'

    def __init__(self, conf):
        super(_SourceBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.conf = conf
        self.last_search = None

@@ -412,8 +377,7 @@ def try_import(self, module, abstract_spec_str):
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)
        concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
@@ -426,8 +390,6 @@ def try_search_path(self, executables, abstract_spec_str):
            self.last_search = info
            return True

        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()
@@ -440,8 +402,7 @@ def try_search_path(self, executables, abstract_spec_str):

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
@@ -456,10 +417,11 @@ def _make_bootstrapper(conf):
    return _bootstrap_methods[btype](conf)


def _validate_source_is_trusted(conf):
def _source_is_trusted(conf):
    trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
    if name not in trusted:
        raise ValueError('source is not trusted')
        return False
    return trusted[name]


def spec_for_current_python():
@@ -524,26 +486,36 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
        return

    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])

    h = GroupedExceptionHandler()
    errors = {}

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            _validate_source_is_trusted(current_config)
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
                   'not trusted').format(module, current_config['name'])
            tty.debug(msg)
            continue

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_import(module, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))
            errors[current_config['name']] = e

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    msg += ' due to the following failures:\n'
    for method in errors:
        err = errors[method]
        msg += "    '{0}' raised {1}: {2}\n".format(
            method, err.__class__.__name__, str(err))
    msg += '    Please run `spack -d spec zlib` for more verbose error messages'
    raise ImportError(msg)


@@ -566,14 +538,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
        return cmd

    executables_str = ', '.join(executables)
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
                   'not trusted').format(executables_str, current_config['name'])
            tty.debug(msg)
            continue

    h = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            _validate_source_is_trusted(current_config)

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
@@ -588,16 +562,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
            )
            cmd.add_default_envmod(env_mods)
            return cmd
        except Exception as e:
            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
            tty.debug(msg.format(executables_str, str(e)))

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise RuntimeError(msg)


@@ -755,11 +727,9 @@ def _root_spec(spec_str):
    spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS and Windows.
    # everything but MacOS.
    if str(spack.platforms.host()) == 'darwin':
        spec_str += ' %apple-clang'
    elif str(spack.platforms.host()) == 'windows':
        spec_str += ' %msvc'
    else:
        spec_str += ' %gcc'

@@ -854,19 +824,6 @@ def ensure_flake8_in_path_or_raise():
    return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)


def all_root_specs(development=False):
    """Return a list of all the root specs that may be used to bootstrap Spack.

    Args:
        development (bool): if True include dev dependencies
    """
    specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
    if development:
        specs += [isort_root_spec(), mypy_root_spec(),
                  black_root_spec(), flake8_root_spec()]
    return specs


def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'
@@ -1004,23 +961,3 @@ def status_message(section):
    msg += '\n'
    msg = msg.format(pass_token if not missing_software else fail_token)
    return msg, missing_software


def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml) as f:
            current.update(spack.util.spack_yaml.load(f))
        list_of_sources.append(current)
    return list_of_sources
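
A hedged sketch of the control flow introduced above: try each configured source, forward failures into a GroupedExceptionHandler, and raise one grouped error only if nothing worked (the source names and the attempt callable are illustrative, and the sketch assumes at least one source raises):

    from llnl.util.lang import GroupedExceptionHandler

    def bootstrap_from(sources, attempt):
        handler = GroupedExceptionHandler()
        for name in sources:
            with handler.forward(name):
                if attempt(name):
                    return name  # success short-circuits the loop
        # every source either raised or returned False
        raise ImportError(
            'cannot bootstrap ' + handler.grouped_message(with_tracebacks=False))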
@@ -46,7 +46,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

@@ -111,20 +110,6 @@
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'


def should_set_parallel_jobs(jobserver_support=False):
    """Returns true in general, except when:
    - The env variable SPACK_NO_PARALLEL_MAKE=1 is set
    - jobserver_support is enabled, and a jobserver was found.
    """
    if (
        jobserver_support and
        'MAKEFLAGS' in os.environ and
        '--jobserver' in os.environ['MAKEFLAGS']
    ):
        return False
    return not env_flag(SPACK_NO_PARALLEL_MAKE)


class MakeExecutable(Executable):
    """Special callable executable object for make so the user can specify
    parallelism options on a per-invocation basis. Specifying
@@ -134,6 +119,9 @@ class MakeExecutable(Executable):
    call will name an environment variable which will be set to the
    parallelism level (without affecting the normal invocation with
    -j).

    Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
    everything.
    """

    def __init__(self, name, jobs):
@@ -144,8 +132,9 @@ def __call__(self, *args, **kwargs):
        """parallel, and jobs_env from kwargs are swallowed and used here;
        remaining arguments are passed through to the superclass.
        """
        parallel = should_set_parallel_jobs(jobserver_support=True) and \
            kwargs.pop('parallel', self.jobs > 1)

        disable = env_flag(SPACK_NO_PARALLEL_MAKE)
        parallel = (not disable) and kwargs.pop('parallel', self.jobs > 1)

        if parallel:
            args = ('-j{0}'.format(self.jobs),) + args
@@ -191,7 +180,7 @@ def clean_environment():
    env.unset('PYTHONPATH')

    # Affects GNU make, can e.g. indirectly inhibit enabling parallel build
    # env.unset('MAKEFLAGS')
    env.unset('MAKEFLAGS')

    # Avoid that libraries of build dependencies get hijacked.
    env.unset('LD_PRELOAD')
@@ -384,8 +373,7 @@ def set_wrapper_variables(pkg, env):
    # directory. Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path,
        os.path.dirname(pkg.compiler.link_paths['cc']))
        spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths['cc']))
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, 'case-insensitive')
@@ -538,9 +526,7 @@ def _set_variables_for_single_module(pkg, module):
    m.cmake = Executable('cmake')
    m.ctest = MakeExecutable('ctest', jobs)

    if sys.platform == 'win32':
        m.nmake = Executable('nmake')
    # Standard CMake arguments
    # Standard build system arguments
    m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg)
    m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg)
    m.std_pip_args = spack.build_systems.python.PythonPackage._std_args(pkg)
@@ -559,7 +545,7 @@ def _set_variables_for_single_module(pkg, module):
    m.makedirs = os.makedirs
    m.remove = os.remove
    m.removedirs = os.removedirs
    m.symlink = symlink
    m.symlink = os.symlink

    m.mkdirp = mkdirp
    m.install = install
@@ -682,11 +668,11 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
    shared_lib_link = os.path.basename(shared_lib)

    if version or compat_version:
        symlink(shared_lib_link, shared_lib_base)
        os.symlink(shared_lib_link, shared_lib_base)

        if compat_version and compat_version != version:
            symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
                                                      compat_version))
            os.symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
                                                         compat_version))

    return compiler(*compiler_args, output=compiler_output)

@@ -835,13 +821,12 @@ def setup_package(pkg, dirty, context='build'):
    for mod in pkg.compiler.modules:
        load_module(mod)

    # kludge to handle cray mpich and libsci being automatically loaded by
    # PrgEnv modules on cray platform. Module unload does no damage when
    # kludge to handle cray libsci being automatically loaded by PrgEnv
    # modules on cray platform. Module unload does no damage when
    # unnecessary
    on_cray, _ = _on_cray()
    if on_cray and not dirty:
        for mod in ['cray-mpich', 'cray-libsci']:
            module('unload', mod)
    if on_cray:
        module('unload', 'cray-libsci')

    if target.module_name:
        load_module(target.module_name)
@@ -1038,7 +1023,7 @@ def get_cmake_prefix_path(pkg):


def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
                       input_multiprocess_fd, jsfd1, jsfd2):
                       input_multiprocess_fd):

    context = kwargs.get('context', 'build')

@@ -1145,29 +1130,19 @@ def child_fun():
    """
    parent_pipe, child_pipe = multiprocessing.Pipe()
    input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None

    serialized_pkg = spack.subprocess_context.PackageInstallContext(pkg)

    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin,
                                                                      'fileno'):
        if sys.stdin.isatty() and hasattr(sys.stdin, 'fileno'):
            input_fd = os.dup(sys.stdin.fileno())
            input_multiprocess_fd = MultiProcessFd(input_fd)
        mflags = os.environ.get('MAKEFLAGS', False)
        if mflags:
            m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
            if m:
                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
                jobserver_fd2 = MultiProcessFd(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
            args=(serialized_pkg, function, kwargs, child_pipe,
                  input_multiprocess_fd, jobserver_fd1, jobserver_fd2))

                  input_multiprocess_fd))
        p.start()

    except InstallError as e:
@@ -14,7 +14,7 @@
from llnl.util.filesystem import force_remove, working_dir

from spack.build_environment import InstallError
from spack.directives import conflicts, depends_on
from spack.directives import depends_on
from spack.operating_systems.mac_os import macos_version
from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable
@@ -76,7 +76,7 @@ def patch_config_files(self):
or self.spec.satisfies('target=riscv64:'))

#: Whether or not to update ``libtool``
#: (currently only for Arm/Clang/Fujitsu/NVHPC compilers)
#: (currently only for Arm/Clang/Fujitsu compilers)
patch_libtool = True

#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build`
@@ -104,7 +104,6 @@ def patch_config_files(self):
depends_on('gnuconfig', type='build', when='target=ppc64le:')
depends_on('gnuconfig', type='build', when='target=aarch64:')
depends_on('gnuconfig', type='build', when='target=riscv64:')
conflicts('platform=windows')

@property
def _removed_la_files_log(self):
@@ -252,7 +251,7 @@ def _set_autotools_environment_variables(self):
def _do_patch_libtool(self):
"""If configure generates a "libtool" script that does not correctly
detect the compiler (and patch_libtool is set), patch in the correct
flags for the Arm, Clang/Flang, Fujitsu and NVHPC compilers."""
flags for the Arm, Clang/Flang, and Fujitsu compilers."""

# Exit early if we are required not to patch libtool
if not self.patch_libtool:
@@ -263,12 +262,9 @@ def _do_patch_libtool(self):
self._patch_libtool(libtool_path)

def _patch_libtool(self, libtool_path):
if (
self.spec.satisfies('%arm') or
self.spec.satisfies('%clang') or
self.spec.satisfies('%fj') or
self.spec.satisfies('%nvhpc')
):
if self.spec.satisfies('%arm')\
or self.spec.satisfies('%clang')\
or self.spec.satisfies('%fj'):
fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
fs.filter_file('pic_flag=""\n',
'pic_flag="{0}"\n'

@@ -210,10 +210,6 @@ def std_initconfig_entries(self):
"#------------------{0}\n".format("-" * 60),
]

def initconfig_package_entries(self):
"""This method is to be overwritten by the package"""
return []

def initconfig(self, spec, prefix):
cache_entries = (self.std_initconfig_entries() +
self.initconfig_compiler_entries() +

@@ -8,8 +8,7 @@
import os
import platform
import re
import sys
from typing import List
from typing import List # novm

import six

@@ -19,7 +18,6 @@
import spack.build_environment
from spack.directives import conflicts, depends_on, variant
from spack.package import InstallError, PackageBase, run_after
from spack.util.path import convert_to_posix_path

# Regex to extract the primary generator from the CMake generator
# string.
@@ -93,12 +91,7 @@ class CMakePackage(PackageBase):
#:
#: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
#: for more information.

generator = "Unix Makefiles"

if sys.platform == 'win32':
generator = "Ninja"
depends_on('ninja')
generator = 'Unix Makefiles'

# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
variant('build_type', default='RelWithDebInfo',
@@ -145,11 +138,10 @@ def std_cmake_args(self):
@staticmethod
def _std_args(pkg):
"""Computes the standard cmake arguments for a generic package"""

try:
generator = pkg.generator
except AttributeError:
generator = CMakePackage.generator
generator = 'Unix Makefiles'

# Make sure a valid generator was chosen
valid_primary_generators = ['Unix Makefiles', 'Ninja']
@@ -174,9 +166,8 @@ def _std_args(pkg):
define = CMakePackage.define
args = [
'-G', generator,
define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
define('CMAKE_INSTALL_PREFIX', pkg.prefix),
define('CMAKE_BUILD_TYPE', build_type),
define('BUILD_TESTING', pkg.run_tests),
]

# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -194,7 +185,7 @@ def _std_args(pkg):

# Set up CMake rpath
args.extend([
define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', True),
define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
define('CMAKE_INSTALL_RPATH',
spack.build_environment.get_rpaths(pkg)),
define('CMAKE_PREFIX_PATH',
@@ -362,7 +353,6 @@ def cmake_args(self):

* CMAKE_INSTALL_PREFIX
* CMAKE_BUILD_TYPE
* BUILD_TESTING

which will be set automatically.

@@ -107,10 +107,10 @@ def cuda_flags(arch_list):
# each release of a new cuda minor version.
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
conflicts('%clang@14:', when='+cuda ^cuda@:11.7')
conflicts('%clang@14:', when='+cuda ^cuda@:11.6')

# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
@@ -188,3 +188,7 @@ def cuda_flags(arch_list):
# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')

# Make sure cuda_arch can not be used without +cuda
for value in cuda_arch_values:
conflicts('~cuda', when='cuda_arch=' + value)

@@ -686,15 +686,15 @@ def openmp_libs(self):
# packages.yaml), specifically to provide the 'iomp5' libs.

elif '%gcc' in self.spec:
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
gcc = Executable(self.compiler.cc)
omp_lib_path = gcc(
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
omp_libs = LibraryList(omp_lib_path.strip())

elif '%clang' in self.spec:
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
'--print-file-name', 'libomp.%s' % dso_suffix, output=str)
clang = Executable(self.compiler.cc)
omp_lib_path = clang(
'--print-file-name', 'libomp.%s' % dso_suffix, output=str)
omp_libs = LibraryList(omp_lib_path.strip())

if len(omp_libs) < 1:
@@ -735,9 +735,8 @@ def tbb_libs(self):

# TODO: clang(?)
gcc = self._gcc_executable # must be gcc, not self.compiler.cc
with self.compiler.compiler_environment():
cxx_lib_path = gcc(
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
cxx_lib_path = gcc(
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)

libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
debug_print(libs)
@@ -747,9 +746,8 @@ def tbb_libs(self):
def _tbb_abi(self):
'''Select the ABI needed for linking TBB'''
gcc = self._gcc_executable
with self.compiler.compiler_environment():
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
gcc('--version', output=str), re.I | re.M)
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
gcc('--version', output=str), re.I | re.M)
abi = ''
if sys.platform == 'darwin':
pass

@@ -1,102 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import os

from llnl.util.filesystem import find

from spack.directives import depends_on, extends
from spack.multimethod import when
from spack.package import PackageBase
from spack.util.executable import Executable


class LuaPackage(PackageBase):
"""Specialized class for lua packages"""

phases = ['unpack', 'generate_luarocks_config', 'preprocess', 'install']
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = 'LuaPackage'

list_depth = 1 # LuaRocks requires at least one level of spidering to find versions
depends_on('lua-lang')
extends('lua', when='^lua')
with when('^lua-luajit'):
extends('lua-luajit')
depends_on('luajit')
depends_on('lua-luajit+lualinks')
with when('^lua-luajit-openresty'):
extends('lua-luajit-openresty')
depends_on('luajit')
depends_on('lua-luajit-openresty+lualinks')

def unpack(self, spec, prefix):
if os.path.splitext(self.stage.archive_file)[1] == '.rock':
directory = self.luarocks('unpack', self.stage.archive_file, output=str)
dirlines = directory.split('\n')
# TODO: figure out how to scope this better
os.chdir(dirlines[2])

def _generate_tree_line(self, name, prefix):
return """{{ name = "{name}", root = "{prefix}" }};""".format(
name=name,
prefix=prefix,
)

def _luarocks_config_path(self):
return os.path.join(self.stage.source_path, 'spack_luarocks.lua')

def generate_luarocks_config(self, spec, prefix):
spec = self.spec
table_entries = []
for d in spec.traverse(
deptypes=("build", "run"), deptype_query="run"
):
if d.package.extends(self.extendee_spec):
table_entries.append(self._generate_tree_line(d.name, d.prefix))

path = self._luarocks_config_path()
with open(path, 'w') as config:
config.write(
"""
deps_mode="all"
rocks_trees={{
{}
}}
""".format(
"\n".join(table_entries)
)
)
return path

def setup_build_environment(self, env):
env.set('LUAROCKS_CONFIG', self._luarocks_config_path())

def preprocess(self, spec, prefix):
"""Override this to preprocess source before building with luarocks"""
pass

@property
def lua(self):
return Executable(self.spec['lua-lang'].prefix.bin.lua)

@property
def luarocks(self):
lr = Executable(self.spec['lua-lang'].prefix.bin.luarocks)
return lr

def luarocks_args(self):
return []

def install(self, spec, prefix):
rock = '.'
specs = find('.', '*.rockspec', recursive=False)
if specs:
rock = specs[0]
rocks_args = self.luarocks_args()
rocks_args.append(rock)
self.luarocks('--tree=' + prefix, 'make', *rocks_args)

@@ -10,7 +10,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

from spack.directives import conflicts
from spack.package import PackageBase, run_after


@@ -56,7 +55,6 @@ class MakefilePackage(PackageBase):
#: phase
install_targets = ['install']

conflicts('platform=windows')
#: Callback names for build-time test
build_time_test_callbacks = ['check']

@@ -30,15 +30,6 @@ class IntelOneApiPackage(Package):
# organization (e.g. University/Company).
redistribute_source = False

@staticmethod
def update_description(cls):
"""Updates oneapi package descriptions with common text."""

text = """ LICENSE INFORMATION: By downloading and using this software, you agree to the terms
and conditions of the software license agreements at https://intel.ly/393CijO."""
cls.__doc__ = cls.__doc__ + text
return cls

@property
def component_dir(self):
"""Subdirectory for this component in the install prefix."""

@@ -11,7 +11,7 @@
from llnl.util.filesystem import (
filter_file,
find,
is_nonsymlink_exe_with_shebang,
get_filetype,
path_contains_subdirectory,
same_path,
working_dir,
@@ -216,7 +216,7 @@ def view_file_conflicts(self, view, merge_map):

return conflicts

def add_files_to_view(self, view, merge_map, skip_if_exists=True):
def add_files_to_view(self, view, merge_map):
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
@@ -230,7 +230,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
view.link(src, dst)
elif not os.path.islink(src):
shutil.copy2(src, dst)
is_script = is_nonsymlink_exe_with_shebang(src)
is_script = 'script' in get_filetype(src)
if is_script and not python_is_external:
filter_file(
python_prefix, os.path.abspath(

@@ -1,70 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends
from spack.package import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError


class RacketPackage(PackageBase):
"""Specialized class for packages that are built using Racket's
`raco pkg install` and `raco setup` commands.

This class provides the following phases that can be overridden:

* install
* setup
"""
#: Package name, version, and extension on PyPI
maintainers = ['elfprince13']

# Default phases
phases = ['install']

# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = 'RacketPackage'

extends('racket')

pkgs = False
subdirectory = None
name = None
parallel = True

@property
def homepage(self):
if self.pkgs:
return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)

@property
def build_directory(self):
ret = os.getcwd()
if self.subdirectory:
ret = os.path.join(ret, self.subdirectory)
return ret

def install(self, spec, prefix):
"""Install everything from build directory."""
raco = Executable("raco")
with working_dir(self.build_directory):
allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
args = ['pkg', 'install', '-t', 'dir', '-n', self.name, '--deps', 'fail',
'--ignore-implies', '--copy', '-i', '-j',
str(determine_number_of_jobs(allow_parallel)),
'--', os.getcwd()]
try:
raco(*args)
except ProcessError:
args.insert(-2, "--skip-installed")
raco(*args)
tty.warn(("Racket package {0} was already installed, uninstalling via "
"Spack may make someone unhappy!").format(self.name))

@@ -112,6 +112,10 @@ class ROCmPackage(PackageBase):
# need amd gpu type for rocm builds
conflicts('amdgpu_target=none', when='+rocm')

# Make sure amdgpu_targets cannot be used without +rocm
for value in amdgpu_targets:
conflicts('~rocm', when='amdgpu_target=' + value)

# https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
# It seems that hip-clang does not (yet?) accept this flag, in which case
# we will still need to set the HCC_AMDGPU_TARGET environment flag in the

@@ -8,7 +8,6 @@

import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink

import spack.config
import spack.error
@@ -86,7 +85,7 @@ def symlink(self, mirror_ref):
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
symlink(relative_dst, cosmetic_path)
os.symlink(relative_dst, cosmetic_path)


#: Spack's local cache for downloaded source archives

File diff suppressed because it is too large

@@ -308,7 +308,8 @@ def optimizer(yaml):
# try factoring out commonly repeated portions
common_job = {
'variables': {
'SPACK_COMPILER_ACTION': 'NONE'
'SPACK_COMPILER_ACTION': 'NONE',
'SPACK_RELATED_BUILDS_CDASH': ''
},

'after_script': ['rm -rf "./spack"'],

@@ -155,17 +155,31 @@ def parse_specs(args, **kwargs):
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)

sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests) # implies normalize
elif normalize:
spec.normalize(tests=tests)
try:
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests) # implies normalize
elif normalize:
spec.normalize(tests=tests)

return specs
return specs

except spack.spec.SpecParseError as e:
msg = e.message + "\n" + str(e.string) + "\n"
msg += (e.pos + 2) * " " + "^"
raise spack.error.SpackError(msg)

except spack.error.SpecError as e:

msg = e.message
if e.long_message:
msg += e.long_message

raise spack.error.SpackError(msg)


def matching_spec_from_env(spec):

@@ -26,10 +26,6 @@ def setup_parser(subparser):


def activate(parser, args):

tty.warn("spack activate is deprecated in favor of "
"environments and will be removed in v0.19.0")

specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
tty.die("activate requires one spec. %d given." % len(specs))

@@ -6,9 +6,7 @@

import os.path
import shutil
import tempfile

import llnl.util.filesystem
import llnl.util.tty
import llnl.util.tty.color

@@ -17,9 +15,6 @@
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path

description = "manage bootstrap configuration"
@@ -27,38 +22,6 @@
level = "long"


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = 'bootstrap_cache'

# Metadata for a generated binary mirror
BINARY_METADATA = {
'type': 'buildcache',
'description': ('Buildcache copied from a public tarball available on Github.'
'The sha256 checksum of binaries is checked before installation.'),
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
'tarball': BINARY_TARBALL
}
}

CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'

# Metadata for a generated source mirror
SOURCE_METADATA = {
'type': 'install',
'description': 'Mirror with software needed to bootstrap Spack',
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
}
}


def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
@@ -104,61 +67,24 @@ def setup_parser(subparser):
)

list = sp.add_parser(
'list', help='list all the sources of software to bootstrap Spack'
'list', help='list the methods available for bootstrapping'
)
_add_scope_option(list)

trust = sp.add_parser(
'trust', help='trust a bootstrapping source'
'trust', help='trust a bootstrapping method'
)
_add_scope_option(trust)
trust.add_argument(
'name', help='name of the source to be trusted'
'name', help='name of the method to be trusted'
)

untrust = sp.add_parser(
'untrust', help='untrust a bootstrapping source'
'untrust', help='untrust a bootstrapping method'
)
_add_scope_option(untrust)
untrust.add_argument(
'name', help='name of the source to be untrusted'
)

add = sp.add_parser(
'add', help='add a new source for bootstrapping'
)
_add_scope_option(add)
add.add_argument(
'--trust', action='store_true',
help='trust the source immediately upon addition')
add.add_argument(
'name', help='name of the new source of software'
)
add.add_argument(
'metadata_dir', help='directory where to find metadata files'
)

remove = sp.add_parser(
'remove', help='remove a bootstrapping source'
)
remove.add_argument(
'name', help='name of the source to be removed'
)

mirror = sp.add_parser(
'mirror', help='create a local mirror to bootstrap Spack'
)
mirror.add_argument(
'--binary-packages', action='store_true',
help='download public binaries in the mirror'
)
mirror.add_argument(
'--dev', action='store_true',
help='download dev dependencies too'
)
mirror.add_argument(
metavar='DIRECTORY', dest='root_dir',
help='root directory in which to create the mirror and metadata'
'name', help='name of the method to be untrusted'
)


@@ -211,7 +137,10 @@ def _root(args):


def _list(args):
sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
sources = spack.config.get(
'bootstrap:sources', default=None, scope=args.scope
)

if not sources:
llnl.util.tty.msg(
"No method available for bootstrapping Spack's dependencies"
@@ -320,119 +249,6 @@ def _status(args):
print()


def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]

# If the name is already used error out
if args.name in names:
msg = 'a source named "{0}" already exist. Please choose a different name'
raise RuntimeError(msg.format(args.name))

# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
raise RuntimeError(
'the directory "{0}" does not exist'.format(args.metadata_dir)
)

file = os.path.join(metadata_dir, 'metadata.yaml')
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))

# Insert the new source as the highest priority one
write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
sources = [
{'name': args.name, 'metadata': args.metadata_dir}
] + sources
spack.config.set('bootstrap:sources', sources, scope=write_scope)

msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_trust(args)


def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]
if args.name not in names:
msg = ('cannot find any bootstrapping source named "{0}". '
'Run `spack bootstrap list` to see available sources.')
raise RuntimeError(msg.format(args.name))

for current_scope in spack.config.scopes():
sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
if args.name in [s['name'] for s in sources]:
sources = [s for s in sources if s['name'] != args.name]
spack.config.set('bootstrap:sources', sources, scope=current_scope)
msg = ('Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.')
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))


def _mirror(args):
mirror_dir = os.path.join(args.root_dir, LOCAL_MIRROR_DIR)

# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
# Suppress tty from the call below for terser messages
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)

if args.binary_packages:
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
llnl.util.tty.set_msg_enabled(False)
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
stage.create()
stage.fetch()
stage.expand_archive()
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join('metadata', subdir)
metadata_yaml = os.path.join(
args.root_dir, metadata_rel_dir, 'metadata.yaml'
)
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode='w') as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

instructions = ('\nTo register the mirror on the platform where it\'s supposed '
'to be used, move "{0}" to its final location and run the '
'following command(s):\n\n').format(args.root_dir)
cmd = ' % spack bootstrap add --trust {0} <final-path>/{1}\n'
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
instructions += cmd.format('local-sources', rel_directory)
if args.binary_packages:
abs_directory, rel_directory = write_metadata(
subdir='binaries', metadata=BINARY_METADATA
)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
instructions += cmd.format('local-binaries', rel_directory)
print(instructions)


def bootstrap(parser, args):
callbacks = {
'status': _status,
@@ -442,9 +258,6 @@ def bootstrap(parser, args):
'root': _root,
'list': _list,
'trust': _trust,
'untrust': _untrust,
'add': _add,
'remove': _remove,
'mirror': _mirror
'untrust': _untrust
}
callbacks[args.subcommand](args)

@@ -161,6 +161,11 @@ def setup_parser(subparser):
help=('Check single spec from json or yaml file instead of release ' +
'specs file'))

check.add_argument(
'--rebuild-on-error', default=False, action='store_true',
help="Default to rebuilding packages if errors are encountered " +
"during the process of checking whether rebuilding is needed")

check.set_defaults(func=check_fn)

# Download tarball and specfile
@@ -175,6 +180,9 @@ def setup_parser(subparser):
download.add_argument(
'-p', '--path', default=None,
help="Path to directory where tarball should be downloaded")
download.add_argument(
'-c', '--require-cdashid', action='store_true', default=False,
help="Require .cdashid file to be downloaded with buildcache entry")
download.set_defaults(func=download_fn)

# Get buildcache name
@@ -356,7 +364,7 @@ def list_fn(args):
try:
specs = bindist.update_cache_and_get_specs()
except bindist.FetchCacheError as e:
tty.die(e)
tty.error(e)

if not args.allarch:
arch = spack.spec.Spec.default_arch()
@@ -425,14 +433,16 @@ def check_fn(args):
sys.exit(0)

sys.exit(bindist.check_specs_against_mirrors(
configured_mirrors, specs, args.output_file))
configured_mirrors, specs, args.output_file, args.rebuild_on_error))


def download_fn(args):
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
least one of the required buildcache components."""
least one of the required buildcache components. Normally, just the
tarball and .spec.json files are required, but if the --require-cdashid
argument was provided, then a .cdashid file is also required."""
if not args.spec and not args.spec_file:
tty.msg('No specs provided, exiting.')
sys.exit(0)
@@ -442,7 +452,9 @@ def download_fn(args):
sys.exit(0)

spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(spec, args.path)
result = bindist.download_single_spec(
spec, args.path, require_cdashid=args.require_cdashid
)

if not result:
sys.exit(1)
@@ -478,12 +490,11 @@ def save_specfile_fn(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'json'
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)

@@ -549,6 +560,11 @@ def copy_fn(args):
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)

cdashidfile_rel_path = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)

# Make sure directory structure exists before attempting to copy
os.makedirs(os.path.dirname(tarball_dest_path))

@@ -562,6 +578,11 @@ def copy_fn(args):
tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)

# Copy the cdashid file (if exists) to the destination mirror
if os.path.exists(cdashid_src_path):
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
shutil.copyfile(cdashid_src_path, cdashid_dest_path)


def sync_fn(args):
""" Syncs binaries (and associated metadata) from one mirror to another.
@@ -646,6 +667,8 @@ def sync_fn(args):
build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
os.path.join(
build_cache_dir, bindist.tarball_name(s, '.spec.json')),
os.path.join(
build_cache_dir, bindist.tarball_name(s, '.cdashid'))
])

tmpdir = tempfile.mkdtemp()
@@ -697,7 +720,7 @@ def update_index(mirror_url, update_keys=False):

def update_index_fn(args):
"""Update a buildcache index."""
outdir = 'file://.'
outdir = '.'
if args.mirror_url:
outdir = args.mirror_url

@@ -57,32 +57,32 @@ def checksum(parser, args):
pkg = spack.repo.get(args.package)

url_dict = {}
versions = args.versions
if (not versions) and args.preferred:
versions = [preferred_version(pkg)]

if versions:
remote_versions = None
for version in versions:
if args.versions:
# If the user asked for specific versions, use those
for version in args.versions:
version = ver(version)
if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or "
"version ranges. Use unambiguous versions.")
url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]
url_dict[version] = pkg.url_for_version(version)
elif args.preferred:
version = preferred_version(pkg)
url_dict = dict([(version, pkg.url_for_version(version))])
else:
# Otherwise, see what versions we can find online
url_dict = pkg.fetch_remote_versions()
if not url_dict:
tty.die("Could not find any versions for {0}".format(pkg.name))

if not url_dict:
tty.die("Could not find any versions for {0}".format(pkg.name))
# And ensure the specified version URLs take precedence, if available
try:
explicit_dict = {}
for v in pkg.versions:
if not v.isdevelop():
explicit_dict[v] = pkg.url_for_version(v)
url_dict.update(explicit_dict)
except spack.package.NoURLError:
pass

version_lines = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, keep_stage=args.keep_stage,

@@ -64,11 +64,6 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.add_argument(
'--buildcache-destination', default=None,
help="Override the mirror configured in the environment (spack.yaml) " +
"in order to push binaries from the generated pipeline to a " +
"different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',
@@ -132,7 +127,6 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination

if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
@@ -146,8 +140,7 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination)
use_dependencies=use_dependencies, artifacts_root=artifacts_root)

if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
@@ -174,7 +167,8 @@ def ci_reindex(args):

def ci_rebuild(args):
"""Check a single spec against the remote mirror, and rebuild it from
source if the mirror does not contain the hash. """
source if the mirror does not contain the full hash match of the spec
as computed locally. """
env = spack.cmd.require_active_env(cmd_name='ci rebuild')

# Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -187,9 +181,6 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))

# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@@ -205,8 +196,9 @@ def ci_rebuild(args):
job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

# Construct absolute paths relative to current $CI_PROJECT_DIR
@@ -244,6 +236,7 @@ def ci_rebuild(args):
tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
tty.debug('cdash_site = {0}'.format(cdash_site))
tty.debug('related_builds = {0}'.format(related_builds))
tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))

# Is this a pipeline run on a spack PR or a merge to develop? It might
@@ -254,10 +247,6 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))

# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url

# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
@@ -290,11 +279,11 @@ def ci_rebuild(args):
# Whatever form of root_spec we got, use it to get a map giving us concrete
# specs for this job and all of its dependencies.
spec_map = spack_ci.get_concrete_specs(
env, root_spec, job_spec_pkg_name, compiler_action)
env, root_spec, job_spec_pkg_name, related_builds, compiler_action)
job_spec = spec_map[job_spec_pkg_name]

job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)

# To provide logs, cdash reports, etc for developer download/perusal,
# these things have to be put into artifacts. This means downstream
@@ -348,23 +337,23 @@ def ci_rebuild(args):
# using a compiler already installed on the target system).
spack_ci.configure_compilers(compiler_action)

# Write this job's spec json into the reproduction directory, and it will
# Write this job's spec yaml into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
with open(job_spec_json_path, 'w') as fd:
fd.write(job_spec.to_json(hash=ht.dag_hash))
tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
with open(job_spec_yaml_path, 'w') as fd:
fd.write(job_spec.to_yaml(hash=ht.build_hash))

# Write the concrete root spec json into the reproduction directory
root_spec_json_path = os.path.join(repro_dir, 'root.json')
with open(root_spec_json_path, 'w') as fd:
fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
# Write the concrete root spec yaml into the reproduction directory
root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
with open(root_spec_yaml_path, 'w') as fd:
fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

# Write some other details to aid in reproduction into an artifact
repro_file = os.path.join(repro_dir, 'repro.json')
repro_details = {
'job_name': ci_job_name,
'job_spec_json': job_spec_json_file,
'root_spec_json': 'root.json',
'job_spec_yaml': job_spec_yaml_file,
'root_spec_yaml': 'root.yaml',
'ci_project_dir': ci_project_dir
}
with open(repro_file, 'w') as fd:
@@ -373,47 +362,32 @@ def ci_rebuild(args):
# Write information about spack into an artifact in the repro dir
spack_info = spack_ci.get_spack_info()
spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
with open(spack_info_file, 'wb') as fd:
fd.write(b'\n')
fd.write(spack_info.encode('utf8'))
fd.write(b'\n')
with open(spack_info_file, 'w') as fd:
fd.write('\n{0}\n'.format(spack_info))

# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# mirror now so it's used when we check for a full hash match already
# built for this spec.
if pipeline_mirror_url:
spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
pipeline_mirror_url,
cfg.default_modify_scope())

# Check configured mirrors for a built spec with a matching hash
mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {
'override': remote_mirror_override
}

# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add('mirror_override',
remote_mirror_override,
cfg.default_modify_scope())
cdash_build_id = None
cdash_build_stamp = None

# Check configured mirrors for a built spec with a matching full hash
matches = bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
job_spec, full_hash_match=True, index_only=False)

if matches:
# Got a hash match on at least one configured mirror. All
# Got a full hash match on at least one configured mirror. All
# matches represent the fully up-to-date spec, so should all be
# equivalent. If artifacts mirror is enabled, we just pick one
# of the matches and download the buildcache files from there to
# the artifacts, so they're available to be used by dependent
# jobs in subsequent stages.
tty.msg('No need to rebuild {0}, found hash match at: '.format(
tty.msg('No need to rebuild {0}, found full hash match at: '.format(
job_spec_pkg_name))
for match in matches:
tty.msg(' {0}'.format(match['mirror_url']))
@@ -426,13 +400,14 @@ def ci_rebuild(args):
bindist.download_single_spec(
job_spec,
build_cache_dir,
require_cdashid=False,
mirror_url=matching_mirror
)

# Now we are done and successful
sys.exit(0)

# No hash match anywhere means we need to rebuild spec
# No full hash match anywhere means we need to rebuild spec

# Start with spack arguments
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
@@ -444,6 +419,7 @@ def ci_rebuild(args):
install_args.extend([
'install',
'--keep-stage',
'--require-full-hash-match',
])

can_verify = spack_ci.can_verify_binaries()
@@ -451,8 +427,16 @@ def ci_rebuild(args):
if not verify_binaries:
install_args.append('--no-check-signature')

# If CDash reporting is enabled, we first register this build with
# the specified CDash instance, then relate the build to those of
# its dependencies.
if enable_cdash:
# Add additional arguments to `spack install` for CDash reporting.
tty.debug('CDash: Registering build')
(cdash_build_id,
cdash_build_stamp) = spack_ci.register_cdash_build(
cdash_build_name, cdash_base_url, cdash_project,
cdash_site, job_spec_buildgroup)

cdash_upload_url = '{0}/submit.php?project={1}'.format(
cdash_base_url, cdash_project_enc)

@@ -460,9 +444,15 @@ def ci_rebuild(args):
'--cdash-upload-url', cdash_upload_url,
'--cdash-build', cdash_build_name,
'--cdash-site', cdash_site,
'--cdash-track', job_spec_buildgroup,
'--cdash-buildstamp', cdash_build_stamp,
])

if cdash_build_id is not None:
tty.debug('CDash: Relating build with dependency builds')
spack_ci.relate_cdash_builds(
spec_map, cdash_base_url, cdash_build_id, cdash_project,
[pipeline_mirror_url, pr_mirror_url, remote_mirror_url])

# A compiler action of 'FIND_ANY' means we are building a bootstrap
# compiler or one of its deps.
# TODO: when compilers are dependencies, we should include --no-add
@@ -471,8 +461,8 @@ def ci_rebuild(args):

# TODO: once we have the concrete spec registry, use the DAG hash
# to identify the spec to install, rather than the concrete spec
# json file.
install_args.extend(['-f', job_spec_json_path])
# yaml file.
install_args.extend(['-f', job_spec_yaml_path])

tty.debug('Installing {0} from source'.format(job_spec.name))
tty.debug('spack install arguments: {0}'.format(
@@ -505,13 +495,13 @@ def ci_rebuild(args):
tty.debug('spack install exited {0}'.format(install_exit_code))

# If a spec fails to build in a spack develop pipeline, we add it to a
# list of known broken hashes. This allows spack PR pipelines to
# list of known broken full hashes. This allows spack PR pipelines to
# avoid wasting compute cycles attempting to build those hashes.
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
tty.debug('Install failed on develop')
if 'broken-specs-url' in gitlab_ci:
broken_specs_url = gitlab_ci['broken-specs-url']
dev_fail_hash = job_spec.dag_hash()
dev_fail_hash = job_spec.full_hash()
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
tty.msg('Reporting broken develop build as: {0}'.format(
broken_spec_path))
@@ -522,7 +512,7 @@ def ci_rebuild(args):
'broken-spec': {
'job-url': get_env_var('CI_JOB_URL'),
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
}
}

@@ -548,6 +538,13 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Create buildcache on remote mirror, either on pr-specific mirror or
# on the main mirror defined in the gitlab-enabled spack environment
if spack_is_pr_pipeline:
buildcache_mirror_url = pr_mirror_url
else:
buildcache_mirror_url = remote_mirror_url

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
@@ -560,23 +557,35 @@ def ci_rebuild(args):
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
spack_ci.push_mirror_contents(
env, job_spec_json_path, buildcache_mirror_url, sign_binaries
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
)

if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
cdash_build_id, buildcache_mirror_url))
spack_ci.write_cdashid_to_mirror(
cdash_build_id, job_spec, buildcache_mirror_url)

# Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either
# artifacts buildcache is enabled or a temporary storage url
# prefix is set)
if pipeline_mirror_url:
spack_ci.push_mirror_contents(
env, job_spec_json_path, pipeline_mirror_url, sign_binaries
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
)

if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
cdash_build_id, pipeline_mirror_url))
spack_ci.write_cdashid_to_mirror(
cdash_build_id, job_spec, pipeline_mirror_url)

# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
broken_specs_url = gitlab_ci['broken-specs-url']
just_built_hash = job_spec.dag_hash()
just_built_hash = job_spec.full_hash()
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
if web_util.url_exists(broken_spec_path):
tty.msg('Removing {0} from the list of broken specs'.format(

@@ -117,7 +117,7 @@ def format(self, cmd):
'virtual': '_providers',
'namespace': '_repos',
'hash': '_all_resource_hashes',
'pytest': '_unit_tests',
'pytest': '_tests',
}


@@ -35,9 +35,6 @@ def shell_init_instructions(cmd, equivalent):
color.colorize("@*c{For fish:}"),
" source %s/setup-env.fish" % spack.paths.share_path,
"",
color.colorize("@*c{For Windows batch:}"),
" source %s/spack_cmd.bat" % spack.paths.share_path,
"",
"Or, if you do not want to use shell support, run " + (
"one of these" if shell_specific else "this") + " instead:",
"",
@@ -48,7 +45,6 @@ def shell_init_instructions(cmd, equivalent):
equivalent.format(sh_arg="--sh ") + " # bash/zsh/sh",
equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
equivalent.format(sh_arg="--fish") + " # fish",
equivalent.format(sh_arg="--bat ") + " # batch"
]
else:
msg += [" " + equivalent]

@@ -18,8 +18,6 @@

def setup_parser(subparser):
arguments.add_common_arguments(subparser, ['clean', 'dirty'])
arguments.add_concretizer_args(subparser)

subparser.add_argument(
'--dump', metavar="FILE",
help="dump a source-able environment to FILE"

@@ -22,9 +22,6 @@ def setup_parser(subparser):
help="""Concretize with test dependencies. When 'root' is chosen, test
dependencies are only added for the environment's root specs. When 'all' is
chosen, test dependencies are enabled for all packages in the environment.""")
subparser.add_argument(
'-q', '--quiet', action='store_true',
help="Don't print concretized specs")

spack.cmd.common.arguments.add_concretizer_args(subparser)

@@ -41,6 +38,5 @@ def concretize(parser, args):

with env.write_transaction():
concretized_specs = env.concretize(force=args.force, tests=tests)
if not args.quiet:
ev.display_specs(concretized_specs)
ev.display_specs(concretized_specs)
env.write()

@@ -187,27 +187,6 @@ def cmake_args(self):
|
||||
return args"""
|
||||
|
||||
|
||||
class LuaPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for LuaRocks-based packages"""
|
||||
|
||||
base_class_name = 'LuaPackage'
|
||||
|
||||
body_def = """\
|
||||
def luarocks_args(self):
|
||||
# FIXME: Add arguments to `luarocks make` other than rockspec path
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
def __init__(self, name, url, *args, **kwargs):
|
||||
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
|
||||
if not name.startswith('lua-'):
|
||||
# Make it more obvious that we are renaming the package
|
||||
tty.msg("Changing package name from {0} to lua-{0}".format(name))
|
||||
name = 'lua-{0}'.format(name)
|
||||
super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
|
||||
|
||||
|
||||
class MesonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for meson-based packages"""
|
||||
|
||||
@@ -279,42 +258,6 @@ def install(self, spec, prefix):
        bazel()"""


class RacketPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for Racket extensions"""
    base_class_name = 'RacketPackage'

    url_line = """\
    # FIXME: set the proper location from which to fetch your package
    git = "git@github.com:example/example.git"
    """

    dependencies = """\
    # FIXME: Add dependencies if required. Only add the racket dependency
    # if you need specific versions. A generic racket dependency is
    # added implicitly by the RacketPackage class.
    # depends_on('racket@8.3:', type=('build', 'run'))"""

    body_def = """\
    # FIXME: specify the name of the package,
    # as it should appear to ``raco pkg install``
    name = '{0}'
    # FIXME: set to true if published on pkgs.racket-lang.org
    # pkgs = False
    # FIXME: specify path to the root directory of the
    # package, if not the base directory
    # subdirectory = None
    """

    def __init__(self, name, url, *args, **kwargs):
        # If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
        if not name.startswith('rkt-'):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to rkt-{0}".format(name))
            name = 'rkt-{0}'.format(name)
        self.body_def = self.body_def.format(name[4:])
        super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)


class PythonPackageTemplate(PackageTemplate):
    """Provides appropriate overrides for python extensions"""
    base_class_name = 'PythonPackage'

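Both __init__ methods above use the same no-double-prefix guard. A standalone sketch of that rule (illustrative names, not spack's code):

def prefixed_name(name, prefix):
    # Mirror of the guard above: add the prefix only when missing, so
    # 'rkt-scribble' is not renamed to 'rkt-rkt-scribble'.
    if not name.startswith(prefix):
        name = prefix + name
    return name

assert prefixed_name('scribble', 'rkt-') == 'rkt-scribble'
assert prefixed_name('rkt-scribble', 'rkt-') == 'rkt-scribble'
assert prefixed_name('lpeg', 'lua-') == 'lua-lpeg'
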
@@ -593,7 +536,6 @@ def __init__(self, name, *args, **kwargs):
    'bazel':      BazelPackageTemplate,
    'python':     PythonPackageTemplate,
    'r':          RPackageTemplate,
    'racket':     RacketPackageTemplate,
    'perlmake':   PerlmakePackageTemplate,
    'perlbuild':  PerlbuildPackageTemplate,
    'octave':     OctavePackageTemplate,
@@ -601,7 +543,6 @@ def __init__(self, name, *args, **kwargs):
    'makefile':   MakefilePackageTemplate,
    'intel':      IntelPackageTemplate,
    'meson':      MesonPackageTemplate,
    'lua':        LuaPackageTemplate,
    'sip':        SIPPackageTemplate,
    'generic':    PackageTemplate,
}

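A toy sketch of how a --template name resolves through a table like the one above (stand-in classes; spack's real lookup and validation live elsewhere in create.py):

class PackageTemplate:                         # stand-in base class
    pass

class LuaPackageTemplate(PackageTemplate):     # stand-in subclass
    pass

templates = {'lua': LuaPackageTemplate, 'generic': PackageTemplate}

def pick_template(name):
    # Unknown names fall back to the generic template in this sketch;
    # spack itself validates the choice when parsing arguments.
    return templates.get(name, PackageTemplate)

print(pick_template('lua').__name__)  # LuaPackageTemplate
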
@@ -666,9 +607,6 @@ def __call__(self, stage, url):
        if url.endswith('.whl') or '.whl#' in url:
            self.build_system = 'python'
            return
        if url.endswith('.rock'):
            self.build_system = 'lua'
            return

        # A list of clues that give us an idea of the build system a package
        # uses. If the regular expression matches a file contained in the
@@ -693,7 +631,6 @@ def __call__(self, stage, url):
            (r'/Rakefile$', 'ruby'),
            (r'/setup\.rb$', 'ruby'),
            (r'/.*\.pro$', 'qmake'),
            (r'/.*\.rockspec$', 'lua'),
            (r'/(GNU)?[Mm]akefile$', 'makefile'),
            (r'/DESCRIPTION$', 'octave'),
            (r'/meson\.build$', 'meson'),

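The clue table above drives a first-match scan over the files in an unpacked archive. A self-contained sketch of that idea (cut-down table, invented paths, not spack's implementation):

import re

# A cut-down clue table in the same shape as the one above.
clues = [
    (r'/CMakeLists\.txt$', 'cmake'),
    (r'/.*\.rockspec$', 'lua'),
    (r'/(GNU)?[Mm]akefile$', 'makefile'),
]

def guess_build_system(file_paths):
    # First matching clue wins; fall back to the generic template.
    for pattern, build_system in clues:
        if any(re.search(pattern, path) for path in file_paths):
            return build_system
    return 'generic'

print(guess_build_system(['/lpeg-1.0.0/lpeg-1.0.0-1.rockspec']))  # lua
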
@@ -821,15 +758,7 @@ def get_versions(args, name):
    # Default guesser
    guesser = BuildSystemGuesser()

    valid_url = True
    try:
        spack.util.url.require_url_format(args.url)
        if args.url.startswith('file://'):
            valid_url = False  # No point in spidering these
    except ValueError:
        valid_url = False

    if args.url is not None and args.template != 'bundle' and valid_url:
    if args.url is not None and args.template != 'bundle':
        # Find available versions
        try:
            url_dict = spack.util.web.find_versions_of_archive(args.url)

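The removed hunk pre-checks the URL before spidering; require_url_format is assumed here to raise ValueError on malformed input, as the except clause implies. A rough standalone approximation using urllib:

from urllib.parse import urlparse

def worth_spidering(url):
    # Rough stand-in for the removed check: reject malformed URLs and
    # file:// URLs, which are not worth spidering for versions.
    parts = urlparse(url)
    return bool(parts.scheme) and parts.scheme != 'file'

assert worth_spidering('https://example.com/pkg-1.0.tar.gz')
assert not worth_spidering('file:///tmp/pkg-1.0.tar.gz')
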
@@ -8,9 +8,9 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.graph
import spack.store
from spack.filesystem_view import YamlFilesystemView
from spack.graph import topological_sort

description = "deactivate a package extension"
section = "extensions"
@@ -32,10 +32,6 @@ def setup_parser(subparser):


def deactivate(parser, args):

    tty.warn("spack deactivate is deprecated in favor of "
             "environments and will be removed in v0.19.0")

    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("deactivate requires one spec. %d given." % len(specs))
@@ -72,8 +68,11 @@ def deactivate(parser, args):
            tty.msg("Deactivating %s and all dependencies." %
                    pkg.spec.short_spec)

            nodes_in_topological_order = spack.graph.topological_sort(spec)
            for espec in reversed(nodes_in_topological_order):
            topo_order = topological_sort(spec)
            index = spec.index()

            for name in topo_order:
                espec = index[name]
                epkg = espec.package
                if epkg.extends(pkg.extendee_spec):
                    if epkg.is_activated(view) or args.force:

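The traversal change above swaps an explicit reversed topological order for a name-indexed walk; either way, extensions must come off dependents-first. A toy sketch of the dependents-first idea (made-up graph, not spack's Spec API):

# Toy dependency graph: edges point from a package to its dependencies.
graph = {'ext-a': ['ext-b'], 'ext-b': ['runtime'], 'runtime': []}

def topological_order(g):
    seen, order = set(), []
    def visit(node):
        if node in seen:
            return
        seen.add(node)
        for dep in g[node]:
            visit(dep)
        order.append(node)  # appended only after its dependencies
    for node in g:
        visit(node)
    return order

# Dependents-first, matching the removed reversed(...) loop above:
for name in reversed(topological_order(graph)):
    print('deactivate', name)
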
@@ -19,7 +19,6 @@
import os

import llnl.util.tty as tty
from llnl.util.symlink import symlink

import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -124,7 +123,7 @@ def deprecate(parser, args):
    if not answer:
        tty.die('Will not deprecate any packages.')

    link_fn = os.link if args.link_type == 'hard' else symlink
    link_fn = os.link if args.link_type == 'hard' else os.symlink

    for dcate, dcator in zip(all_deprecate, all_deprecators):
        dcate.package.do_deprecate(dcator, link_fn)

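The link_fn switch above picks one link callable up front so the deprecation loop stays uniform. A minimal sketch of that design choice:

import os

def pick_link_fn(link_type):
    # Choose the link callable once; both share the (src, dst) call shape,
    # so the caller never branches again.
    return os.link if link_type == 'hard' else os.symlink

link_fn = pick_link_fn('soft')
print(link_fn.__name__)  # symlink
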
@@ -91,8 +91,8 @@ def dev_build(self, args):
    spec.concretize()
    package = spack.repo.get(spec)

    if spec.installed:
        tty.error("Already installed in %s" % spec.prefix)
    if package.installed:
        tty.error("Already installed in %s" % package.prefix)
        tty.msg("Uninstall or try adding a version suffix for this dev build.")
        sys.exit(1)

@@ -68,14 +68,8 @@ def compare_specs(a, b, to_string=False, color=None):
    # Prepare a solver setup to parse differences
    setup = asp.SpackSolverSetup()

    # get facts for specs, making sure to include build dependencies of concrete
    # specs and to descend into dependency hashes so we include all facts.
    a_facts = set(t for t in setup.spec_clauses(
        a, body=True, expand_hashes=True, concrete_build_deps=True,
    ))
    b_facts = set(t for t in setup.spec_clauses(
        b, body=True, expand_hashes=True, concrete_build_deps=True,
    ))
    a_facts = set(t for t in setup.spec_clauses(a, body=True, expand_hashes=True))
    b_facts = set(t for t in setup.spec_clauses(b, body=True, expand_hashes=True))

    # We want to present them to the user as simple key: values
    intersect = sorted(a_facts.intersection(b_facts))

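Once spec_clauses lowers each spec to a set of fact tuples, the comparison in compare_specs is plain set arithmetic. A sketch with made-up clause tuples (the tuple shapes are illustrative, not spack's exact clause format):

# Made-up clause tuples standing in for setup.spec_clauses(...) output.
a_facts = {('version', 'zlib', '1.2.12'), ('variant', 'zlib', 'pic')}
b_facts = {('version', 'zlib', '1.2.11'), ('variant', 'zlib', 'pic')}

intersect = sorted(a_facts & b_facts)  # facts the two specs share
a_not_b = sorted(a_facts - b_facts)    # facts only in a
b_not_a = sorted(b_facts - a_facts)    # facts only in b
print(intersect, a_not_b, b_not_a, sep='\n')
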
Some files were not shown because too many files have changed in this diff.