Compare commits
1 commit: e4s-21.11 ... features/g
| Author | SHA1 | Date |
|---|---|---|
|  | 0d092d671f |  |
.github/workflows/bootstrap.yml (vendored): 118 changed lines
@@ -19,7 +19,7 @@ on:
  jobs:
- fedora-clingo-sources:
+ fedora-sources:
  runs-on: ubuntu-latest
  container: "fedora:latest"
  steps:
@@ -29,7 +29,7 @@ jobs:
  bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
  make patch unzip which xz python3 python3-devel tree \
  cmake bison bison-devel libstdc++-static
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  - name: Setup repo and non-root user
  run: |
  git --version
@@ -46,7 +46,7 @@ jobs:
  spack -d solve zlib
  tree ~/.spack/bootstrap/store/

- ubuntu-clingo-sources:
+ ubuntu-sources:
  runs-on: ubuntu-latest
  container: "ubuntu:latest"
  steps:
@@ -59,7 +59,7 @@ jobs:
  bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
  make patch unzip xz-utils python3 python3-dev tree \
  cmake bison
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  - name: Setup repo and non-root user
  run: |
  git --version
@@ -76,9 +76,9 @@ jobs:
  spack -d solve zlib
  tree ~/.spack/bootstrap/store/

- opensuse-clingo-sources:
+ opensuse-sources:
  runs-on: ubuntu-latest
- container: "opensuse/leap:latest"
+ container: "opensuse/tumbleweed:latest"
  steps:
  - name: Install dependencies
  run: |
@@ -87,7 +87,7 @@ jobs:
  bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
  make patch unzip which xz python3 python3-devel tree \
  cmake bison
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  - name: Setup repo and non-root user
  run: |
  git --version
@@ -101,13 +101,13 @@ jobs:
  spack -d solve zlib
  tree ~/.spack/bootstrap/store/

- macos-clingo-sources:
+ macos-sources:
  runs-on: macos-latest
  steps:
  - name: Install dependencies
  run: |
  brew install cmake bison@2.7 tree
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  - name: Bootstrap clingo
  run: |
  source share/spack/setup-env.sh
@@ -126,8 +126,8 @@ jobs:
  - name: Install dependencies
  run: |
  brew install tree
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
  with:
  python-version: ${{ matrix.python-version }}
  - name: Bootstrap clingo
@@ -137,14 +137,15 @@ jobs:
  spack -d solve zlib
  tree ~/.spack/bootstrap/store/

  ubuntu-clingo-binaries:
  runs-on: ubuntu-latest
  strategy:
  matrix:
  python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
  with:
  python-version: ${{ matrix.python-version }}
  - name: Setup repo and non-root user
@@ -158,94 +159,3 @@ jobs:
  spack bootstrap untrust spack-install
  spack -d solve zlib
  tree ~/.spack/bootstrap/store/

- ubuntu-gnupg-binaries:
- runs-on: ubuntu-latest
- container: "ubuntu:latest"
- steps:
- - name: Install dependencies
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- apt-get update -y && apt-get upgrade -y
- apt-get install -y \
- bzip2 curl file g++ gcc patchelf gfortran git gzip \
- make patch unzip xz-utils python3 python3-dev tree
- - uses: actions/checkout@v2
- - name: Setup repo and non-root user
- run: |
- git --version
- git fetch --unshallow
- . .github/workflows/setup_git.sh
- useradd -m spack-test
- chown -R spack-test .
- - name: Bootstrap GnuPG
- shell: runuser -u spack-test -- bash {0}
- run: |
- source share/spack/setup-env.sh
- spack bootstrap untrust spack-install
- spack -d gpg list
- tree ~/.spack/bootstrap/store/
-
- ubuntu-gnupg-sources:
- runs-on: ubuntu-latest
- container: "ubuntu:latest"
- steps:
- - name: Install dependencies
- env:
- DEBIAN_FRONTEND: noninteractive
- run: |
- apt-get update -y && apt-get upgrade -y
- apt-get install -y \
- bzip2 curl file g++ gcc patchelf gfortran git gzip \
- make patch unzip xz-utils python3 python3-dev tree \
- gawk
- - uses: actions/checkout@v2
- - name: Setup repo and non-root user
- run: |
- git --version
- git fetch --unshallow
- . .github/workflows/setup_git.sh
- useradd -m spack-test
- chown -R spack-test .
- - name: Bootstrap GnuPG
- shell: runuser -u spack-test -- bash {0}
- run: |
- source share/spack/setup-env.sh
- spack solve zlib
- spack bootstrap untrust github-actions
- spack -d gpg list
- tree ~/.spack/bootstrap/store/
-
- macos-gnupg-binaries:
- runs-on: macos-latest
- steps:
- - name: Install dependencies
- run: |
- brew install tree
- # Remove GnuPG since we want to bootstrap it
- sudo rm -rf /usr/local/bin/gpg
- - uses: actions/checkout@v2
- - name: Bootstrap GnuPG
- run: |
- source share/spack/setup-env.sh
- spack bootstrap untrust spack-install
- spack -d gpg list
- tree ~/.spack/bootstrap/store/
-
- macos-gnupg-sources:
- runs-on: macos-latest
- steps:
- - name: Install dependencies
- run: |
- brew install gawk tree
- # Remove GnuPG since we want to bootstrap it
- sudo rm -rf /usr/local/bin/gpg
- - uses: actions/checkout@v2
- - name: Bootstrap GnuPG
- run: |
- source share/spack/setup-env.sh
- spack solve zlib
- spack bootstrap untrust github-actions
- spack -d gpg list
- tree ~/.spack/bootstrap/store/
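Every `*-sources` job in this workflow repeats the same shape: install a system toolchain in a container, check out Spack, create an unprivileged user, then run a debug solve of `zlib`, which drives the clingo bootstrap into `~/.spack/bootstrap/store`. A condensed sketch of that shared pattern, assuming it sits under the workflow's `jobs:` key (the job name is illustrative and the package list is trimmed from the Ubuntu job above):

```yaml
# Illustrative condensation of the *-sources jobs above; not a job from the diff.
ubuntu-sources-sketch:
  runs-on: ubuntu-latest
  container: "ubuntu:latest"
  steps:
    - name: Install dependencies
      run: |
        apt-get update -y
        apt-get install -y bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
          make patch unzip xz-utils python3 python3-dev tree cmake bison
    - uses: actions/checkout@v2
    - name: Setup repo and non-root user
      run: |
        git fetch --unshallow
        useradd -m spack-test
        chown -R spack-test .
    - name: Bootstrap clingo
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
        spack -d solve zlib
        tree ~/.spack/bootstrap/store/
```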
.github/workflows/build-containers.yml (vendored): 59 changed lines
@@ -1,26 +1,15 @@
- name: Containers
+ name: Build & Deploy Docker Containers

  on:
- # This Workflow can be triggered manually
- workflow_dispatch:
- # Build new Spack develop containers nightly.
- schedule:
- - cron: '34 0 * * *'
- # Run on pull requests that modify this file
  pull_request:
  branches:
  - develop
  paths:
  - '.github/workflows/build-containers.yml'
- # Let's also build & tag Spack containers on releases.
- release:
- types: [published]

  jobs:
  deploy-images:
- runs-on: ubuntu-latest
- permissions:
- packages: write
+ runs-on: ubuntu-latest
  strategy:
  # Even if one container fails to build we still want the others
  # to continue their builds.
@@ -28,19 +17,19 @@ jobs:
  # A matrix of Dockerfile paths, associated tags, and which architectures
  # they support.
  matrix:
- dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
- [centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
- [leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
- [ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
- [ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le']]
+ dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
+ [centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64'],
+ [leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64'],
+ [ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64'],
+ [ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64']]
  name: Build ${{ matrix.dockerfile[0] }}
  steps:
  - name: Checkout
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ uses: actions/checkout@v2

  - name: Set Container Tag Normal (Nightly)
  run: |
- container="${{ matrix.dockerfile[0] }}:latest"
+ container="ghcr.io/spack/${{ matrix.dockerfile[0]}}:latest"
  echo "container=${container}" >> $GITHUB_ENV
  echo "versioned=${container}" >> $GITHUB_ENV

@@ -48,7 +37,7 @@ jobs:
  - name: Set Container Tag on Release
  if: github.event_name == 'release'
  run: |
- versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
+ versioned="ghcr.io/spack/${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
  echo "versioned=${versioned}" >> $GITHUB_ENV

  - name: Check ${{ matrix.dockerfile[1] }} Exists
@@ -59,33 +48,25 @@ jobs:
  exit 1;
  fi

- - name: Set up QEMU
- uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
-
  - name: Log in to GitHub Container Registry
- uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
+ uses: docker/login-action@v1
  with:
  registry: ghcr.io
  username: ${{ github.actor }}
  password: ${{ secrets.GITHUB_TOKEN }}

- - name: Log in to DockerHub
- uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v1
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v1

  - name: Build & Deploy ${{ matrix.dockerfile[1] }}
- uses: docker/build-push-action@a66e35b9cbcf4ad0ea91ffcaf7bbad63ad9e0229 # @v2
+ uses: docker/build-push-action@v2
  with:
  file: share/spack/docker/${{matrix.dockerfile[1]}}
  platforms: ${{ matrix.dockerfile[2] }}
- push: ${{ github.event_name != 'pull_request' }}
+ push: true
  tags: |
- spack/${{ env.container }}
- spack/${{ env.versioned }}
- ghcr.io/spack/${{ env.container }}
- ghcr.io/spack/${{ env.versioned }}
+ ${{ env.container }}
+ ${{ env.versioned }}
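Each entry in the `dockerfile` matrix above is a `[name, dockerfile, platforms]` triple that the steps read back by index: `dockerfile[0]` names the image, `dockerfile[1]` locates the recipe under `share/spack/docker/`, and `dockerfile[2]` lists the target platforms. A minimal fragment showing that flow (two matrix entries only; the tag expression is illustrative, not taken from the diff):

```yaml
# Fragment only: how one [name, dockerfile, platforms] triple feeds the build step.
strategy:
  matrix:
    dockerfile: [[centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64'],
                 [leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64']]
steps:
  - name: Build & Deploy ${{ matrix.dockerfile[1] }}
    uses: docker/build-push-action@v2
    with:
      file: share/spack/docker/${{ matrix.dockerfile[1] }}
      platforms: ${{ matrix.dockerfile[2] }}
      tags: ghcr.io/spack/${{ matrix.dockerfile[0] }}:latest
```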
.github/workflows/macos_python.yml (vendored): 26 changed lines
@@ -24,8 +24,8 @@ jobs:
  name: gcc with clang
  runs-on: macos-latest
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: spack install
@@ -39,8 +39,8 @@ jobs:
  runs-on: macos-latest
  timeout-minutes: 700
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: spack install
@@ -52,8 +52,8 @@ jobs:
  name: scipy, mpl, pd
  runs-on: macos-latest
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: spack install
@@ -62,3 +62,17 @@ jobs:
  spack install -v --fail-fast py-scipy %apple-clang
  spack install -v --fail-fast py-matplotlib %apple-clang
  spack install -v --fail-fast py-pandas %apple-clang
+
+ install_mpi4py_clang:
+ name: mpi4py, petsc4py
+ runs-on: macos-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: spack install
+ run: |
+ . .github/workflows/install_spack.sh
+ spack install -v --fail-fast py-mpi4py %apple-clang
+ spack install -v --fail-fast py-petsc4py %apple-clang
.github/workflows/unit_tests.yaml (vendored): 85 changed lines
@@ -15,8 +15,8 @@ jobs:
  validate:
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: Install Python Packages
@@ -31,10 +31,10 @@ jobs:
  style:
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  with:
  fetch-depth: 0
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: Install Python packages
@@ -48,6 +48,26 @@ jobs:
  - name: Run style tests
  run: |
  share/spack/qa/run-style-tests
+ # Build the documentation
+ documentation:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install System packages
+ run: |
+ sudo apt-get -y update
+ sudo apt-get install -y coreutils ninja-build graphviz
+ - name: Install Python packages
+ run: |
+ pip install --upgrade pip six setuptools
+ pip install --upgrade -r lib/spack/docs/requirements.txt
+ - name: Build documentation
+ run: |
+ share/spack/qa/run-doc-tests

  # Check which files have been updated by the PR
  changes:
  runs-on: ubuntu-latest
@@ -57,12 +77,12 @@ jobs:
  packages: ${{ steps.filter.outputs.packages }}
  with_coverage: ${{ steps.coverage.outputs.with_coverage }}
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  if: ${{ github.event_name == 'push' }}
  with:
  fetch-depth: 0
  # For pull requests it's not necessary to checkout the code
- - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
+ - uses: dorny/paths-filter@v2
  id: filter
  with:
  # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
@@ -92,17 +112,17 @@ jobs:

  # Run unit tests with different configurations on linux
  unittests:
- needs: [ validate, style, changes ]
+ needs: [ validate, style, documentation, changes ]
  runs-on: ubuntu-latest
  strategy:
  matrix:
  python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
  concretizer: ['original', 'clingo']
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  with:
  fetch-depth: 0
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/setup-python@v2
  with:
  python-version: ${{ matrix.python-version }}
  - name: Install System packages
@@ -151,19 +171,19 @@ jobs:
  SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
  run: |
  share/spack/qa/run-unit-tests
- - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+ - uses: codecov/codecov-action@v2.0.3
  if: ${{ needs.changes.outputs.with_coverage == 'true' }}
  with:
  flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
- needs: [ validate, style, changes ]
+ needs: [ validate, style, documentation, changes ]
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  with:
  fetch-depth: 0
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: Install System packages
@@ -189,13 +209,13 @@ jobs:
  COVERAGE: true
  run: |
  share/spack/qa/run-shell-tests
- - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+ - uses: codecov/codecov-action@v2.0.3
  if: ${{ needs.changes.outputs.with_coverage == 'true' }}
  with:
  flags: shelltests,linux
  # Test for Python2.6 run on Centos 6
  centos6:
- needs: [ validate, style, changes ]
+ needs: [ validate, style, documentation, changes ]
  runs-on: ubuntu-latest
  container: spack/github-actions:centos6
  steps:
@@ -205,30 +225,28 @@ jobs:
  if: ${{ needs.changes.outputs.with_coverage == 'true' }}
  env:
  HOME: /home/spack-test
- SPACK_TEST_SOLVER: original
  run: |
  whoami && echo $HOME && cd $HOME
- git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
- git fetch origin "${{ github.ref }}:test-branch"
+ git clone https://github.com/spack/spack.git && cd spack
+ git fetch origin ${{ github.ref }}:test-branch
  git checkout test-branch
- bin/spack unit-test -x
+ share/spack/qa/run-unit-tests
  - name: Run unit tests (only package tests)
  if: ${{ needs.changes.outputs.with_coverage == 'false' }}
  env:
  HOME: /home/spack-test
  ONLY_PACKAGES: true
- SPACK_TEST_SOLVER: original
  run: |
  whoami && echo $HOME && cd $HOME
- git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
- git fetch origin "${{ github.ref }}:test-branch"
+ git clone https://github.com/spack/spack.git && cd spack
+ git fetch origin ${{ github.ref }}:test-branch
  git checkout test-branch
- bin/spack unit-test -x -m "not maybeslow" -k "package_sanity"
+ share/spack/qa/run-unit-tests

  # Test RHEL8 UBI with platform Python. This job is run
  # only on PRs modifying core Spack
  rhel8-platform-python:
- needs: [ validate, style, changes ]
+ needs: [ validate, style, documentation, changes ]
  runs-on: ubuntu-latest
  if: ${{ needs.changes.outputs.with_coverage == 'true' }}
  container: registry.access.redhat.com/ubi8/ubi
@@ -238,7 +256,7 @@ jobs:
  dnf install -y \
  bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
  make patch tcl unzip which xz
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  - name: Setup repo and non-root user
  run: |
  git --version
@@ -250,17 +268,16 @@ jobs:
  shell: runuser -u spack-test -- bash {0}
  run: |
  source share/spack/setup-env.sh
- spack -d solve zlib
  spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
- needs: [ validate, style, changes ]
+ needs: [ validate, style, documentation, changes ]
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  with:
  fetch-depth: 0
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/setup-python@v2
  with:
  python-version: 3.9
  - name: Install System packages
@@ -294,22 +311,22 @@ jobs:
  SPACK_TEST_SOLVER: clingo
  run: |
  share/spack/qa/run-unit-tests
- - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+ - uses: codecov/codecov-action@v2.0.3
  if: ${{ needs.changes.outputs.with_coverage == 'true' }}
  with:
  flags: unittests,linux,clingo
  # Run unit tests on MacOS
  build:
- needs: [ validate, style, changes ]
+ needs: [ validate, style, documentation, changes ]
  runs-on: macos-latest
  strategy:
  matrix:
  python-version: [3.8]
  steps:
- - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+ - uses: actions/checkout@v2
  with:
  fetch-depth: 0
- - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+ - uses: actions/setup-python@v2
  with:
  python-version: ${{ matrix.python-version }}
  - name: Install Python packages
@@ -340,7 +357,7 @@ jobs:
  echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
  $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
  fi
- - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+ - uses: codecov/codecov-action@v2.0.3
  if: ${{ needs.changes.outputs.with_coverage == 'true' }}
  with:
  files: ./coverage.xml
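The head branch adds the `documentation` job and then lists it in the `needs` array of every downstream test job, so a broken docs build fails fast before any unit, shell, or macOS test runs. A trimmed sketch of that gating (fragment only; `validate`, `style`, and `changes` are the other jobs defined in this workflow):

```yaml
# Fragment: the docs build gates the test jobs via `needs`.
jobs:
  documentation:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Build documentation
        run: share/spack/qa/run-doc-tests
  unittests:
    needs: [ validate, style, documentation, changes ]  # waits for a clean docs build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - run: share/spack/qa/run-unit-tests
```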
.gitignore (vendored): 5 changed lines
@@ -132,11 +132,11 @@ celerybeat.pid
  .env
  .venv
  env/
- !/lib/spack/env
  venv/
  ENV/
  env.bak/
  venv.bak/
+ !/lib/spack/env

  # Spyder project settings
  .spyderproject
@@ -210,9 +210,6 @@ tramp
  /eshell/history
  /eshell/lastdir

- # zsh byte-compiled files
- *.zwc
-
  # elpa packages
  /elpa/
.readthedocs.yml (inferred)

@@ -2,7 +2,6 @@ version: 2

  sphinx:
  configuration: lib/spack/docs/conf.py
- fail_on_warning: true

  python:
  version: 3.7
CHANGELOG.md: 184 changed lines
@@ -1,187 +1,3 @@
- # v0.17.0 (2021-11-05)
-
- `v0.17.0` is a major feature release.
-
- ## Major features in this release
-
- 1. **New concretizer is now default**
- The new concretizer introduced as an experimental feature in `v0.16.0`
- is now the default (#25502). The new concretizer is based on the
- [clingo](https://github.com/potassco/clingo) logic programming system,
- and it enables us to do much higher quality and faster dependency solving
- The old concretizer is still available via the `concretizer: original`
- setting, but it is deprecated and will be removed in `v0.18.0`.
-
- 2. **Binary Bootstrapping**
- To make it easier to use the new concretizer and binary packages,
- Spack now bootstraps `clingo` and `GnuPG` from public binaries. If it
- is not able to bootstrap them from binaries, it installs them from
- source code. With these changes, you should still be able to clone Spack
- and start using it almost immediately. (#21446, #22354, #22489, #22606,
- #22720, #22720, #23677, #23946, #24003, #25138, #25607, #25964, #26029,
- #26399, #26599).
-
- 3. **Reuse existing packages (experimental)**
- The most wanted feature from our
- [2020 user survey](https://spack.io/spack-user-survey-2020/) and
- the most wanted Spack feature of all time (#25310). `spack install`,
- `spack spec`, and `spack concretize` now have a `--reuse` option, which
- causes Spack to minimize the number of rebuilds it does. The `--reuse`
- option will try to find existing installations and binary packages locally
- and in registered mirrors, and will prefer to use them over building new
- versions. This will allow users to build from source *far* less than in
- prior versions of Spack. This feature will continue to be improved, with
- configuration options and better CLI expected in `v0.17.1`. It will become
- the *default* concretization mode in `v0.18.0`.
-
- 4. **Better error messages**
- We have improved the error messages generated by the new concretizer by
- using *unsatisfiable cores*. Spack will now print a summary of the types
- of constraints that were violated to make a spec unsatisfiable (#26719).
-
- 5. **Conditional variants**
- Variants can now have a `when="<spec>"` clause, allowing them to be
- conditional based on the version or other attributes of a package (#24858).
-
- 6. **Git commit versions**
- In an environment and on the command-line, you can now provide a full,
- 40-character git commit as a version for any package with a top-level
- `git` URL. e.g., `spack install hdf5@45bb27f58240a8da7ebb4efc821a1a964d7712a8`.
- Spack will compare the commit to tags in the git repository to understand
- what versions it is ahead of or behind.
-
- 7. **Override local config and cache directories**
- You can now set `SPACK_DISABLE_LOCAL_CONFIG` to disable the `~/.spack` and
- `/etc/spack` configuration scopes. `SPACK_USER_CACHE_PATH` allows you to
- move caches out of `~/.spack`, as well (#27022, #26735). This addresses
- common problems where users could not isolate CI environments from local
- configuration.
-
- 8. **Improvements to Spack Containerize**
- For added reproducibility, you can now pin the Spack version used by
- `spack containerize` (#21910). The container build will only build
- with the Spack version pinned at build recipe creation instead of the
- latest Spack version.
-
- 9. **New commands for dealing with tags**
- The `spack tags` command allows you to list tags on packages (#26136), and you
- can list tests and filter tags with `spack test list` (#26842).
-
- ## Other new features of note
-
- * Copy and relocate environment views as stand-alone installations (#24832)
- * `spack diff` command can diff two installed specs (#22283, #25169)
- * `spack -c <config>` can set one-off config parameters on CLI (#22251)
- * `spack load --list` is an alias for `spack find --loaded` (#27184)
- * `spack gpg` can export private key with `--secret` (#22557)
- * `spack style` automatically bootstraps dependencies (#24819)
- * `spack style --fix` automatically invokes `isort` (#24071)
- * build dependencies can be installed from build caches with `--include-build-deps` (#19955)
- * `spack audit` command for checking package constraints (#23053)
- * Spack can now fetch from `CVS` repositories (yep, really) (#23212)
- * `spack monitor` lets you upload analysis about installations to a
- [spack monitor server](https://github.com/spack/spack-monitor) (#23804, #24321,
- #23777, #25928))
- * `spack python --path` shows which `python` Spack is using (#22006)
- * `spack env activate --temp` can create temporary environments (#25388)
- * `--preferred` and `--latest` options for `spack checksum` (#25830)
- * `cc` is now pure posix and runs on Alpine (#26259)
- * `SPACK_PYTHON` environment variable sets which `python` spack uses (#21222)
- * `SPACK_SKIP_MODULES` lets you source `setup-env.sh` faster if you don't need modules (#24545)
-
- ## Major internal refactors
-
- * `spec.yaml` files are now `spec.json`, yielding a large speed improvement (#22845)
- * Splicing allows Spack specs to store mixed build provenance (#20262)
- * More extensive hooks API for installations (#21930)
- * New internal API for getting the active environment (#25439)
-
- ## Performance Improvements
-
- * Parallelize separate concretization in environments; Previously 55 min E4S solve
- now takes 2.5 min (#26264)
- * Drastically improve YamlFilesystemView file removal performance via batching (#24355)
- * Speed up spec comparison (#21618)
- * Speed up environment activation (#25633)
-
- ## Archspec improvements
- * support for new generic `x86_64_v2`, `x86_64_v3`, `x86_64_v4` targets
- (see [archspec#31](https://github.com/archspec/archspec-json/pull/31))
- * `spack arch --generic` lets you get the best generic architecture for
- your node (#27061)
- * added support for aocc (#20124), `arm` compiler on `graviton2` (#24904)
- and on `a64fx` (#24524),
-
- ## Infrastructure, buildcaches, and services
-
- * Add support for GCS Bucket Mirrors (#26382)
- * Add `spackbot` to help package maintainers with notifications. See
- [spack.github.io/spackbot](https://spack.github.io/spackbot/)
- * Reproducible pipeline builds with `spack ci rebuild` (#22887)
- * Removed redundant concretizations from GitLab pipeline generation (#26622)
- * Spack CI no longer generates jobs for unbuilt specs (#20435)
- * Every pull request pipeline has its own buildcache (#25529)
- * `--no-add` installs only specified specs and only if already present in… (#22657)
- * Add environment-aware `spack buildcache sync` command (#25470)
- * Binary cache installation speedups and improvements (#19690, #20768)
-
- ## Deprecations and Removals
-
- * `spack setup` was deprecated in v0.16.0, and has now been removed.
- Use `spack develop` and `spack dev-build`.
- * Remove unused `--dependencies` flag from `spack load` (#25731)
- * Remove stubs for `spack module [refresh|find|rm|loads]`, all of which
- were deprecated in 2018.
-
- ## Notable Bugfixes
-
- * Deactivate previous env before activating new one (#25409)
- * Many fixes to error codes from `spack install` (#21319, #27012, #25314)
- * config add: infer type based on JSON schema validation errors (#27035)
- * `spack config edit` now works even if `spack.yaml` is broken (#24689)
-
- ## Packages
-
- * Allow non-empty version ranges like `1.1.0:1.1` (#26402)
- * Remove `.99`'s from many version ranges (#26422)
- * Python: use platform-specific site packages dir (#25998)
- * `CachedCMakePackage` for using *.cmake initial config files (#19316)
- * `lua-lang` allows swapping `lua` and `luajit` (#22492)
- * Better support for `ld.gold` and `ld.lld` (#25626)
- * build times are now stored as metadata in `$prefix/.spack` (#21179)
- * post-install tests can be reused in smoke tests (#20298)
- * Packages can use `pypi` attribute to infer `homepage`/`url`/`list_url` (#17587)
- * Use gnuconfig package for `config.guess` file replacement (#26035)
- * patches: make re-applied patches idempotent (#26784)
-
- ## Spack community stats
-
- * 5969 total packages, 920 new since `v0.16.0`
- * 358 new Python packages, 175 new R packages
- * 513 people contributed to this release
- * 490 committers to packages
- * 105 committers to core
- * Lots of GPU updates:
- * ~77 CUDA-related commits
- * ~66 AMD-related updates
- * ~27 OneAPI-related commits
- * 30 commits from AMD toolchain support
- * `spack test` usage in packages is increasing
- * 1669 packages with tests (mostly generic python tests)
- * 93 packages with their own tests
-
- # v0.16.3 (2021-09-21)
-
- * clang/llvm: fix version detection (#19978)
- * Fix use of quotes in Python build system (#22279)
- * Cray: fix extracting paths from module files (#23472)
- * Use AWS CloudFront for source mirror (#23978)
- * Ensure all roots of an installed environment are marked explicit in db (#24277)
- * Fix fetching for Python 3.8 and 3.9 (#24686)
- * locks: only open lockfiles once instead of for every lock held (#24794)
- * Remove the EOL centos:6 docker image
-
- # v0.16.2 (2021-05-22)
-
- * Major performance improvement for `spack load` and other commands. (#23661)
README.md (inferred)

@@ -4,7 +4,6 @@
- [](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
  [](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
  [](https://codecov.io/gh/spack/spack)
  [](https://github.com/spack/spack/actions/workflows/build-containers.yml)
  [](https://spack.readthedocs.io)
  [](https://slack.spack.io)

@@ -27,7 +26,7 @@ for examples and highlights.
  To install spack and your first package, make sure you have Python.
  Then:

- $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
+ $ git clone https://github.com/spack/spack.git
  $ cd spack/bin
  $ ./spack install zlib
SECURITY.md: 24 changed lines
@@ -1,24 +0,0 @@
- # Security Policy
-
- ## Supported Versions
-
- We provide security updates for the following releases.
- For more on Spack's release structure, see
- [`README.md`](https://github.com/spack/spack#releases).
-
- | Version | Supported |
- | ------- | ------------------ |
- | develop | :white_check_mark: |
- | 0.16.x | :white_check_mark: |
-
- ## Reporting a Vulnerability
-
- To report a vulnerability or other security
- issue, email maintainers@spack.io.
-
- You can expect to hear back within two days.
- If your security issue is accepted, we will do
- our best to release a fix within a week. If
- fixing the issue will take longer than this,
- we will discuss timeline options with you.
etc/spack/defaults/bootstrap.yaml (inferred)

@@ -4,7 +4,7 @@ bootstrap:
  enable: true
  # Root directory for bootstrapping work. The software bootstrapped
  # by Spack is installed in a "store" subfolder of this root directory
- root: $user_cache_path/bootstrap
+ root: ~/.spack/bootstrap
  # Methods that can be used to bootstrap software. Each method may or
  # may not be able to bootstrap all of the software that Spack needs,
  # depending on its type.
@@ -29,4 +29,4 @@ bootstrap:
  # By default we trust bootstrapping from sources and from binaries
  # produced on Github via the workflow
  github-actions: true
- spack-install: true
+ spack-install: true
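The head side of this hunk replaces the `$user_cache_path` placeholder with a literal home-directory path. A sketch of the resulting defaults, assuming the two trust flags sit under the file's `trusted:` mapping (the hunk context does not show the enclosing key, so that nesting is an assumption):

```yaml
# Sketch of the head-side bootstrap defaults; `sources:` entries omitted.
bootstrap:
  enable: true
  # Software bootstrapped by Spack lands in a "store" subfolder of this root.
  root: ~/.spack/bootstrap
  trusted:              # assumed enclosing key, not shown in the hunk
    github-actions: true
    spack-install: true
```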
etc/spack/defaults/config.yaml (inferred)

@@ -42,8 +42,8 @@ config:
  # (i.e., ``$TMP` or ``$TMPDIR``).
  #
  # Another option that prevents conflicts and potential permission issues is
- # to specify `$user_cache_path/stage`, which ensures each user builds in their
- # home directory.
+ # to specify `~/.spack/stage`, which ensures each user builds in their home
+ # directory.
  #
  # A more traditional path uses the value of `$spack/var/spack/stage`, which
  # builds directly inside Spack's instance without staging them in a
@@ -60,13 +60,13 @@ config:
  # identifies Spack staging to avoid accidentally wiping out non-Spack work.
  build_stage:
  - $tempdir/$user/spack-stage
- - $user_cache_path/stage
+ - ~/.spack/stage
  # - $spack/var/spack/stage

  # Directory in which to run tests and store test results.
  # Tests will be stored in directories named by date/time and package
  # name/hash.
- test_stage: $user_cache_path/test
+ test_stage: ~/.spack/test

  # Cache directory for already downloaded source tarballs and archived
  # repositories. This can be purged with `spack clean --downloads`.
@@ -75,7 +75,7 @@ config:

  # Cache directory for miscellaneous files, like the package index.
  # This can be purged with `spack clean --misc-cache`
- misc_cache: $user_cache_path/cache
+ misc_cache: ~/.spack/cache

  # Timeout in seconds used for downloading sources etc. This only applies
@@ -134,7 +134,7 @@ config:
  # enabling locks.
  locks: true

- # The default url fetch method to use.
+ # The default url fetch method to use.
  # If set to 'curl', Spack will require curl on the user's system
  # If set to 'urllib', Spack will use python built-in libs to fetch
  url_fetch_method: urllib
@@ -160,10 +160,11 @@ config:
  # sufficiently for many specs.
  #
  # 'clingo': Uses a logic solver under the hood to solve DAGs with full
- # backtracking and optimization for user preferences. Spack will
- # try to bootstrap the logic solver, if not already available.
+ # backtracking and optimization for user preferences.
  #
- concretizer: clingo
+ # 'clingo' currently requires the clingo ASP solver to be installed and
+ # built with python bindings. 'original' is built in.
+ concretizer: original

  # How long to wait to lock the Spack installation database. This lock is used
@@ -190,8 +191,3 @@ config:
  # Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
  # manipulation by unprivileged user (e.g. AFS)
  allow_sgid: true
-
- # Whether to set the terminal title to display status information during
- # building and installing packages. This gives information about Spack's
- # current progress as well as the current and total number of packages.
- terminal_title: false
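Since the head branch pins the default back to `concretizer: original`, anyone who still wants the clingo solver on that branch can override the setting in a higher-precedence scope. A minimal sketch of such an override in a hypothetical user-scope `~/.spack/config.yaml` (not part of this diff):

```yaml
# Hypothetical user-scope override restoring the clingo concretizer;
# user scope takes precedence over the shipped defaults above.
config:
  concretizer: clingo
```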
lib/spack/docs/basic_usage.rst (inferred)

@@ -31,13 +31,13 @@ colorized output with a flag

  .. code-block:: console

- $ spack --color always find | less -R
+ $ spack --color always | less -R

  or an environment variable

  .. code-block:: console

- $ SPACK_COLOR=always spack find | less -R
+ $ SPACK_COLOR=always spack | less -R

  --------------------------
  Listing available packages
@@ -188,34 +188,6 @@ configuration a **spec**. In the commands above, ``mpileaks`` and
  ``mpileaks@3.0.4`` are both valid *specs*. We'll talk more about how
  you can use them to customize an installation in :ref:`sec-specs`.

- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Reusing installed dependencies
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
- .. warning::
-
- The ``--reuse`` option described here is experimental, and it will
- likely be replaced with a different option and configuration settings
- in the next Spack release.
-
- By default, when you run ``spack install``, Spack tries to build a new
- version of the package you asked for, along with updated versions of
- its dependencies. This gets you the latest versions and configurations,
- but it can result in unwanted rebuilds if you update Spack frequently.
-
- If you want Spack to try hard to reuse existing installations as dependencies,
- you can add the ``--reuse`` option:
-
- .. code-block:: console
-
- $ spack install --reuse mpich
-
- This will not do anything if ``mpich`` is already installed. If ``mpich``
- is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
- the ``mpich`` will be build with the installed versions, if possible.
- You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
- will be reused and what will be built before you install.

  .. _cmd-spack-uninstall:

  ^^^^^^^^^^^^^^^^^^^
@@ -896,9 +868,8 @@ your path:
  These commands will add appropriate directories to your ``PATH``,
  ``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH`` according to the
  :ref:`prefix inspections <customize-env-modifications>` defined in your
- modules configuration.
- When you no longer want to use a package, you can type unload or
- unuse similarly:
+ modules configuration. When you no longer want to use a package, you
+ can type unload or unuse similarly:

  .. code-block:: console

@@ -939,22 +910,6 @@ first ``libelf`` above, you would run:

  $ spack load /qmm4kso

- To see which packages that you have loaded to your enviornment you would
- use ``spack find --loaded``.
-
- .. code-block:: console
-
- $ spack find --loaded
- ==> 2 installed packages
- -- linux-debian7 / gcc@4.4.7 ------------------------------------
- libelf@0.8.13
-
- -- linux-debian7 / intel@15.0.0 ---------------------------------
- libelf@0.8.13
-
- You can also use ``spack load --list`` to get the same output, but it
- does not have the full set of query options that ``spack find`` offers.

  We'll learn more about Spack's spec syntax in the next section.

@@ -1694,7 +1649,6 @@ and it will be added to the ``PYTHONPATH`` in your current shell:

  Now ``import numpy`` will succeed for as long as you keep your current
  session open.
- The loaded packages can be checked using ``spack find --loaded``

  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  Loading Extensions via Modules
lib/spack/docs/build_systems/autotoolspackage.rst (inferred)

@@ -112,44 +112,20 @@ phase runs:

  .. code-block:: console

- $ autoreconf --install --verbose --force -I <aclocal-prefix>/share/aclocal
-
- In case you need to add more arguments, override ``autoreconf_extra_args``
- in your ``package.py`` on class scope like this:
-
- .. code-block:: python
-
- autoreconf_extra_args = ["-Im4"]
+ $ libtoolize
+ $ aclocal
+ $ autoreconf --install --verbose --force

  All you need to do is add a few Autotools dependencies to the package.
  Most stable releases will come with a ``configure`` script, but if you
- check out a commit from the ``master`` branch, you would want to add:
+ check out a commit from the ``develop`` branch, you would want to add:

  .. code-block:: python

- depends_on('autoconf', type='build', when='@master')
- depends_on('automake', type='build', when='@master')
- depends_on('libtool', type='build', when='@master')
-
- It is typically redundant to list the ``m4`` macro processor package as a
- dependency, since ``autoconf`` already depends on it.
-
- """""""""""""""""""""""""""""""
- Using a custom autoreconf phase
- """""""""""""""""""""""""""""""
-
- In some cases, it might be needed to replace the default implementation
- of the autoreconf phase with one running a script interpreter. In this
- example, the ``bash`` shell is used to run the ``autogen.sh`` script.
-
- .. code-block:: python
-
- def autoreconf(self, spec, prefix):
- which('bash')('autogen.sh')
-
- """""""""""""""""""""""""""""""""""""""
- patching configure or Makefile.in files
- """""""""""""""""""""""""""""""""""""""
+ depends_on('autoconf', type='build', when='@develop')
+ depends_on('automake', type='build', when='@develop')
+ depends_on('libtool', type='build', when='@develop')
+ depends_on('m4', type='build', when='@develop')

  In some cases, developers might need to distribute a patch that modifies
  one of the files used to generate ``configure`` or ``Makefile.in``.
@@ -159,57 +135,6 @@ create a new patch that directly modifies ``configure``. That way,
  Spack can use the secondary patch and additional build system
  dependencies aren't necessary.

- """"""""""""""""""""""""""""
- Old Autotools helper scripts
- """"""""""""""""""""""""""""
-
- Autotools based tarballs come with helper scripts such as ``config.sub`` and
- ``config.guess``. It is the responsibility of the developers to keep these files
- up to date so that they run on every platform, but for very old software
- releases this is impossible. In these cases Spack can help to replace these
- files with newer ones, without having to add the heavy dependency on
- ``automake``.
-
- Automatic helper script replacement is currently enabled by default on
- ``ppc64le`` and ``aarch64``, as these are the known cases where old scripts fail.
- On these targets, ``AutotoolsPackage`` adds a build dependency on ``gnuconfig``,
- which is a very light-weight package with newer versions of the helper files.
- Spack then tries to run all the helper scripts it can find in the release, and
- replaces them on failure with the helper scripts from ``gnuconfig``.
-
- To opt out of this feature, use the following setting:
-
- .. code-block:: python
-
- patch_config_files = False
-
- To enable it conditionally on different architectures, define a property and
- make the package depend on ``gnuconfig`` as a build dependency:
-
- .. code-block
-
- depends_on('gnuconfig', when='@1.0:')
-
- @property
- def patch_config_files(self):
- return self.spec.satisfies("@1.0:")
-
- .. note::
-
- On some exotic architectures it is necessary to use system provided
- ``config.sub`` and ``config.guess`` files. In this case, the most
- transparent solution is to mark the ``gnuconfig`` package as external and
- non-buildable, with a prefix set to the directory containing the files:
-
- .. code-block:: yaml
-
- gnuconfig:
- buildable: false
- externals:
- - spec: gnuconfig@master
- prefix: /usr/share/configure_files/
-

  """"""""""""""""
  force_autoreconf
  """"""""""""""""
@@ -399,29 +324,8 @@ options:

  --with-libfabric=</path/to/libfabric>

- """""""""""""""""""""""
- The ``variant`` keyword
- """""""""""""""""""""""
-
- When Spack variants and configure flags do not correspond one-to-one, the
- ``variant`` keyword can be passed to ``with_or_without`` and
- ``enable_or_disable``. For example:
-
- .. code-block:: python
-
- variant('debug_tools', default=False)
- config_args += self.enable_or_disable('debug-tools', variant='debug_tools')
-
- Or when one variant controls multiple flags:
-
- .. code-block:: python
-
- variant('debug_tools', default=False)
- config_args += self.with_or_without('memchecker', variant='debug_tools')
- config_args += self.with_or_without('profiler', variant='debug_tools')
-
  """"""""""""""""""""
- Activation overrides
+ activation overrides
  """"""""""""""""""""

  Finally, the behavior of either ``with_or_without`` or
lib/spack/docs/build_systems/pythonpackage.rst (inferred)

@@ -336,7 +336,7 @@ This would be translated to:

  .. code-block:: python

  extends('python')
- depends_on('python@3.5:3', type=('build', 'run'))
+ depends_on('python@3.5:3.999', type=('build', 'run'))

  Many ``setup.py`` or ``setup.cfg`` files also contain information like::
@@ -568,7 +568,7 @@ check the ``METADATA`` file for lines like::
  Lines that use ``Requires-Dist`` are similar to ``install_requires``.
  Lines that use ``Provides-Extra`` are similar to ``extra_requires``,
  and you can add a variant for those dependencies. The ``~=1.11.0``
- syntax is equivalent to ``1.11.0:1.11``.
+ syntax is equivalent to ``1.11.0:1.11.999``.

  """"""""""
  setuptools
lib/spack/docs/config_yaml.rst (inferred)

@@ -259,16 +259,3 @@ and ld.so will ONLY search for dependencies in the ``RUNPATH`` of
  the loading object.

  DO NOT MIX the two options within the same install tree.

- ----------------------
- ``terminal_title``
- ----------------------
-
- By setting this option to ``true``, Spack will update the terminal's title to
- provide information about its current progress as well as the current and
- total package numbers.
-
- To work properly, this requires your terminal to reset its title after
- Spack has finished its work, otherwise Spack's status information will
- remain in the terminal's title indefinitely. Most terminals should already
- be set up this way and clear Spack's status information.
@@ -402,15 +402,12 @@ Spack-specific variables

  Spack understands several special variables. These are:

  * ``$env``: name of the currently active :ref:`environment <environments>`
  * ``$spack``: path to the prefix of this Spack installation
  * ``$tempdir``: default system temporary directory (as specified in
  Python's `tempfile.tempdir
  <https://docs.python.org/2/library/tempfile.html#tempfile.tempdir>`_
  variable.
  * ``$user``: name of the current user
- * ``$user_cache_path``: user cache directory (``~/.spack`` unless
- :ref:`overridden <local-config-overrides>`)

  Note that, as with shell variables, you can write these as ``$varname``
  or with braces to distinguish the variable from surrounding characters:
@@ -565,39 +562,3 @@ built in and are not overridden by a configuration file. The
  command line. ``dirty`` and ``install_tree`` come from the custom
  scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration
  options come from the default configuration files that ship with Spack.

- .. _local-config-overrides:
-
- ------------------------------
- Overriding Local Configuration
- ------------------------------
-
- Spack's ``system`` and ``user`` scopes provide ways for administrators and users to set
- global defaults for all Spack instances, but for use cases where one wants a clean Spack
- installation, these scopes can be undesirable. For example, users may want to opt out of
- global system configuration, or they may want to ignore their own home directory
- settings when running in a continuous integration environment.
-
- Spack also, by default, keeps various caches and user data in ``~/.spack``, but
- users may want to override these locations.
-
- Spack provides three environment variables that allow you to override or opt out of
- configuration locations:
-
- * ``SPACK_USER_CONFIG_PATH``: Override the path to use for the
- ``user`` scope (``~/.spack`` by default).
- * ``SPACK_SYSTEM_CONFIG_PATH``: Override the path to use for the
- ``system`` scope (``/etc/spack`` by default).
- * ``SPACK_DISABLE_LOCAL_CONFIG``: set this environment variable to completely disable
- **both** the system and user configuration directories. Spack will only consider its
- own defaults and ``site`` configuration locations.
-
- And one that allows you to move the default cache location:
-
- * ``SPACK_USER_CACHE_PATH``: Override the default path to use for user data
- (misc_cache, tests, reports, etc.)
-
- With these settings, if you want to isolate Spack in a CI environment, you can do this::
-
- export SPACK_DISABLE_LOCAL_CONFIG=true
- export SPACK_USER_CACHE_PATH=/tmp/spack
lib/spack/docs/containers.rst (inferred)

@@ -126,6 +126,9 @@ are currently supported are summarized in the table below:
  * - Ubuntu 18.04
  - ``ubuntu:18.04``
  - ``spack/ubuntu-bionic``
+ * - CentOS 6
+ - ``centos:6``
+ - ``spack/centos6``
  * - CentOS 7
  - ``centos:7``
  - ``spack/centos7``
@@ -197,7 +200,7 @@ Setting Base Images

  The ``images`` subsection is used to select both the image where
  Spack builds the software and the image where the built software
- is installed. This attribute can be set in different ways and
+ is installed. This attribute can be set in two different ways and
  which one to use depends on the use case at hand.

@@ -257,54 +260,10 @@ software is respectively built and installed:

  ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l"]

- This is the simplest available method of selecting base images, and we advise
+ This method of selecting base images is the simplest of the two, and we advise
  to use it whenever possible. There are cases though where using Spack official
- images is not enough to fit production needs. In these situations users can
- extend the recipe to start with the bootstrapping of Spack at a certain pinned
- version or manually select which base image to start from in the recipe,
- as we'll see next.
-
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Use a Bootstrap Stage for Spack
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
- In some cases users may want to pin the commit sha that is used for Spack, to ensure later
- reproducibility, or start from a fork of the official Spack repository to try a bugfix or
- a feature in the early stage of development. This is possible by being just a little more
- verbose when specifying information about Spack in the ``spack.yaml`` file:
-
- .. code-block:: yaml
-
- images:
- os: amazonlinux:2
- spack:
- # URL of the Spack repository to be used in the container image
- url: <to-use-a-fork>
- # Either a commit sha, a branch name or a tag
- ref: <sha/tag/branch>
- # If true turn a branch name or a tag into the corresponding commit
- # sha at the time of recipe generation
- resolve_sha: <true/false>
-
- ``url`` specifies the URL from which to clone Spack and defaults to https://github.com/spack/spack.
- The ``ref`` attribute can be either a commit sha, a branch name or a tag. The default value in
- this case is to use the ``develop`` branch, but it may change in the future to point to the latest stable
- release. Finally ``resolve_sha`` transform branch names or tags into the corresponding commit
- shas at the time of recipe generation, to allow for a greater reproducibility of the results
- at a later time.
-
- The list of operating systems that can be used to bootstrap Spack can be
- obtained with:
-
- .. command-output:: spack containerize --list-os
-
- .. note::
-
- The ``resolve_sha`` option uses ``git rev-parse`` under the hood and thus it requires
- to checkout the corresponding Spack repository in a temporary folder before generating
- the recipe. Recipe generation may take longer when this option is set to true because
- of this additional step.
-
+ images is not enough to fit production needs. In these situations users can manually
+ select which base image to start from in the recipe, as we'll see next.

  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  Use Custom Images Provided by Users
@@ -456,18 +415,6 @@ to customize the generation of container recipes:
  - Version of Spack use in the ``build`` stage
  - Valid tags for ``base:image``
  - Yes, if using constrained selection of base images
- * - ``images:spack:url``
- - Repository from which Spack is cloned
- - Any fork of Spack
- - No
- * - ``images:spack:ref``
- - Reference for the checkout of Spack
- - Either a commit sha, a branch name or a tag
- - No
- * - ``images:spack:resolve_sha``
- - Resolve branches and tags in ``spack.yaml`` to commits in the generated recipe
- - True or False (default: False)
- - No
  * - ``images:build``
  - Image to be used in the ``build`` stage
  - Any valid container image
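The removed bootstrap-stage documentation describes `images:spack:url`, `images:spack:ref`, and `images:spack:resolve_sha` keys for pinning the Spack checkout inside a generated recipe. A minimal `spack.yaml` sketch following that schema (placeholder values; only meaningful on the side of the diff that still supports these keys):

```yaml
# Sketch per the removed images:spack schema; url/ref values are placeholders.
spack:
  specs:
    - zlib
  container:
    format: docker
    images:
      os: amazonlinux:2
      spack:
        url: https://github.com/spack/spack
        ref: develop
        resolve_sha: false
```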
lib/spack/docs/contribution_guide.rst (inferred)

@@ -338,6 +338,15 @@ Once all of the dependencies are installed, you can try building the documentation:
  If you see any warning or error messages, you will have to correct those before
  your PR is accepted.

+ .. note::
+
+ There is also a ``run-doc-tests`` script in ``share/spack/qa``. The only
+ difference between running this script and running ``make`` by hand is that
+ the script will exit immediately if it encounters an error or warning. This
+ is necessary for CI. If you made a lot of documentation changes, it is
+ much quicker to run ``make`` by hand so that you can see all of the warnings
+ at once.

  If you are editing the documentation, you should obviously be running the
  documentation tests. But even if you are simply adding a new package, your
  changes could cause the documentation tests to fail:
@@ -210,6 +210,15 @@ Spec-related modules
     but compilers aren't fully integrated with the build process
     yet.

:mod:`spack.architecture`
  :func:`architecture.default_arch <spack.architecture.default_arch>` is used
  to determine the host architecture while building.

  .. warning::

     Not yet implemented. Should eventually have architecture
     descriptions for cross-compiling.

^^^^^^^^^^^^^^^^^
Build environment
^^^^^^^^^^^^^^^^^
@@ -35,7 +35,7 @@ Getting Spack is easy. You can clone it from the `github repository

.. code-block:: console

   $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
   $ git clone https://github.com/spack/spack.git

This will create a directory called ``spack``.
@@ -88,71 +88,74 @@ the environment.
Bootstrapping clingo
^^^^^^^^^^^^^^^^^^^^

Spack uses ``clingo`` under the hood to resolve optimal versions and variants of
dependencies when installing a package. Since ``clingo`` itself is a binary,
Spack has to install it on initial use, which is called bootstrapping.

Spack provides two ways of bootstrapping ``clingo``: from pre-built binaries
(default), or from sources. The fastest way to get started is to bootstrap from
pre-built binaries.

.. note::

   When bootstrapping from pre-built binaries, Spack currently requires
   ``patchelf`` on Linux and ``otool`` on macOS. If ``patchelf`` is not in the
   ``PATH``, Spack will build it from sources, and a C++ compiler is required.

The first time you concretize a spec, Spack will bootstrap in the background:
Spack supports using ``clingo`` as an external solver to compute which software
needs to be installed. The default configuration allows Spack to install
``clingo`` from a public buildcache, created by a GitHub Actions workflow. In this
case the bootstrapping procedure is transparent to the user, except for a
slightly long waiting time on the first concretization of a spec:

.. code-block:: console

   $ time spack spec zlib
   Input spec
   --------------------------------
   zlib
   $ spack find -b
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 0 installed packages

   Concretized
   --------------------------------
   zlib@1.2.11%gcc@7.5.0+optimize+pic+shared arch=linux-ubuntu18.04-zen
   $ time spack solve zlib
   ==> Best of 2 considered solutions.
   ==> Optimization Criteria:
     Priority  Criterion                                   Value
       1       deprecated versions used                    0
       2       version weight                              0
       3       number of non-default variants (roots)      0
       4       multi-valued variants                       0
       5       preferred providers for roots               0
       6       number of non-default variants (non-roots)  0
       7       preferred providers (non-roots)             0
       8       compiler mismatches                         0
       9       version badness                             0
      10       count of non-root multi-valued variants     0
      11       non-preferred compilers                     0
      12       target mismatches                           0
      13       non-preferred targets                       0

   real 0m20.023s
   user 0m18.351s
   sys  0m0.784s
   zlib@1.2.11%gcc@11.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

   real 0m30,618s
   user 0m27,278s
   sys  0m1,549s

After this command you'll see that ``clingo`` has been installed for Spack's own use:

.. code-block:: console

   $ spack find -b
   ==> Showing internal bootstrap store at "/root/.spack/bootstrap/store"
   ==> 3 installed packages
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 2 installed packages
   -- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
   clingo-bootstrap@spack  python@3.6

   -- linux-ubuntu18.04-zen / gcc@7.5.0 ----------------------------
   patchelf@0.13

Subsequent calls to the concretizer will then be much faster:

.. code-block:: console

   $ time spack spec zlib
   $ time spack solve zlib
   [ ... ]
   real 0m0.490s
   user 0m0.431s
   sys  0m0.041s
   real 0m1,222s
   user 0m1,146s
   sys  0m0,059s

If for security concerns you cannot bootstrap ``clingo`` from pre-built
binaries, you have to mark this bootstrapping method as untrusted. This makes
Spack fall back to bootstrapping from sources:
If for security or for other reasons you don't want to or can't install precompiled
binaries, Spack can fall back to bootstrapping ``clingo`` from source files. To forbid
Spack from retrieving binaries from the bootstrapping buildcache, the following
command must be given:

.. code-block:: console

   $ spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

You can verify that the new settings are effective with:
since an "untrusted" way of bootstrapping software will not be considered
by Spack. You can verify the new settings are effective with:

.. code-block:: console

@@ -178,25 +181,33 @@ You can verify that the new settings are effective with:
   Description:
     Specs built from sources by Spack. May take a long time.

.. note::

   When bootstrapping from sources, Spack requires a full install of Python
   including header files (e.g. ``python3-dev`` on Debian), and a compiler
   with support for C++14 (GCC on Linux, Apple Clang on macOS) and static C++
   standard libraries on Linux.

Spack will build the required software on the first request to concretize a spec:
   When bootstrapping from sources, Spack requires a compiler with support
   for C++14 (GCC on ``linux``, Apple Clang on ``darwin``) and static C++
   standard libraries on ``linux``. Spack will build the required software
   on the first request to concretize a spec:

.. code-block:: console

   $ spack spec zlib
   $ spack solve zlib
   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
   [ ... ]
   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

.. tip::

   If you want to speed up bootstrapping ``clingo`` from sources, you may try to
   search for ``cmake`` and ``bison`` on your system:

   .. code-block:: console

      $ spack external find cmake bison
      ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
      bison@3.0.4  cmake@3.19.4

"""""""""""""""""""
The Bootstrap Store
"""""""""""""""""""
@@ -39,7 +39,7 @@ package:

.. code-block:: console

   $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
   $ git clone https://github.com/spack/spack.git
   $ cd spack/bin
   $ ./spack install libelf
@@ -213,18 +213,6 @@ location). The set ``my_custom_lmod_modules`` will install its lmod
modules to ``/path/to/install/custom/lmod/modules`` (and still install
its tcl modules, if any, to the default location).

By default, an architecture-specific directory is added to the root
directory. A module set may override that behavior by setting the
``arch_folder`` config value to ``False``.

.. code-block:: yaml

   modules:
     default:
       roots:
         tcl: /path/to/install/tcl/modules
       arch_folder: false

Obviously, having multiple module sets install modules to the default
location could be confusing to users of your modules. In the next
section, we will discuss enabling and disabling module types (module
@@ -461,36 +449,6 @@ that are already in the LMod hierarchy.
   For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
   See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.

""""""""""""""""""""""
Select default modules
""""""""""""""""""""""

By default, when multiple modules of the same name share a directory,
the highest version number will be the default module. This behavior
of the ``module`` command can be overridden with a symlink named
``default`` to the desired default module. If you wish to configure
default modules with Spack, add a ``defaults`` key to your modules
configuration:

.. code-block:: yaml

   modules:
     my-module-set:
       tcl:
         defaults:
           - gcc@10.2.1
           - hdf5@1.2.10+mpi+hl%gcc

These defaults may be arbitrarily specific. For any package that
satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.

.. warning::
   If Spack is configured to generate multiple default packages in the
   same directory, the last modulefile to be generated will be the
   default module.

.. _customize-env-modifications:

"""""""""""""""""""""""""""""""""""
@@ -695,23 +695,20 @@ example, ``py-sphinx-rtd-theme@0.1.10a0``. In this case, numbers are
always considered to be "newer" than letters. This is for consistency
with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.

Spack versions may also be arbitrary non-numeric strings, for example
``@develop``, ``@master``, ``@local``.
Spack versions may also be arbitrary non-numeric strings; any string
here will suffice, for example ``@develop``, ``@master``, ``@local``.
Versions are compared as follows. First, a version string is split into
multiple fields based on delimiters such as ``.``, ``-`` etc. Then
matching fields are compared using the rules below:

The order on versions is defined as follows. A version string is split
into a list of components based on delimiters such as ``.``, ``-`` etc.
Lists are then ordered lexicographically, where components are ordered
as follows:
#. The following develop-like strings are greater (newer) than all
   numbers and are ordered as ``develop > main > master > head > trunk``.

#. The following special strings are considered larger than any other
   numeric or non-numeric version component, and satisfy the following
   order between themselves: ``develop > main > master > head > trunk``.
#. Numbers are all less than the chosen develop-like strings above,
   and are sorted numerically.

#. Numbers are ordered numerically, are less than special strings, and
   larger than other non-numeric components.

#. All other non-numeric components are less than numeric components,
   and are ordered alphabetically.
#. All other non-numeric versions are less than numeric versions, and
   are sorted alphabetically.
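A quick way to see these rules in action is to compare ``Version`` objects
directly. This is a minimal sketch; it assumes a Spack checkout on
``PYTHONPATH``, and the exact comparisons may vary slightly between Spack
releases:

.. code-block:: python

   from spack.version import Version

   # develop-like strings outrank any numeric version
   assert Version('develop') > Version('999.9')

   # numeric components compare numerically, not lexicographically
   assert Version('1.10') > Version('1.9')

   # other non-numeric components sort alphabetically among themselves
   assert Version('1.2b') < Version('1.2c')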
The logic behind this sort order is two-fold:
@@ -732,7 +729,7 @@ Version selection

When concretizing, many versions might match a user-supplied spec.
For example, the spec ``python`` matches all available versions of the
package ``python``. Similarly, ``python@3:`` matches all versions of
Python 3 and above. Given a set of versions that match a spec, Spack
Python 3. Given a set of versions that match a spec, Spack
concretization uses the following priorities to decide which one to
use:
@@ -1422,60 +1419,6 @@ other similar operations:
       ).with_default('auto').with_non_feature_values('auto'),
   )

^^^^^^^^^^^^^^^^^^^^
Conditional Variants
^^^^^^^^^^^^^^^^^^^^

The variant directive accepts a ``when`` clause. The variant will only
be present on specs that otherwise satisfy the spec listed as the
``when`` clause. For example, the following class has a variant
``bar`` when it is at version 2.0 or higher.

.. code-block:: python

   class Foo(Package):
       ...
       variant('bar', default=False, when='@2.0:', description='help message')

The ``when`` clause follows the same syntax and accepts the same
values as the ``when`` argument of
:py:func:`spack.directives.depends_on`.

^^^^^^^^^^^^^^^^^^^
Overriding Variants
^^^^^^^^^^^^^^^^^^^

Packages may override variants for several reasons, most often to
change the default from a variant defined in a parent class or to
change the conditions under which a variant is present on the spec.

When a variant is defined multiple times, whether in the same package
file or in a subclass and a superclass, the last definition is used
for all attributes **except** for the ``when`` clauses. The ``when``
clauses are accumulated through all invocations, and the variant is
present on the spec if any of the accumulated conditions are
satisfied.

For example, consider the following package:

.. code-block:: python

   class Foo(Package):
       ...
       variant('bar', default=False, when='@1.0', description='help1')
       variant('bar', default=True, when='platform=darwin', description='help2')
       ...

This package ``foo`` has a variant ``bar`` when the spec satisfies
either ``@1.0`` or ``platform=darwin``, but not for other platforms at
other versions. The default for this variant, when it is present, is
always ``True``, regardless of which condition of the variant is
satisfied. This allows packages to override variants in packages or
build system classes from which they inherit, by modifying the variant
values without modifying the ``when`` clause. It also allows a package
to implement ``or`` semantics for a variant ``when`` clause by
duplicating the variant definition.
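To make the accumulation rule concrete, here is a minimal stand-alone model
of it. This is plain Python, not Spack's implementation; the ``satisfies``
callback is a stand-in for real spec matching:

.. code-block:: python

   # Each duplicated variant definition contributes one `when` condition.
   conditions = ['@1.0', 'platform=darwin']

   def variant_is_present(satisfies):
       """The variant exists if ANY accumulated condition is satisfied."""
       return any(satisfies(c) for c in conditions)

   # A toy spec that is at version 1.0 but not on darwin:
   print(variant_is_present(lambda c: c == '@1.0'))  # True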
------------------------------------
Resources (expanding extra tarballs)
------------------------------------
@@ -2120,7 +2063,7 @@ Version ranges
^^^^^^^^^^^^^^

Although some packages require a specific version for their dependencies,
most can be built with a range of versions. For example, if you are
most can be built with a range of version. For example, if you are
writing a package for a legacy Python module that only works with Python
2.4 through 2.6, this would look like:
@@ -2129,9 +2072,9 @@ writing a package for a legacy Python module that only works with Python

   depends_on('python@2.4:2.6')

Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
you want to specify that a package works with any version of Python 3 (or
higher), this would look like:
greater than or equal to ``2.4`` and up to and including ``2.6``. If you
want to specify that a package works with any version of Python 3, this
would look like:

.. code-block:: python
@@ -2142,30 +2085,29 @@ requires Python 2, you can similarly leave out the lower bound:

.. code-block:: python

   depends_on('python@:2')
   depends_on('python@:2.9')

Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
``@:3`` means "up to and including any 3.x version".
``@:3`` means "up to and including 3".

What if a package can only be built with Python 2.7? You might be
What if a package can only be built with Python 2.6? You might be
inclined to use:

.. code-block:: python

   depends_on('python@2.7')
   depends_on('python@2.6')

However, this would be wrong. Spack assumes that all version constraints
are exact, so it would try to install Python not at ``2.7.18``, but
exactly at ``2.7``, which is a non-existent version. The correct way to
specify this would be:
are absolute, so it would try to install Python at exactly ``2.6``. The
correct way to specify this would be:

.. code-block:: python

   depends_on('python@2.7.0:2.7')
   depends_on('python@2.6.0:2.6.999')
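Note that the two wordings above reflect a real behavioral difference between
Spack releases: newer Spack treats ``:2.6`` as including any ``2.6.x``, while
older releases required the ``2.6.0:2.6.999`` idiom. A sketch of how one might
probe the behavior of a given checkout (assumes Spack on ``PYTHONPATH``):

.. code-block:: python

   from spack.version import Version, ver

   rng = ver('2.4:2.6')
   # True on releases where ranges include 2.6.x; False on older ones
   print(Version('2.6.5') in rng)
   print(Version('2.7') in rng)    # False under either behavior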
A spec can contain a version list of ranges and individual versions
separated by commas. For example, if you need Boost 1.59.0 or newer,
but there are known issues with 1.64.0, 1.65.0, and 1.66.0, you can say:
A spec can contain multiple version ranges separated by commas.
For example, if you need Boost 1.59.0 or newer, but there are known
issues with 1.64.0, 1.65.0, and 1.66.0, you can say:

.. code-block:: python
@@ -2882,7 +2824,7 @@ is equivalent to:

       depends_on('elpa+openmp', when='+openmp+elpa')

Constraints from nested context managers are also combined together, but they are rarely
Constraints from nested context managers are also added together, but they are rarely
needed or recommended.

.. _install-method:
@@ -48,9 +48,9 @@ or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
topics are outside the scope of this document.

Spack's pipelines are now making use of the
`trigger <https://docs.gitlab.com/ee/ci/yaml/#trigger>`_ syntax to run
`trigger <https://docs.gitlab.com/12.9/ee/ci/yaml/README.html#trigger>`_ syntax to run
dynamically generated
`child pipelines <https://docs.gitlab.com/ee/ci/pipelines/parent_child_pipelines.html>`_.
`child pipelines <https://docs.gitlab.com/12.9/ee/ci/parent_child_pipelines.html>`_.
Note that the use of dynamic child pipelines requires running Gitlab version
``>= 12.9``.
@@ -335,7 +335,7 @@ merged YAML from all configuration files, use ``spack config get repos``:
   - ~/myrepo
   - $spack/var/spack/repos/builtin

Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``.

^^^^^^^^^^^^^^^^^^^^^
@@ -5,6 +5,3 @@ sphinx>=3.4,!=4.1.2
sphinxcontrib-programoutput
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to work around a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17
@@ -17,7 +17,6 @@ spack:
    # Sphinx
    - "py-sphinx@3.4:4.1.1,4.1.3:"
    - py-sphinxcontrib-programoutput
    - py-docutils@:0.16
    - py-sphinx-rtd-theme
    # VCS
    - git
@@ -15,4 +15,3 @@ gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories
hg, , Optional, Manage Software Repositories
Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
574  lib/spack/env/cc  vendored
@@ -1,5 +1,4 @@
#!/bin/sh
# shellcheck disable=SC2034  # evals in this script fool shellcheck
#!/bin/bash
#
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
@@ -21,33 +20,25 @@
# -Wl,-rpath arguments for dependency /lib directories.
#

# Reset IFS to the default: whitespace-separated lists. When we use
# other separators, we set and reset it.
unset IFS

# Separator for lists whose names end with `_list`.
# We pick the alarm bell character, which is highly unlikely to
# conflict with anything. This is a literal bell character (which
# we have to use since POSIX sh does not convert escape sequences
# like '\a' outside of the format argument of `printf`).
# NOTE: Depending on your editor this may look empty, but it is not.
readonly lsep=''

# This is an array of environment variables that need to be set before
# the script runs. They are set by routines in spack.build_environment
# as part of the package installation process.
readonly params="\
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS"
parameters=(
    SPACK_ENV_PATH
    SPACK_DEBUG_LOG_DIR
    SPACK_DEBUG_LOG_ID
    SPACK_COMPILER_SPEC
    SPACK_CC_RPATH_ARG
    SPACK_CXX_RPATH_ARG
    SPACK_F77_RPATH_ARG
    SPACK_FC_RPATH_ARG
    SPACK_TARGET_ARGS
    SPACK_DTAGS_TO_ADD
    SPACK_DTAGS_TO_STRIP
    SPACK_LINKER_ARG
    SPACK_SHORT_SPEC
    SPACK_SYSTEM_DIRS
)

# Optional parameters that aren't required to be set
@@ -67,157 +58,60 @@ SPACK_SYSTEM_DIRS"
# Test command is used to unit test the compiler script.
# SPACK_TEST_COMMAND

# die MESSAGE
# Print a message and exit with error code 1.
die() {
    echo "[spack cc] ERROR: $*"
# die()
# Prints a message and exits with error 1.
function die {
    echo "$@"
    exit 1
}

# empty VARNAME
# Return whether the variable VARNAME is unset or set to the empty string.
empty() {
    eval "test -z \"\${$1}\""
}
# read input parameters into proper bash arrays.
# SYSTEM_DIRS is delimited by :
IFS=':' read -ra SPACK_SYSTEM_DIRS <<< "${SPACK_SYSTEM_DIRS}"

# setsep LISTNAME
# Set the global variable 'sep' to the separator for a list with name LISTNAME.
# There are three types of lists:
#   1. regular lists end with _list and are separated by $lsep
#   2. directory lists end with _dirs/_DIRS/PATH(S) and are separated by ':'
#   3. any other list is assumed to be separated by spaces: " "
setsep() {
    case "$1" in
        *_dirs|*_DIRS|*PATH|*PATHS)
            sep=':'
            ;;
        *_list)
            sep="$lsep"
            ;;
        *)
            sep=" "
            ;;
    esac
}
# SPACK_<LANG>FLAGS and SPACK_LDLIBS are split by ' '
IFS=' ' read -ra SPACK_FFLAGS <<< "$SPACK_FFLAGS"
IFS=' ' read -ra SPACK_CPPFLAGS <<< "$SPACK_CPPFLAGS"
IFS=' ' read -ra SPACK_CFLAGS <<< "$SPACK_CFLAGS"
IFS=' ' read -ra SPACK_CXXFLAGS <<< "$SPACK_CXXFLAGS"
IFS=' ' read -ra SPACK_LDFLAGS <<< "$SPACK_LDFLAGS"
IFS=' ' read -ra SPACK_LDLIBS <<< "$SPACK_LDLIBS"

# prepend LISTNAME ELEMENT [SEP]
#
# Prepend ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Handles empty lists and single-element lists.
prepend() {
    varname="$1"
    elt="$2"

    if empty "$varname"; then
        eval "$varname=\"\${elt}\""
    else
        # Get the appropriate separator for the list we're appending to.
        setsep "$varname"
        eval "$varname=\"\${elt}${sep}\${$varname}\""
    fi
}

# append LISTNAME ELEMENT [SEP]
#
# Append ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Handles empty lists and single-element lists.
append() {
    varname="$1"
    elt="$2"

    if empty "$varname"; then
        eval "$varname=\"\${elt}\""
    else
        # Get the appropriate separator for the list we're appending to.
        setsep "$varname"
        eval "$varname=\"\${$varname}${sep}\${elt}\""
    fi
}

# extend LISTNAME1 LISTNAME2 [PREFIX]
#
# Append the elements stored in the variable LISTNAME2
# to the list stored in LISTNAME1.
# If PREFIX is provided, prepend it to each element.
extend() {
    # Figure out the appropriate IFS for the list we're reading.
    setsep "$2"
    if [ "$sep" != " " ]; then
        IFS="$sep"
    fi
    eval "for elt in \${$2}; do append $1 \"$3\${elt}\"; done"
    unset IFS
}

# preextend LISTNAME1 LISTNAME2 [PREFIX]
#
# Prepend the elements stored in the list at LISTNAME2
# to the list at LISTNAME1, preserving order.
# If PREFIX is provided, prepend it to each element.
preextend() {
    # Figure out the appropriate IFS for the list we're reading.
    setsep "$2"
    if [ "$sep" != " " ]; then
        IFS="$sep"
    fi

    # first, reverse the list to prepend
    _reversed_list=""
    eval "for elt in \${$2}; do prepend _reversed_list \"$3\${elt}\"; done"

    # prepend reversed list to preextend in order
    IFS="${lsep}"
    for elt in $_reversed_list; do prepend "$1" "$3${elt}"; done
    unset IFS
}
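For readers following along, the list conventions that ``setsep``, ``append``,
and ``extend`` implement can be modeled in a few lines of Python. This is an
illustration only, not part of Spack:

.. code-block:: python

   LSEP = '\a'  # stand-in for the literal bell character used above

   def sep_for(name):
       """Mirror setsep: ':' for dir lists, bell for *_list, else space."""
       if name.endswith(('_dirs', '_DIRS', 'PATH', 'PATHS')):
           return ':'
       if name.endswith('_list'):
           return LSEP
       return ' '

   def append(value, name, elt):
       """Mirror append: handle the empty-list case, then join with sep."""
       return elt if not value else value + sep_for(name) + elt

   path = append(append('', 'lib_dirs', '/usr/lib'), 'lib_dirs', '/opt/lib')
   print(path)  # /usr/lib:/opt/lib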
# system_dir PATH
# test whether a path is a system directory
system_dir() {
    IFS=':'  # SPACK_SYSTEM_DIRS is colon-separated
function system_dir {
    path="$1"
    for sd in $SPACK_SYSTEM_DIRS; do
        if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
    for sd in "${SPACK_SYSTEM_DIRS[@]}"; do
        if [ "${path}" == "${sd}" ] || [ "${path}" == "${sd}/" ]; then
            # success if path starts with a system prefix
            unset IFS
            return 0
        fi
    done
    unset IFS
    return 1  # fail if path starts with no system prefix
}

# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
    die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
fi

# ensure required variables are set
for param in $params; do
    if eval "test -z \"\${${param}:-}\""; then
for param in "${parameters[@]}"; do
    if [[ -z ${!param+x} ]]; then
        die "Spack compiler must be run from Spack! Input '$param' is missing."
    fi
done

# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
if [[ -z ${SPACK_ADD_DEBUG_FLAGS+x} ]]; then
    SPACK_ADD_DEBUG_FLAGS="false"
fi

# SPACK_ADD_DEBUG_FLAGS must be true/false/custom
is_valid="false"
for param in "true" "false" "custom"; do
    if [ "$param" = "$SPACK_ADD_DEBUG_FLAGS" ]; then
    if [ "$param" == "$SPACK_ADD_DEBUG_FLAGS" ]; then
        is_valid="true"
    fi
done

# Exit with error if we are given an incorrect value
if [ "$is_valid" = "false" ]; then
    die "SPACK_ADD_DEBUG_FLAGS, if defined, must be one of 'true', 'false', or 'custom'."
if [ "$is_valid" == "false" ]; then
    die "SPACK_ADD_DEBUG_FLAGS, if defined, must be one of 'true' 'false' or 'custom'"
fi

# Figure out the type of compiler, the language, and the mode so that
@@ -234,7 +128,7 @@ fi
#    ld    link
#    ccld  compile & link

command="${0##*/}"
command=$(basename "$0")
comp="CC"
case "$command" in
    cpp)
@@ -280,7 +174,7 @@ esac
# If any of the arguments below are present, then the mode is vcheck.
# In vcheck mode, nothing is added in terms of extra search paths or
# libraries.
if [ -z "$mode" ] || [ "$mode" = ld ]; then
if [[ -z $mode ]] || [[ $mode == ld ]]; then
    for arg in "$@"; do
        case $arg in
            -v|-V|--version|-dumpversion)
@@ -292,16 +186,16 @@ if [ -z "$mode" ] || [ "$mode" = ld ]; then
fi

# Finish setting up the mode.
if [ -z "$mode" ]; then
if [[ -z $mode ]]; then
    mode=ccld
    for arg in "$@"; do
        if [ "$arg" = "-E" ]; then
        if [[ $arg == -E ]]; then
            mode=cpp
            break
        elif [ "$arg" = "-S" ]; then
        elif [[ $arg == -S ]]; then
            mode=as
            break
        elif [ "$arg" = "-c" ]; then
        elif [[ $arg == -c ]]; then
            mode=cc
            break
        fi
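Both variants implement the same decision table; in Python pseudocode form
(illustrative only, not part of the wrapper):

.. code-block:: python

   def detect_mode(argv):
       """First of -E/-S/-c wins; otherwise compile-and-link (ccld)."""
       for arg in argv:
           if arg == '-E':
               return 'cpp'   # preprocess only
           if arg == '-S':
               return 'as'    # compile to assembly
           if arg == '-c':
               return 'cc'    # compile to an object file
       return 'ccld'

   print(detect_mode(['-O2', '-c', 'foo.c']))  # cc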
@@ -328,46 +222,42 @@ dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
linker_arg="${SPACK_LINKER_ARG}"

# Set up rpath variable according to language.
rpath="ERROR: RPATH ARG WAS NOT SET"
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"
eval rpath=\$SPACK_${comp}_RPATH_ARG

# Dump the mode and exit if the command is dump-mode.
if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
if [[ $SPACK_TEST_COMMAND == dump-mode ]]; then
    echo "$mode"
    exit
fi

# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
# *have* to set up Fortran executables, so we need to tell the user when a build is
# about to attempt to use them unsuccessfully.
if [ -z "$command" ]; then
    die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
# Check that at least one of the real commands was actually selected,
# otherwise we don't know what to execute.
if [[ -z $command ]]; then
    die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi

#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
#
new_dirs=""
IFS=':'
for dir in $PATH; do
IFS=':' read -ra env_path <<< "$PATH"
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
spack_env_dirs+=("" ".")
export PATH=""
for dir in "${env_path[@]}"; do
    addpath=true
    for spack_env_dir in $SPACK_ENV_PATH; do
        case "${dir%%/}" in
            "$spack_env_dir"|'.'|'')
                addpath=false
                break
                ;;
        esac
    for env_dir in "${spack_env_dirs[@]}"; do
        if [[ "${dir%%/}" == "$env_dir" ]]; then
            addpath=false
            break
        fi
    done
    if [ $addpath = true ]; then
        append new_dirs "$dir"
    if $addpath; then
        export PATH="${PATH:+$PATH:}$dir"
    fi
done
unset IFS
export PATH="$new_dirs"

if [ "$mode" = vcheck ]; then
if [[ $mode == vcheck ]]; then
    exec "${command}" "$@"
fi
@@ -375,20 +265,16 @@ fi
# It doesn't work with -rpath.
# This variable controls whether they are added.
add_rpaths=true
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
    if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
        for arg in "$@"; do
            if [ "$arg" = "-r" ]; then
                if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
                    add_rpaths=false
                    break
                fi
            elif [ "$arg" = "-Wl,-r" ] && [ "$mode" = ccld ]; then
                add_rpaths=false
                break
            fi
        done
    fi
if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]];
then
    for arg in "$@"; do
        if [[ ($arg == -r && $mode == ld) ||
              ($arg == -r && $mode == ccld) ||
              ($arg == -Wl,-r && $mode == ccld) ]]; then
            add_rpaths=false
            break
        fi
    done
fi

# Save original command for debug logging
@@ -411,22 +297,17 @@ input_command="$*"
# The libs variable is initialized here for completeness, and it is also
# used later to inject flags supplied via `ldlibs` on the command
# line. These come into the wrappers via SPACK_LDLIBS.

# The loop below breaks up the command line into these lists of components.
# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""

includes=()
libdirs=()
rpaths=()
system_includes=()
system_libdirs=()
system_rpaths=()
libs=()
other_args=()
isystem_system_includes=()
isystem_includes=()

while [ $# -ne 0 ]; do
@@ -446,32 +327,32 @@ while [ $# -ne 0 ]; do
            isystem_was_used=true
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append isystem_system_include_dirs_list "$arg"
                isystem_system_includes+=("$arg")
            else
                append isystem_include_dirs_list "$arg"
                isystem_includes+=("$arg")
            fi
            ;;
        -I*)
            arg="${1#-I}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append system_include_dirs_list "$arg"
                system_includes+=("$arg")
            else
                append include_dirs_list "$arg"
                includes+=("$arg")
            fi
            ;;
        -L*)
            arg="${1#-L}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append system_lib_dirs_list "$arg"
                system_libdirs+=("$arg")
            else
                append lib_dirs_list "$arg"
                libdirs+=("$arg")
            fi
            ;;
        -l*)
            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
            # and passed by ifx to the linker, which confuses it with a
            # library. Filter it out.
            # TODO: generalize filtering of args with an env var, so that
            # TODO: we do not have to special case this here.
@@ -482,76 +363,66 @@ while [ $# -ne 0 ]; do
            fi
            arg="${1#-l}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            append other_args_list "-l$arg"
            other_args+=("-l$arg")
            ;;
        -Wl,*)
            arg="${1#-Wl,}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            case "$arg" in
                -rpath=*)  rp="${arg#-rpath=}"  ;;
                --rpath=*) rp="${arg#--rpath=}" ;;
                -rpath,*)  rp="${arg#-rpath,}"  ;;
                --rpath,*) rp="${arg#--rpath,}" ;;
                -rpath|--rpath)
                    shift; arg="$1"
                    case "$arg" in
                        -Wl,*)
                            rp="${arg#-Wl,}"
                            ;;
                        *)
                            die "-Wl,-rpath was not followed by -Wl,*"
                            ;;
                    esac
                    ;;
                "$dtags_to_strip")
                    :  # We want to remove this flag explicitly
                    ;;
                *)
                    append other_args_list "-Wl,$arg"
                    ;;
            esac
            if [[ "$arg" = -rpath=* ]]; then
                rp="${arg#-rpath=}"
            elif [[ "$arg" = --rpath=* ]]; then
                rp="${arg#--rpath=}"
            elif [[ "$arg" = -rpath,* ]]; then
                rp="${arg#-rpath,}"
            elif [[ "$arg" = --rpath,* ]]; then
                rp="${arg#--rpath,}"
            elif [[ "$arg" =~ ^-?-rpath$ ]]; then
                shift; arg="$1"
                if [[ "$arg" != -Wl,* ]]; then
                    die "-Wl,-rpath was not followed by -Wl,*"
                fi
                rp="${arg#-Wl,}"
            elif [[ "$arg" = "$dtags_to_strip" ]]; then
                :  # We want to remove this flag explicitly
            else
                other_args+=("-Wl,$arg")
            fi
            ;;
        -Xlinker,*)
            arg="${1#-Xlinker,}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi

            case "$arg" in
                -rpath=*)  rp="${arg#-rpath=}"  ;;
                --rpath=*) rp="${arg#--rpath=}" ;;
                -rpath|--rpath)
                    shift; arg="$1"
                    case "$arg" in
                        -Xlinker,*)
                            rp="${arg#-Xlinker,}"
                            ;;
                        *)
                            die "-Xlinker,-rpath was not followed by -Xlinker,*"
                            ;;
                    esac
                    ;;
                *)
                    append other_args_list "-Xlinker,$arg"
                    ;;
            esac
            if [[ "$arg" = -rpath=* ]]; then
                rp="${arg#-rpath=}"
            elif [[ "$arg" = --rpath=* ]]; then
                rp="${arg#--rpath=}"
            elif [[ "$arg" = -rpath ]] || [[ "$arg" = --rpath ]]; then
                shift; arg="$1"
                if [[ "$arg" != -Xlinker,* ]]; then
                    die "-Xlinker,-rpath was not followed by -Xlinker,*"
                fi
                rp="${arg#-Xlinker,}"
            else
                other_args+=("-Xlinker,$arg")
            fi
            ;;
        -Xlinker)
            if [ "$2" = "-rpath" ]; then
                if [ "$3" != "-Xlinker" ]; then
            if [[ "$2" == "-rpath" ]]; then
                if [[ "$3" != "-Xlinker" ]]; then
                    die "-Xlinker,-rpath was not followed by -Xlinker,*"
                fi
                shift 3;
                rp="$1"
            elif [ "$2" = "$dtags_to_strip" ]; then
            elif [[ "$2" = "$dtags_to_strip" ]]; then
                shift  # We want to remove this flag explicitly
            else
                append other_args_list "$1"
                other_args+=("$1")
            fi
            ;;
        *)
            if [ "$1" = "$dtags_to_strip" ]; then
            if [[ "$1" = "$dtags_to_strip" ]]; then
                :  # We want to remove this flag explicitly
            else
                append other_args_list "$1"
                other_args+=("$1")
            fi
            ;;
    esac
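The many ``-rpath`` spellings handled above can be summarized with a small
helper (a Python illustration, not the wrapper's actual code):

.. code-block:: python

   def extract_rpath(arg):
       """Return the rpath dir from -Wl,/-Xlinker style args, else None."""
       for prefix in ('-rpath=', '--rpath=', '-rpath,', '--rpath,'):
           if arg.startswith(prefix):
               return arg[len(prefix):]
       return None  # bare -rpath/--rpath takes the dir from the NEXT arg

   print(extract_rpath('-rpath=/opt/lib'))   # /opt/lib
   print(extract_rpath('--rpath,/usr/lib'))  # /usr/lib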
@@ -559,9 +430,9 @@ while [ $# -ne 0 ]; do
    # test rpaths against system directories in one place.
    if [ -n "$rp" ]; then
        if system_dir "$rp"; then
            append system_rpath_dirs_list "$rp"
            system_rpaths+=("$rp")
        else
            append rpath_dirs_list "$rp"
            rpaths+=("$rp")
        fi
    fi
    shift
@@ -574,15 +445,16 @@ done
# See the gmake manual on implicit rules for details:
# https://www.gnu.org/software/make/manual/html_node/Implicit-Variables.html
#
flags_list=""
flags=()

# Add debug flags
if [ "${SPACK_ADD_DEBUG_FLAGS}" = "true" ]; then
    extend flags_list debug_flags
if [ "${SPACK_ADD_DEBUG_FLAGS}" == "true" ]; then
    flags=("${flags[@]}" "${debug_flags}")

# If a custom flag is requested, derive from environment
elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
    extend flags_list SPACK_DEBUG_FLAGS
elif [ "$SPACK_ADD_DEBUG_FLAGS" == "custom" ]; then
    IFS=' ' read -ra SPACK_DEBUG_FLAGS <<< "$SPACK_DEBUG_FLAGS"
    flags=("${flags[@]}" "${SPACK_DEBUG_FLAGS[@]}")
fi

# Fortran flags come before CPPFLAGS
@@ -590,8 +462,7 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            F)
                extend flags_list SPACK_FFLAGS
                ;;
                flags=("${flags[@]}" "${SPACK_FFLAGS[@]}") ;;
        esac
        ;;
esac
@@ -599,8 +470,7 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
    cpp|as|cc|ccld)
        extend flags_list SPACK_CPPFLAGS
        ;;
        flags=("${flags[@]}" "${SPACK_CPPFLAGS[@]}") ;;
esac
@@ -609,67 +479,67 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            C)
                extend flags_list SPACK_CFLAGS
                ;;
                flags=("${flags[@]}" "${SPACK_CFLAGS[@]}") ;;
            CXX)
                extend flags_list SPACK_CXXFLAGS
                ;;
                flags=("${flags[@]}" "${SPACK_CXXFLAGS[@]}") ;;
        esac

        # prepend target args
        preextend flags_list SPACK_TARGET_ARGS
        flags=(${SPACK_TARGET_ARGS[@]} "${flags[@]}")
        ;;
esac

# Linker flags
case "$mode" in
    ld|ccld)
        extend flags_list SPACK_LDFLAGS
        ;;
        flags=("${flags[@]}" "${SPACK_LDFLAGS[@]}") ;;
esac

# On macOS insert headerpad_max_install_names linker flag
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
    if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
        case "$mode" in
            ld)
                append flags_list "-headerpad_max_install_names" ;;
            ccld)
                append flags_list "-Wl,-headerpad_max_install_names" ;;
        esac
    fi
if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]];
then
    case "$mode" in
        ld)
            flags=("${flags[@]}" -headerpad_max_install_names) ;;
        ccld)
            flags=("${flags[@]}" "-Wl,-headerpad_max_install_names") ;;
    esac
fi

if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
    if [ "$add_rpaths" != "false" ]; then
IFS=':' read -ra rpath_dirs <<< "$SPACK_RPATH_DIRS"
if [[ $mode == ccld || $mode == ld ]]; then

    if [[ "$add_rpaths" != "false" ]]; then
        # Append RPATH directories. Note that in the case of the
        # top-level package these directories may not exist yet. For dependencies
        # it is assumed that paths have already been confirmed.
        extend rpath_dirs_list SPACK_RPATH_DIRS
        rpaths=("${rpaths[@]}" "${rpath_dirs[@]}")
    fi

fi

if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
    extend lib_dirs_list SPACK_LINK_DIRS
IFS=':' read -ra link_dirs <<< "$SPACK_LINK_DIRS"
if [[ $mode == ccld || $mode == ld ]]; then
    libdirs=("${libdirs[@]}" "${link_dirs[@]}")
fi

# add RPATHs if we're in any linking mode
case "$mode" in
    ld|ccld)
        # Set extra RPATHs
        extend lib_dirs_list SPACK_COMPILER_EXTRA_RPATHS
        if [ "$add_rpaths" != "false" ]; then
            extend rpath_dirs_list SPACK_COMPILER_EXTRA_RPATHS
        IFS=':' read -ra extra_rpaths <<< "$SPACK_COMPILER_EXTRA_RPATHS"
        libdirs+=("${extra_rpaths[@]}")
        if [[ "$add_rpaths" != "false" ]]; then
            rpaths+=("${extra_rpaths[@]}")
        fi

        # Set implicit RPATHs
        if [ "$add_rpaths" != "false" ]; then
            extend rpath_dirs_list SPACK_COMPILER_IMPLICIT_RPATHS
        IFS=':' read -ra implicit_rpaths <<< "$SPACK_COMPILER_IMPLICIT_RPATHS"
        if [[ "$add_rpaths" != "false" ]]; then
            rpaths+=("${implicit_rpaths[@]}")
        fi

        # Add SPACK_LDLIBS to args
        for lib in $SPACK_LDLIBS; do
            append libs_list "${lib#-l}"
        for lib in "${SPACK_LDLIBS[@]}"; do
            libs+=("${lib#-l}")
        done
        ;;
esac
@@ -677,62 +547,63 @@ esac
#
# Finally, reassemble the command line.
#
args_list="$flags_list"

# Includes and system includes first
args=()

# flags assembled earlier
args+=("${flags[@]}")

# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list include_dirs_list "-I"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

case "$mode" in
    cpp|cc|as|ccld)
        if [ "$isystem_was_used" = "true" ]; then
            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
        else
            extend args_list SPACK_INCLUDE_DIRS "-I"
        fi
        ;;
esac
for dir in "${includes[@]}"; do args+=("-I$dir"); done
for dir in "${isystem_includes[@]}"; do args+=("-isystem" "$dir"); done

extend args_list system_include_dirs_list -I
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
IFS=':' read -ra spack_include_dirs <<< "$SPACK_INCLUDE_DIRS"
if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then
    if [[ "$isystem_was_used" == "true" ]]; then
        for dir in "${spack_include_dirs[@]}"; do args+=("-isystem" "$dir"); done
    else
        for dir in "${spack_include_dirs[@]}"; do args+=("-I$dir"); done
    fi
fi

for dir in "${system_includes[@]}"; do args+=("-I$dir"); done
for dir in "${isystem_system_includes[@]}"; do args+=("-isystem" "$dir"); done

# Library search paths
extend args_list lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"
for dir in "${libdirs[@]}"; do args+=("-L$dir"); done
for dir in "${system_libdirs[@]}"; do args+=("-L$dir"); done

# RPATHs arguments
case "$mode" in
    ccld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
        extend args_list rpath_dirs_list "$rpath"
        extend args_list system_rpath_dirs_list "$rpath"
        if [ -n "$dtags_to_add" ] ; then args+=("$linker_arg$dtags_to_add") ; fi
        for dir in "${rpaths[@]}"; do args+=("$rpath$dir"); done
        for dir in "${system_rpaths[@]}"; do args+=("$rpath$dir"); done
        ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
        extend args_list rpath_dirs_list "-rpath${lsep}"
        extend args_list system_rpath_dirs_list "-rpath${lsep}"
        if [ -n "$dtags_to_add" ] ; then args+=("$dtags_to_add") ; fi
        for dir in "${rpaths[@]}"; do args+=("-rpath" "$dir"); done
        for dir in "${system_rpaths[@]}"; do args+=("-rpath" "$dir"); done
        ;;
esac

# Other arguments from the input command
extend args_list other_args_list
args+=("${other_args[@]}")

# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"
for lib in "${libs[@]}"; do
    args+=("-l$lib");
done

full_command_list="$command"
extend full_command_list args_list
full_command=("$command" "${args[@]}")
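In both versions the final command line is rebuilt in the same fixed order;
schematically, in toy Python with placeholder lists (the ``-Wl,-rpath,``
spelling below is just one concrete rpath form):

.. code-block:: python

   flags, includes, libdirs, rpaths, other_args, libs = (
       ['-O2'], ['/inc'], ['/lib'], ['/lib'], ['foo.c'], ['m'])

   args = list(flags)                              # 1. compiler flags
   args += ['-I%s' % d for d in includes]          # 2. include dirs
   args += ['-L%s' % d for d in libdirs]           # 3. library search paths
   args += ['-Wl,-rpath,%s' % d for d in rpaths]   # 4. rpath arguments
   args += other_args                              # 5. everything else
   args += ['-l%s' % l for l in libs]              # 6. injected ldlibs
   print(args)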
# prepend the ccache binary if we're using ccache
if [ -n "$SPACK_CCACHE_BINARY" ]; then
    case "$lang_flags" in
        C|CXX)  # ccache only supports C languages
            prepend full_command_list "${SPACK_CCACHE_BINARY}"
            full_command=("${SPACK_CCACHE_BINARY}" "${full_command[@]}")
            # workaround for stage being a temp folder
            # see #3761#issuecomment-294352232
            export CCACHE_NOHASHDIR=yes
@@ -741,36 +612,25 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
fi

# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
    case "$SPACK_TEST_COMMAND" in
        dump-args)
            IFS="$lsep"
            for arg in $full_command_list; do
                echo "$arg"
            done
            unset IFS
            exit
            ;;
        dump-env-*)
            var=${SPACK_TEST_COMMAND#dump-env-}
            eval "printf '%s\n' \"\$0: \$var: \$$var\""
            ;;
        *)
            die "Unknown test command: '$SPACK_TEST_COMMAND'"
            ;;
    esac
if [[ $SPACK_TEST_COMMAND == dump-args ]]; then
    IFS="
" && echo "${full_command[*]}"
    exit
elif [[ $SPACK_TEST_COMMAND =~ dump-env-* ]]; then
    var=${SPACK_TEST_COMMAND#dump-env-}
    echo "$0: $var: ${!var}"
elif [[ -n $SPACK_TEST_COMMAND ]]; then
    die "ERROR: Unknown test command"
fi

#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
if [[ $SPACK_DEBUG == TRUE ]]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    echo "[$mode] ${full_command_list}" >> "$output_log"
    echo "[$mode] ${full_command[*]}" >> "$output_log"
fi

# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
exec "${full_command[@]}"
4  lib/spack/external/__init__.py  vendored
@@ -11,7 +11,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)
* Version: 0.1.2 (commit 4dbf253daf37e4a008e4beb6489f347b4a35aed4)

argparse
--------
@@ -88,8 +88,6 @@
* Usage: Needed by pytest. Library with cross-python path,
  ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
* Note: This package has been modified:
  * https://github.com/pytest-dev/py/pull/186 was backported

pytest
------
13  lib/spack/external/archspec/README.md  vendored
@@ -49,19 +49,6 @@ $ tox
congratulations :)
```

## Citing Archspec

If you are referencing `archspec` in a publication, please cite the following
paper:

* Massimiliano Culpo, Gregory Becker, Carlos Eduardo Arango Gutierrez, Kenneth
  Hoste, and Todd Gamblin.
  [**`archspec`: A library for detecting, labeling, and reasoning about
  microarchitectures**](https://tgamblin.github.io/pubs/archspec-canopie-hpc-2020.pdf).
  In *2nd International Workshop on Containers and New Orchestration Paradigms
  for Isolated Environments in HPC (CANOPIE-HPC'20)*, Online Event, November
  12, 2020.

## License

Archspec is distributed under the terms of both the MIT license and the
48  lib/spack/external/archspec/cpu/detect.py  vendored
@@ -206,26 +206,11 @@ def host():
    # Get a list of possible candidates for this micro-architecture
    candidates = compatible_microarchitectures(info)

    # Sorting criteria for candidates
    def sorting_fn(item):
        return len(item.ancestors), len(item.features)

    # Get the best generic micro-architecture
    generic_candidates = [c for c in candidates if c.vendor == "generic"]
    best_generic = max(generic_candidates, key=sorting_fn)

    # Filter the candidates to be descendant of the best generic candidate.
    # This is to avoid that the lack of a niche feature that can be disabled
    # from e.g. BIOS prevents detection of a reasonably performant architecture
    candidates = [c for c in candidates if c > best_generic]

    # If we don't have candidates, return the best generic micro-architecture
    if not candidates:
        return best_generic

    # Reverse sort of the depth for the inheritance tree among only targets we
    # can use. This gets the newest target we satisfy.
    return max(candidates, key=sorting_fn)
    return sorted(
        candidates, key=lambda t: (len(t.ancestors), len(t.features)), reverse=True
    )[0]
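The ranking criterion in both variants is the same tuple: depth in the
inheritance tree, then number of features. A toy illustration with stand-in
types (not archspec's real classes):

.. code-block:: python

   from collections import namedtuple

   Target = namedtuple('Target', 'name ancestors features')
   candidates = [
       Target('x86_64', [], {'sse2'}),
       Target('haswell', ['core2', 'x86_64'], {'sse2', 'avx', 'avx2'}),
   ]

   best = max(candidates, key=lambda t: (len(t.ancestors), len(t.features)))
   print(best.name)  # haswell: deeper in the tree, more features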

def compatibility_check(architecture_family):
@@ -260,13 +245,7 @@ def compatibility_check_for_power(info, target):
    """Compatibility check for PPC64 and PPC64LE architectures."""
    basename = platform.machine()
    generation_match = re.search(r"POWER(\d+)", info.get("cpu", ""))
    try:
        generation = int(generation_match.group(1))
    except AttributeError:
        # There might be no match under emulated environments. For instance
        # emulating a ppc64le with QEMU and Docker still reports the host
        # /proc/cpuinfo and not a Power
        generation = 0
    generation = int(generation_match.group(1))

    # We can use a target if it descends from our machine type and our
    # generation (9 for POWER9, etc) is at least its generation.
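The generation is parsed from the ``cpu`` field of ``/proc/cpuinfo``. For
example (the sample string below is made up for illustration):

.. code-block:: python

   import re

   cpu = 'POWER9 (architected), altivec supported'  # hypothetical sample
   match = re.search(r"POWER(\d+)", cpu)
   generation = int(match.group(1)) if match else 0  # 0 mirrors the guarded path
   print(generation)  # 9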
@@ -306,22 +285,3 @@ def compatibility_check_for_aarch64(info, target):
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )


@compatibility_check(architecture_family="riscv64")
def compatibility_check_for_riscv64(info, target):
    """Compatibility check for riscv64 architectures."""
    basename = "riscv64"
    uarch = info.get("uarch")

    # sifive unmatched board
    if uarch == "sifive,u74-mc":
        uarch = "u74mc"
    # catch-all for unknown uarchs
    else:
        uarch = "riscv64"

    arch_root = TARGETS[basename]
    return (target == arch_root or arch_root in target.ancestors) and (
        target == uarch or target.vendor == "generic"
    )
@@ -173,12 +173,6 @@ def family(self):

        return roots.pop()

    @property
    def generic(self):
        """Returns the best generic architecture that is compatible with self"""
        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
        return max(generics, key=lambda x: len(x.ancestors))

    def to_dict(self, return_list_of_items=False):
        """Returns a dictionary representation of this object.
@@ -2017,44 +2017,6 @@
      "features": [],
      "compilers": {
      }
    },
    "riscv64": {
      "from": [],
      "vendor": "generic",
      "features": [],
      "compilers": {
        "gcc": [
          {
            "versions": "7.1:",
            "flags" : "-march=rv64gc"
          }
        ],
        "clang": [
          {
            "versions": "9.0:",
            "flags" : "-march=rv64gc"
          }
        ]
      }
    },
    "u74mc": {
      "from": ["riscv64"],
      "vendor": "SiFive",
      "features": [],
      "compilers": {
        "gcc": [
          {
            "versions": "10.2:",
            "flags" : "-march=rv64gc -mtune=sifive-7-series"
          }
        ],
        "clang" : [
          {
            "versions": "12.0:",
            "flags" : "-march=rv64gc -mtune=sifive-7-series"
          }
        ]
      }
    }
  },
  "feature_aliases": {
91  lib/spack/external/ctest_log_parser.py  vendored
@@ -77,18 +77,52 @@
from six import StringIO
from six import string_types

class prefilter(object):
    """Make regular expressions faster with a simple prefiltering predicate.

    Some regular expressions seem to be much more costly than others. In
    most cases, we can evaluate a simple precondition, e.g.::

        lambda x: "error" in x

    to avoid evaluating expensive regexes on all lines in a file. This
    can reduce parse time for large files by orders of magnitude when
    evaluating lots of expressions.

    A ``prefilter`` object is designed to act like a regex, but
    ``search`` and ``match`` check the precondition before bothering to
    evaluate the regular expression.

    Note that ``match`` and ``search`` just return ``True`` and ``False``
    at the moment. Make them return a ``MatchObject`` or ``None`` if it
    becomes necessary.
    """
    def __init__(self, precondition, *patterns):
        self.patterns = [re.compile(p) for p in patterns]
        self.pre = precondition
        self.pattern = "\n ".join(('MERGED:',) + patterns)

    def search(self, text):
        return self.pre(text) and any(p.search(text) for p in self.patterns)

    def match(self, text):
        return self.pre(text) and any(p.match(text) for p in self.patterns)
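A quick usage sketch for the class above (it assumes ``prefilter`` as defined
here is in scope; the log lines are made up for illustration):

.. code-block:: python

   pf = prefilter(
       lambda line: 'error' in line,            # cheap precondition first
       r'([^ :]+) ?: (error|fatal error)')      # expensive regex, run second

   print(pf.search('main.c : error C2065'))     # True: both checks pass
   print(pf.search('all targets up to date'))   # False: precondition fails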
_error_matches = [
    "^FAIL: ",
    "^FATAL: ",
    "^failed ",
    "FAILED",
    "Failed test",
    prefilter(
        lambda x: any(s in x for s in (
            'Error:', 'error', 'undefined reference', 'multiply defined')),
        "([^:]+): error[ \\t]*[0-9]+[ \\t]*:",
        "([^:]+): (Error:|error|undefined reference|multiply defined)",
        "([^ :]+) ?: (error|fatal error|catastrophic error)",
        "([^:]+)\\(([^\\)]+)\\) ?: (error|fatal error|catastrophic error)"),
    "^FAILED",
    "^[Bb]us [Ee]rror",
    "^[Ss]egmentation [Vv]iolation",
    "^[Ss]egmentation [Ff]ault",
    ":.*[Pp]ermission [Dd]enied",
    "[^ :]:[0-9]+: [^ \\t]",
    "[^:]: error[ \\t]*[0-9]+[ \\t]*:",
    "^Error ([0-9]+):",
    "^Fatal",
    "^[Ee]rror: ",
@@ -98,9 +132,6 @@
    "^cc[^C]*CC: ERROR File = ([^,]+), Line = ([0-9]+)",
    "^ld([^:])*:([ \\t])*ERROR([^:])*:",
    "^ild:([ \\t])*\\(undefined symbol\\)",
    "[^ :] : (error|fatal error|catastrophic error)",
    "[^:]: (Error:|error|undefined reference|multiply defined)",
    "[^:]\\([^\\)]+\\) ?: (error|fatal error|catastrophic error)",
    "^fatal error C[0-9]+:",
    ": syntax error ",
    "^collect2: ld returned 1 exit status",
@@ -109,7 +140,7 @@
    "^Unresolved:",
    "Undefined symbol",
    "^Undefined[ \\t]+first referenced",
    "^CMake Error",
    "^CMake Error.*:",
    ":[ \\t]cannot find",
    ":[ \\t]can't find",
    ": \\*\\*\\* No rule to make target [`'].*\\'. Stop",
@@ -123,7 +154,6 @@
    "ld: 0706-006 Cannot find or open library file: -l ",
    "ild: \\(argument error\\) can't find library argument ::",
    "^could not be found and will not be loaded.",
    "^WARNING: '.*' is missing on your system",
    "s:616 string too big",
    "make: Fatal error: ",
    "ld: 0711-993 Error occurred while writing to the output file:",
@@ -145,40 +175,44 @@
    "instantiated from ",
    "candidates are:",
    ": warning",
    ": WARNING",
    ": \\(Warning\\)",
    ": note",
    " ok",
    "Note:",
    "makefile:",
    "Makefile:",
    ":[ \\t]+Where:",
    "[^ :]:[0-9]+: Warning",
    "([^ :]+):([0-9]+): Warning",
    "------ Build started: .* ------",
]

#: Regexes to match file/line numbers in error/warning messages
_warning_matches = [
    "[^ :]:[0-9]+: warning:",
    "[^ :]:[0-9]+: note:",
    prefilter(
        lambda x: 'warning' in x,
        "([^ :]+):([0-9]+): warning:",
        "([^:]+): warning ([0-9]+):",
        "([^:]+): warning[ \\t]*[0-9]+[ \\t]*:",
        "([^ :]+) : warning",
        "([^:]+): warning"),
    prefilter(
        lambda x: 'note:' in x,
        "^([^ :]+):([0-9]+): note:"),
    prefilter(
        lambda x: any(s in x for s in ('Warning', 'Warnung')),
        "^(Warning|Warnung) ([0-9]+):",
        "^(Warning|Warnung)[ :]",
        "^cxx: Warning:",
        "([^ :]+):([0-9]+): (Warning|Warnung)",
        "^CMake Warning.*:"),
    "file: .* has no symbols",
    "^cc[^C]*CC: WARNING File = ([^,]+), Line = ([0-9]+)",
    "^ld([^:])*:([ \\t])*WARNING([^:])*:",
    "[^:]: warning [0-9]+:",
    "^\"[^\"]+\", line [0-9]+: [Ww](arning|arnung)",
    "[^:]: warning[ \\t]*[0-9]+[ \\t]*:",
    "^(Warning|Warnung) ([0-9]+):",
    "^(Warning|Warnung)[ :]",
    "WARNING: ",
    "[^ :] : warning",
    "[^:]: warning",
    "\", line [0-9]+\\.[0-9]+: [0-9]+-[0-9]+ \\([WI]\\)",
    "^cxx: Warning:",
    "file: .* has no symbols",
    "[^ :]:[0-9]+: (Warning|Warnung)",
    "\\([0-9]*\\): remark #[0-9]*",
    "\".*\", line [0-9]+: remark\\([0-9]*\\):",
    "cc-[0-9]* CC: REMARK File = .*, Line = [0-9]*",
|
||||
"^CMake Warning",
|
||||
"^\\[WARNING\\]",
|
||||
]
|
||||
|
||||
@@ -309,7 +343,8 @@ def _profile_match(matches, exceptions, line, match_times, exc_times):
|
||||
|
||||
def _parse(lines, offset, profile):
|
||||
def compile(regex_array):
|
||||
return [re.compile(regex) for regex in regex_array]
|
||||
return [regex if isinstance(regex, prefilter) else re.compile(regex)
|
||||
for regex in regex_array]
|
||||
|
||||
error_matches = compile(_error_matches)
|
||||
error_exceptions = compile(_error_exceptions)
|
||||
|
6 lib/spack/external/py/_path/local.py vendored
@@ -10,7 +10,7 @@
from py._path.common import iswin32, fspath
from stat import S_ISLNK, S_ISDIR, S_ISREG

from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
from os.path import abspath, normcase, normpath, isabs, exists, isdir, isfile, islink, dirname

if sys.version_info > (3,0):
    def map_as_list(func, iter):
@@ -801,10 +801,10 @@ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
        if rootdir is None:
            rootdir = cls.get_temproot()

        nprefix = prefix.lower()
        nprefix = normcase(prefix)
        def parse_num(path):
            """ parse the number out of a path (if it matches the prefix) """
            nbasename = path.basename.lower()
            nbasename = normcase(path.basename)
            if nbasename.startswith(nprefix):
                try:
                    return int(nbasename[len(nprefix):])
@@ -656,12 +656,6 @@ def working_dir(dirname, **kwargs):
        os.chdir(orig_dir)


class CouldNotRestoreDirectoryBackup(RuntimeError):
    def __init__(self, inner_exception, outer_exception):
        self.inner_exception = inner_exception
        self.outer_exception = outer_exception


@contextmanager
def replace_directory_transaction(directory_name, tmp_root=None):
    """Moves a directory to a temporary space. If the operations executed
@@ -689,33 +683,29 @@ def replace_directory_transaction(directory_name, tmp_root=None):
    assert os.path.isabs(tmp_root)

    tmp_dir = tempfile.mkdtemp(dir=tmp_root)
    tty.debug('Temporary directory created [{0}]'.format(tmp_dir))
    tty.debug('TEMPORARY DIRECTORY CREATED [{0}]'.format(tmp_dir))

    shutil.move(src=directory_name, dst=tmp_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))
    tty.debug('DIRECTORY MOVED [src={0}, dest={1}]'.format(
        directory_name, tmp_dir
    ))

    try:
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
        # Try to recover the original directory, if this fails, raise a
        # composite exception.
        try:
            # Delete what was there, before copying back the original content
            if os.path.exists(directory_name):
                shutil.rmtree(directory_name)
            shutil.move(
                src=os.path.join(tmp_dir, directory_basename),
                dst=os.path.dirname(directory_name)
            )
        except Exception as outer_exception:
            raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)

        tty.debug('Directory recovered [{0}]'.format(directory_name))
    except (Exception, KeyboardInterrupt, SystemExit):
        # Delete what was there, before copying back the original content
        if os.path.exists(directory_name):
            shutil.rmtree(directory_name)
        shutil.move(
            src=os.path.join(tmp_dir, directory_basename),
            dst=os.path.dirname(directory_name)
        )
        tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))
        raise
    else:
        # Otherwise delete the temporary directory
        shutil.rmtree(tmp_dir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
        shutil.rmtree(tmp_dir)
        tty.debug('TEMPORARY DIRECTORY DELETED [{0}]'.format(tmp_dir))
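# Illustrative sketch (not from the diff): the transactional pattern
# implemented above, assuming the function is imported from
# llnl.util.filesystem; '/opt/prefix' is a made-up path.
import os
from llnl.util.filesystem import replace_directory_transaction

with replace_directory_transaction('/opt/prefix') as backup_dir:
    # The original directory now lives under backup_dir. If this block raises,
    # the backup is moved back into place before the exception propagates.
    os.mkdir('/opt/prefix')   # rebuild the directory from scratch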
def hash_directory(directory, ignore=[]):
@@ -1855,18 +1845,3 @@ def keep_modification_time(*filenames):
    for f, mtime in mtimes.items():
        if os.path.exists(f):
            os.utime(f, (os.path.getatime(f), mtime))


@contextmanager
def temporary_dir(*args, **kwargs):
    """Create a temporary directory and cd into it. Delete the directory
    on exit.

    Takes the same arguments as tempfile.mkdtemp()
    """
    tmp_dir = tempfile.mkdtemp(*args, **kwargs)
    try:
        with working_dir(tmp_dir):
            yield tmp_dir
    finally:
        remove_directory_contents(tmp_dir)
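# Illustrative sketch (not from the diff): quick usage of temporary_dir,
# assuming it is importable from llnl.util.filesystem.
from llnl.util.filesystem import temporary_dir

with temporary_dir() as tmp:
    # The working directory is now `tmp`; its contents are removed on exit.
    with open('scratch.txt', 'w') as f:
        f.write('discarded afterwards\n')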
@@ -33,7 +33,7 @@


# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07')
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')

# control characters for enabling/disabling echo
#
@@ -323,7 +323,7 @@ def unwrap(self):
        if sys.version_info < (3,):
            self.file = open(self.file_like, 'w')
        else:
            self.file = open(self.file_like, 'w', encoding='utf-8')  # novm
            self.file = open(self.file_like, 'w', encoding='utf-8')
    else:
        self.file = StringIO()
    return self.file
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 17, 0)
spack_version_info = (0, 16, 2)

#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -10,8 +10,6 @@

import os

import llnl.util.tty as tty

from spack.util.environment import EnvironmentModifications

from .analyzer_base import AnalyzerBase
@@ -45,7 +43,6 @@ def _read_environment_file(self, filename):
        to remove path prefixes specific to user systems.
        """
        if not os.path.exists(filename):
            tty.warn("No environment file available")
            return

        mods = EnvironmentModifications.from_sourcing_file(filename)
242 lib/spack/spack/architecture.py Normal file
@@ -0,0 +1,242 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Aggregate the target processor, the operating system and the target
platform into an architecture object.

On a multiple architecture machine, the architecture spec field can be set to
build a package against any target and operating system that is present on the
platform. On Cray platforms or any other architecture that has different front
and back end environments, the operating system will determine the method of
compiler detection.

There are two different types of compiler detection:

    1. Through the $PATH env variable (front-end detection)
    2. Through the module system. (back-end detection)

Depending on which operating system is specified, the compiler will be detected
using one of those methods.

For platforms such as linux and darwin, the operating system is autodetected.

The command line syntax for specifying an architecture is as follows:

    target=<Target name> os=<OperatingSystem name>

If the user wishes to use the defaults, either target or os can be left out of
the command line and Spack will concretize using the default. These defaults
are set in the 'platforms/' directory which contains the different subclasses
for platforms. If the machine has multiple architectures, the user can
also enter frontend, or fe or backend or be. These settings will concretize
to their respective frontend and backend targets and operating systems.

``Platform`` is an abstract class that is extended by subclasses. If the user
wants to add a new type of platform (such as cray_xe), they can create a
subclass and set all the class attributes such as priority, front_target,
back_target, front_os, back_os. Platforms also contain a priority class
attribute. A lower number signifies higher priority. These numbers are
arbitrarily set and can be changed though often there isn't much need unless a
new platform is added and the user wants that to be detected first.

Targets are created inside the platform subclasses. Most architectures
(like linux and darwin) will have only one target family (x86_64), but in the
case of Cray machines there are both frontend and backend processors. The user
can specify which targets are present on the front-end and back-end
architecture.

Depending on the platform, operating systems are either autodetected or are
set. The user can set the frontend and backend operating system via the class
attributes front_os and back_os. The operating system, as described earlier,
will be responsible for compiler detection.
"""
import contextlib

import archspec.cpu

import llnl.util.lang as lang

import spack.compiler
import spack.compilers
import spack.config
import spack.operating_systems
import spack.platforms
import spack.spec
import spack.target
import spack.util.spack_yaml as syaml
import spack.version


@lang.lazy_lexicographic_ordering
class Arch(object):
    """Architecture is now a class to help with setting attributes.

    TODO: refactor so that we don't need this class.
    """

    def __init__(self, plat=None, os=None, target=None):
        self.platform = plat
        if plat and os:
            os = self.platform.operating_system(os)
        self.os = os
        if plat and target:
            target = self.platform.target(target)
        self.target = target

        # Hooks for parser to use when platform is set after target or os
        self.target_string = None
        self.os_string = None

    @property
    def concrete(self):
        return all(
            (self.platform is not None,
             isinstance(self.platform, spack.platforms.Platform),
             self.os is not None,
             isinstance(self.os, spack.operating_systems.OperatingSystem),
             self.target is not None, isinstance(self.target, spack.target.Target))
        )

    def __str__(self):
        if self.platform or self.os or self.target:
            if self.platform.name == 'darwin':
                os_name = self.os.name if self.os else "None"
            else:
                os_name = str(self.os)

            return (str(self.platform) + "-" +
                    os_name + "-" + str(self.target))
        else:
            return ''

    def __contains__(self, string):
        return string in str(self)

    # TODO: make this unnecessary: don't include an empty arch on *every* spec.
    def __nonzero__(self):
        return (self.platform is not None or
                self.os is not None or
                self.target is not None)
    __bool__ = __nonzero__

    def _cmp_iter(self):
        if isinstance(self.platform, spack.platforms.Platform):
            yield self.platform.name
        else:
            yield self.platform

        if isinstance(self.os, spack.operating_systems.OperatingSystem):
            yield self.os.name
        else:
            yield self.os

        if isinstance(self.target, spack.target.Target):
            yield self.target.microarchitecture
        else:
            yield self.target

    def to_dict(self):
        str_or_none = lambda v: str(v) if v else None
        d = syaml.syaml_dict([
            ('platform', str_or_none(self.platform)),
            ('platform_os', str_or_none(self.os)),
            ('target', self.target.to_dict_or_value())])
        return syaml.syaml_dict([('arch', d)])

    def to_spec(self):
        """Convert this Arch to an anonymous Spec with architecture defined."""
        spec = spack.spec.Spec()
        spec.architecture = spack.spec.ArchSpec(str(self))
        return spec

    @staticmethod
    def from_dict(d):
        spec = spack.spec.ArchSpec.from_dict(d)
        return arch_for_spec(spec)

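# Illustrative sketch (not part of the new file): a round trip through
# to_dict()/from_dict(). The platform and OS values in the comment are
# examples and depend on the host machine.
import spack.architecture as architecture

arch = architecture.Arch(
    architecture.platform(), 'default_os', 'default_target')
d = arch.to_dict()
# Example shape:
# {'arch': {'platform': 'linux',
#           'platform_os': 'ubuntu20.04',
#           'target': 'x86_64'}}
assert str(architecture.Arch.from_dict(d)) == str(arch)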
def arch_for_spec(arch_spec):
    """Transforms the given architecture spec into an architecture object."""
    arch_spec = spack.spec.ArchSpec(arch_spec)
    assert arch_spec.concrete

    arch_plat = spack.platforms.by_name(arch_spec.platform)
    if not (arch_plat.operating_system(arch_spec.os) and
            arch_plat.target(arch_spec.target)):
        sys_type = str(default_arch())
        msg = ("Can't recreate arch for spec {0} on current arch {1}; "
               "spec architecture is too different")
        raise ValueError(msg.format(arch_spec, sys_type))

    return Arch(arch_plat, arch_spec.os, arch_spec.target)


@lang.memoized
def _platform():
    return spack.platforms.host()


#: The "real" platform of the host running Spack. This should not be changed
#: by any method and is here as a convenient way to refer to the host platform.
real_platform = _platform

#: The current platform used by Spack. May be swapped by the use_platform
#: context manager.
platform = _platform


@lang.memoized
def default_arch():
    """Default ``Arch`` object for this machine"""
    return Arch(platform(), 'default_os', 'default_target')


@lang.memoized
def compatible_sys_types():
    """Return a list of all the platform-os-target tuples compatible
    with the current host.
    """
    current_host = archspec.cpu.host()
    compatible_targets = [current_host] + current_host.ancestors
    compatible_archs = [
        str(Arch(platform(), 'default_os', target)) for target in compatible_targets
    ]
    return compatible_archs


class _PickleableCallable(object):
    """Class used to pickle a callable that may substitute either
    _platform or _all_platforms. Lambda or nested functions are
    not pickleable.
    """
    def __init__(self, return_value):
        self.return_value = return_value

    def __call__(self):
        return self.return_value


@contextlib.contextmanager
def use_platform(new_platform):
    global platform

    msg = '"{0}" must be an instance of Platform'
    assert isinstance(new_platform, spack.platforms.Platform), msg.format(new_platform)

    original_platform_fn = platform

    try:
        platform = _PickleableCallable(new_platform)

        # Clear configuration and compiler caches
        spack.config.config.clear_caches()
        spack.compilers._cache_config_files = []

        yield new_platform

    finally:
        platform = original_platform_fn

        # Clear configuration and compiler caches
        spack.config.config.clear_caches()
        spack.compilers._cache_config_files = []
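# Illustrative sketch (not part of the new file): a hypothetical use of the
# context manager above, mirroring what the bootstrap code later in this diff
# does on Cray: pretend the host is plain Linux.
import spack.architecture
import spack.platforms.linux

with spack.architecture.use_platform(spack.platforms.linux.Linux()) as plat:
    # Within the block, spack.architecture.platform() returns `plat` and the
    # config/compiler caches have been cleared; both are restored on exit.
    assert spack.architecture.platform() is plat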
@@ -4,9 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import codecs
import glob
import hashlib
import json
import os
import re
import shutil
import sys
import tarfile
@@ -45,25 +47,6 @@
_build_cache_keys_relative_path = '_pgp'


class FetchCacheError(Exception):
    """Error thrown when fetching the cache failed, usually a composite error list."""
    def __init__(self, errors):
        if not isinstance(errors, list):
            raise TypeError("Expected a list of errors")
        self.errors = errors
        if len(errors) > 1:
            msg = " Error {0}: {1}: {2}"
            self.message = "Multiple errors during fetching:\n"
            self.message += "\n".join((
                msg.format(i + 1, err.__class__.__name__, str(err))
                for (i, err) in enumerate(errors)
            ))
        else:
            err = errors[0]
            self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
        super(FetchCacheError, self).__init__(self.message)
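# Illustrative sketch (not from the diff): the composite message
# FetchCacheError assembles; the wrapped errors are invented.
try:
    raise FetchCacheError([
        RuntimeError("Unable to read index hash from mirror A"),
        RuntimeError("Unable to read index from mirror A"),
    ])
except FetchCacheError as e:
    print(e.message)
    # Multiple errors during fetching:
    #  Error 1: RuntimeError: Unable to read index hash from mirror A
    #  Error 2: RuntimeError: Unable to read index from mirror A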
class BinaryCacheIndex(object):
    """
    The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -241,16 +224,11 @@ def find_built_spec(self, spec):
            ]
        """
        self.regenerate_spec_cache()
        return self.find_by_hash(spec.dag_hash())

    def find_by_hash(self, find_hash):
        """Same as find_built_spec but uses the hash of a spec.

        Args:
            find_hash (str): hash of the spec to search
        """
        find_hash = spec.dag_hash()
        if find_hash not in self._mirrors_for_spec:
            return None

        return self._mirrors_for_spec[find_hash]

    def update_spec(self, spec, found_list):
@@ -315,22 +293,14 @@ def update(self):
        # Otherwise the concrete spec cache should not need to be updated at
        # all.

        fetch_errors = []
        all_methods_failed = True

        for cached_mirror_url in self._local_index_cache:
            cache_entry = self._local_index_cache[cached_mirror_url]
            cached_index_hash = cache_entry['index_hash']
            cached_index_path = cache_entry['index_path']
            if cached_mirror_url in configured_mirror_urls:
                # May need to fetch the index and update the local caches
                try:
                    needs_regen = self._fetch_and_cache_index(
                        cached_mirror_url, expect_hash=cached_index_hash)
                    all_methods_failed = False
                except FetchCacheError as fetch_error:
                    needs_regen = False
                    fetch_errors.extend(fetch_error.errors)
                needs_regen = self._fetch_and_cache_index(
                    cached_mirror_url, expect_hash=cached_index_hash)
                # The need to regenerate implies a need to clear as well.
                spec_cache_clear_needed |= needs_regen
                spec_cache_regenerate_needed |= needs_regen
@@ -357,12 +327,7 @@ def update(self):
        for mirror_url in configured_mirror_urls:
            if mirror_url not in self._local_index_cache:
                # Need to fetch the index and update the local caches
                try:
                    needs_regen = self._fetch_and_cache_index(mirror_url)
                    all_methods_failed = False
                except FetchCacheError as fetch_error:
                    fetch_errors.extend(fetch_error.errors)
                    needs_regen = False
                needs_regen = self._fetch_and_cache_index(mirror_url)
                # Generally speaking, a new mirror wouldn't imply the need to
                # clear the spec cache, so leave it as is.
                if needs_regen:
@@ -370,9 +335,7 @@ def update(self):

        self._write_local_index_cache()

        if all_methods_failed:
            raise FetchCacheError(fetch_errors)
        elif spec_cache_regenerate_needed:
        if spec_cache_regenerate_needed:
            self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed)

    def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
@@ -391,8 +354,6 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
            True if this function thinks the concrete spec cache,
            ``_mirrors_for_spec``, should be regenerated. Returns False
            otherwise.
        Throws:
            FetchCacheError: a composite exception.
        """
        index_fetch_url = url_util.join(
            mirror_url, _build_cache_relative_path, 'index.json')
@@ -402,19 +363,14 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
        old_cache_key = None
        fetched_hash = None

        errors = []

        # Fetch the hash first so we can check if we actually need to fetch
        # the index itself.
        try:
            _, _, fs = web_util.read_from_url(hash_fetch_url)
            fetched_hash = codecs.getreader('utf-8')(fs).read()
        except (URLError, web_util.SpackWebError) as url_err:
            errors.append(
                RuntimeError("Unable to read index hash {0} due to {1}: {2}".format(
                    hash_fetch_url, url_err.__class__.__name__, str(url_err)
                ))
            )
            tty.debug('Unable to read index hash {0}'.format(
                hash_fetch_url), url_err, 1)

        # The only case where we'll skip attempting to fetch the buildcache
        # index from the mirror is when we already have a hash for this
@@ -441,23 +397,24 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
            _, _, fs = web_util.read_from_url(index_fetch_url)
            index_object_str = codecs.getreader('utf-8')(fs).read()
        except (URLError, web_util.SpackWebError) as url_err:
            errors.append(
                RuntimeError("Unable to read index {0} due to {1}: {2}".format(
                    index_fetch_url, url_err.__class__.__name__, str(url_err)
                ))
            )
            raise FetchCacheError(errors)
            tty.debug('Unable to read index {0}'.format(index_fetch_url),
                      url_err, 1)
            # We failed to fetch the index, even though we decided it was
            # necessary. However, regenerating the spec cache won't produce
            # anything different than what it has already, so return False.
            return False

        locally_computed_hash = compute_hash(index_object_str)

        if fetched_hash is not None and locally_computed_hash != fetched_hash:
            msg = ('Computed hash ({0}) did not match remote ({1}), '
                   'indicating error in index transmission').format(
                       locally_computed_hash, expect_hash)
            errors.append(RuntimeError(msg))
            msg_tmpl = ('Computed hash ({0}) did not match remote ({1}), '
                        'indicating error in index transmission')
            tty.error(msg_tmpl.format(locally_computed_hash, expect_hash))
            # We somehow got an index that doesn't match the remote one, maybe
            # the next time we try we'll be successful.
            raise FetchCacheError(errors)
            # the next time we try we'll be successful. Regardless, we're not
            # updating our index cache with this, so don't regenerate the spec
            # cache either.
            return False

        url_hash = compute_hash(mirror_url)

@@ -613,16 +570,6 @@ def get_buildfile_manifest(spec):
    # Used by make_package_relative to determine binaries to change.
    for root, dirs, files in os.walk(spec.prefix, topdown=True):
        dirs[:] = [d for d in dirs if d not in blacklist]

        # Directories may need to be relocated too.
        for directory in dirs:
            dir_path_name = os.path.join(root, directory)
            rel_path_name = os.path.relpath(dir_path_name, spec.prefix)
            if os.path.islink(dir_path_name):
                link = os.readlink(dir_path_name)
                if os.path.isabs(link) and link.startswith(spack.store.layout.root):
                    data['link_to_relocate'].append(rel_path_name)

        for filename in files:
            path_name = os.path.join(root, filename)
            m_type, m_subtype = relocate.mime_type(path_name)
@@ -1441,30 +1388,42 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
    rel = buildinfo.get('relative_rpaths')
    # if the original relative prefix and new relative prefix differ the
    # directory layout has changed and the buildcache cannot be installed
    # if it was created with relative rpaths
    info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
    tty.debug(info %
              (old_relative_prefix, new_relative_prefix, rel))
    # if (old_relative_prefix != new_relative_prefix and (rel)):
    #     shutil.rmtree(tmpdir)
    #     msg = "Package tarball was created from an install "
    #     msg += "prefix with a different directory layout. "
    #     msg += "It cannot be relocated because it "
    #     msg += "uses relative rpaths."
    #     raise NewLayoutException(msg)

    # Extract the tarball into the store root, presumably on the same filesystem.
    # The directory created is the base directory name of the old prefix.
    # Moving the old prefix name to the new prefix location should preserve
    # hard links and symbolic links.
    extract_tmp = os.path.join(spack.store.layout.root, '.tmp')
    mkdirp(extract_tmp)
    extracted_dir = os.path.join(extract_tmp,
                                 old_relative_prefix.split(os.path.sep)[-1])

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        try:
            tar.extractall(path=extract_tmp)
        except Exception as e:
            shutil.rmtree(extracted_dir)
            raise e
    try:
        shutil.move(extracted_dir, spec.prefix)
    except Exception as e:
        shutil.rmtree(extracted_dir)
        raise e
        tar.extractall(path=tmpdir)
    # get the parent directory of the file .spack/binary_distribution
    # this should be the directory unpacked from the tarball whose
    # name is unknown because the prefix naming is unknown
    bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0]
    workdir = re.sub('/.spack/binary_distribution$', '', bindist_file)
    tty.debug('workdir %s' % workdir)
    # install_tree copies hardlinks
    # create a temporary tarfile from prefix and extract it to workdir
    # tarfile preserves hardlinks
    temp_tarfile_name = tarball_name(spec, '.tar')
    temp_tarfile_path = os.path.join(tmpdir, temp_tarfile_name)
    with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
        tar.add(name='%s' % workdir,
                arcname='.')
    with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
        tar.extractall(spec.prefix)
    os.remove(temp_tarfile_path)

    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)

@@ -1600,9 +1559,6 @@ def update_cache_and_get_specs():
    possible, so this method will also attempt to initialize and update the
    local index cache (essentially a no-op if it has been done already and
    nothing has changed on the configured mirrors.)

    Throws:
        FetchCacheError
    """
    binary_index.update()
    return binary_index.get_all_built_specs()
@@ -6,7 +6,6 @@

import contextlib
import fnmatch
import functools
import json
import os
import os.path
@@ -24,9 +23,9 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.architecture
import spack.binary_distribution
import spack.config
import spack.detection
import spack.environment
import spack.main
import spack.modules
@@ -35,14 +34,11 @@
import spack.repo
import spack.spec
import spack.store
import spack.user_environment
import spack.user_environment as uenv
import spack.util.executable
import spack.util.path
from spack.util.environment import EnvironmentModifications

#: "spack buildcache" command, initialized lazily
_buildcache_cmd = None

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}

@@ -68,10 +64,10 @@ def _try_import_from_store(module, abstract_spec_str):
        module: Python module to be imported
        abstract_spec_str: abstract spec that may provide the module
    """
    bincache_platform = spack.platforms.real_host()
    bincache_platform = spack.architecture.real_platform()
    if str(bincache_platform) == 'cray':
        bincache_platform = spack.platforms.linux.Linux()
        with spack.platforms.use_platform(bincache_platform):
        with spack.architecture.use_platform(bincache_platform):
            abstract_spec_str = str(spack.spec.Spec(abstract_spec_str))

    # We have to run as part of this python interpreter
@@ -175,34 +171,6 @@ def _fix_ext_suffix(candidate_spec):
            os.symlink(abs_path, link_name)


def _executables_in_store(executables, abstract_spec_str):
    """Return True if at least one of the executables can be retrieved from
    a spec in store, False otherwise.

    The different executables must provide the same functionality and are
    "alternate" to each other, i.e. the function will exit True on the first
    executable found.

    Args:
        executables: list of executables to be searched
        abstract_spec_str: abstract spec that may provide the executable
    """
    executables_str = ', '.join(executables)
    msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
    tty.debug(msg.format(executables_str, abstract_spec_str))
    installed_specs = spack.store.db.query(abstract_spec_str, installed=True)
    if installed_specs:
        for concrete_spec in installed_specs:
            bin_dir = concrete_spec.prefix.bin
            # If we have a "bin" directory and it contains
            # the executables we are looking for
            if (os.path.exists(bin_dir) and os.path.isdir(bin_dir) and
                    spack.util.executable.which_string(*executables, path=bin_dir)):
                spack.util.environment.path_put_first('PATH', [bin_dir])
                return True
    return False


@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(object):
    """Install the software needed during bootstrapping from a buildcache."""
@@ -210,140 +178,87 @@ def __init__(self, conf):
        self.name = conf['name']
        self.url = conf['info']['url']

    @staticmethod
    def _spec_and_platform(abstract_spec_str):
        """Return the spec object and platform we need to use when
        querying the buildcache.

        Args:
            abstract_spec_str: abstract spec string we are looking for
        """
        # This import is local since it is needed only on Cray
        import spack.platforms.linux
    def try_import(self, module, abstract_spec_str):
        if _try_import_from_store(module, abstract_spec_str):
            return True

        # Try to install from an unsigned binary cache
        abstract_spec = spack.spec.Spec(abstract_spec_str)
        abstract_spec = spack.spec.Spec(
            abstract_spec_str + ' ^' + spec_for_current_python()
        )

        # On Cray we want to use Linux binaries if available from mirrors
        bincache_platform = spack.platforms.real_host()
        bincache_platform = spack.architecture.real_platform()
        if str(bincache_platform) == 'cray':
            bincache_platform = spack.platforms.Linux()
            with spack.platforms.use_platform(bincache_platform):
                abstract_spec = spack.spec.Spec(abstract_spec_str)
        return abstract_spec, bincache_platform
            with spack.architecture.use_platform(bincache_platform):
                abstract_spec = spack.spec.Spec(
                    abstract_spec_str + ' ^' + spec_for_current_python()
                )

    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = '{0}.json'.format(package_name)
        # Read information on verified clingo binaries
        json_filename = '{0}.json'.format(module)
        json_path = os.path.join(
            spack.paths.share_path, 'bootstrap', self.name, json_filename
        )
        with open(json_path) as f:
            data = json.load(f)
        return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
        global _buildcache_cmd

        if _buildcache_cmd is None:
            _buildcache_cmd = spack.main.SpackCommand('buildcache')

        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null"
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family)
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override(
                    'compilers', [{'compiler': compiler_entry}]
            ):
                spec_str = '/' + pkg_hash
                install_args = [
                    'install',
                    '--sha256', pkg_sha256,
                    '--only-root',
                    '-a', '-u', '-o', '-f', spec_str
                ]
                _buildcache_cmd(*install_args, fail_on_error=False)

    def _install_and_test(
            self, abstract_spec, bincache_platform, bincache_data, test_fn
    ):
        buildcache = spack.main.SpackCommand('buildcache')
        # Ensure we see only the buildcache being used to bootstrap
        with spack.config.override(self.mirror_scope):
        mirror_scope = spack.config.InternalConfigScope(
            'bootstrap', {'mirrors:': {self.name: self.url}}
        )
        with spack.config.override(mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
                raise RuntimeError("The binary index is empty")

            for item in bincache_data['verified']:
            for item in data['verified']:
                candidate_spec = item['spec']
                # This will be None for things that don't depend on python
                python_spec = item.get('python', None)
                python_spec = item['python']
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                if python_spec not in abstract_spec:
                    continue

                for pkg_name, pkg_hash, pkg_sha256 in item['binaries']:
                    # TODO: undo installations that didn't complete?
                    self._install_by_hash(
                        pkg_hash, pkg_sha256, index, bincache_platform
                    )
                    msg = ('[BOOTSTRAP MODULE {0}] Try installing "{1}" from binary '
                           'cache at "{2}"')
                    tty.debug(msg.format(module, pkg_name, self.url))
                    index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
                    # Reconstruct the compiler that we need to use for bootstrapping
                    compiler_entry = {
                        "modules": [],
                        "operating_system": str(index_spec.os),
                        "paths": {
                            "cc": "/dev/null",
                            "cxx": "/dev/null",
                            "f77": "/dev/null",
                            "fc": "/dev/null"
                        },
                        "spec": str(index_spec.compiler),
                        "target": str(index_spec.target.family)
                    }
                    with spack.architecture.use_platform(bincache_platform):
                        with spack.config.override(
                                'compilers', [{'compiler': compiler_entry}]
                        ):
                            spec_str = '/' + pkg_hash
                            install_args = [
                                'install',
                                '--sha256', pkg_sha256,
                                '-a', '-u', '-o', '-f', spec_str
                            ]
                            buildcache(*install_args, fail_on_error=False)
                    # TODO: undo installations that didn't complete?

                if test_fn():
                if _try_import_from_store(module, abstract_spec_str):
                    return True
        return False

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
        )

    def try_import(self, module, abstract_spec_str):
        test_fn = functools.partial(_try_import_from_store, module, abstract_spec_str)
        if test_fn():
            return True

        tty.info("Bootstrapping {0} from pre-built binaries".format(module))
        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str + ' ^' + spec_for_current_python()
        )
        data = self._read_metadata(module)
        return self._install_and_test(
            abstract_spec, bincache_platform, data, test_fn
        )

    def try_search_path(self, executables, abstract_spec_str):
        test_fn = functools.partial(
            _executables_in_store, executables, abstract_spec_str
        )
        if test_fn():
            return True

        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str
        )
        tty.info("Bootstrapping {0} from pre-built binaries".format(abstract_spec.name))
        data = self._read_metadata(abstract_spec.name)
        return self._install_and_test(
            abstract_spec, bincache_platform, data, test_fn
        )


@_bootstrapper(type='install')
class _SourceBootstrapper(object):
@@ -356,16 +271,10 @@ def try_import(module, abstract_spec_str):
        if _try_import_from_store(module, abstract_spec_str):
            return True

        tty.info("Bootstrapping {0} from sources".format(module))

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            if str(spack.platforms.host()) == 'cray':
            if str(spack.architecture.platform()) == 'cray':
                abstract_spec_str += ' os=fe'

            concrete_spec = spack.spec.Spec(
@@ -374,7 +283,7 @@ def try_import(module, abstract_spec_str):

            if module == 'clingo':
                # TODO: remove when the old concretizer is deprecated
                concrete_spec._old_concretize(deprecation_warning=False)
                concrete_spec._old_concretize()
            else:
                concrete_spec.concretize()

@@ -382,30 +291,10 @@ def try_import(module, abstract_spec_str):
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        concrete_spec.package.do_install(fail_fast=True)
        concrete_spec.package.do_install()

        return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)

    def try_search_path(self, executables, abstract_spec_str):
        if _executables_in_store(executables, abstract_spec_str):
            return True

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Add hint to use frontend operating system on Cray
        if str(spack.platforms.host()) == 'cray':
            abstract_spec_str += ' os=fe'

        concrete_spec = spack.spec.Spec(abstract_spec_str)
        concrete_spec.concretize()

        msg = "[BOOTSTRAP GnuPG] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        concrete_spec.package.do_install()
        return _executables_in_store(executables, abstract_spec_str)


def _make_bootstrapper(conf):
    """Return a bootstrap object built according to the
@@ -485,9 +374,6 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):

    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])

    errors = {}

    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
@@ -502,59 +388,14 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))
            errors[current_config['name']] = e

    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += ' from spec "{0}"'.format(abstract_spec)
    msg += ' due to the following failures:\n'
    for method in errors:
        err = errors[method]
        msg += " '{0}' raised {1}: {2}\n".format(
            method, err.__class__.__name__, str(err))
    msg += ' Please run `spack -d spec zlib` for more verbose error messages'
    raise ImportError(msg)


def ensure_executables_in_path_or_raise(executables, abstract_spec):
    """Ensure that some executables are in path or raise.

    Args:
        executables (list): list of executables to be searched in the PATH,
            in order. The function exits on the first one found.
        abstract_spec (str): abstract spec that provides the executables

    Raises:
        RuntimeError: if the executables cannot be ensured to be in PATH
    """
    if spack.util.executable.which_string(*executables):
        return

    executables_str = ', '.join(executables)
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
                   'not trusted').format(executables_str, current_config['name'])
            tty.debug(msg)
            continue

        b = _make_bootstrapper(current_config)
        try:
            if b.try_search_path(executables, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
            tty.debug(msg.format(executables_str, str(e)))

    # We couldn't bootstrap in any way, so raise an error
    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
    if abstract_spec:
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise RuntimeError(msg)


def _python_import(module):
    try:
        __import__(module)
@@ -592,9 +433,7 @@ def get_executable(exe, spec=None, install=False):
        ret = spack.util.executable.Executable(exe_path[0])
        envmod = EnvironmentModifications()
        for dep in ispec.traverse(root=True, order='post'):
            envmod.extend(
                spack.user_environment.environment_modifications_for_spec(dep)
            )
            envmod.extend(uenv.environment_modifications_for_spec(dep))
        ret.add_default_envmod(envmod)
        return ret
    else:
@@ -623,9 +462,7 @@ def _raise_error(executable, exe_spec):
        ret = spack.util.executable.Executable(exe_path[0])
        envmod = EnvironmentModifications()
        for dep in spec.traverse(root=True, order='post'):
            envmod.extend(
                spack.user_environment.environment_modifications_for_spec(dep)
            )
            envmod.extend(uenv.environment_modifications_for_spec(dep))
        ret.add_default_envmod(envmod)
        return ret

@@ -637,12 +474,8 @@ def _bootstrap_config_scopes():
    config_scopes = [
        spack.config.InternalConfigScope('_builtin', spack.config.config_defaults)
    ]
    configuration_paths = (
        spack.config.configuration_defaults_path,
        ('bootstrap', _config_path())
    )
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
    for name, path in spack.config.configuration_paths:
        platform = spack.architecture.platform().name
        platform_scope = spack.config.ConfigScope(
            '/'.join([name, platform]), os.path.join(path, platform)
        )
@@ -654,64 +487,22 @@ def _bootstrap_config_scopes():
    return config_scopes


def _add_compilers_if_missing():
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        new_compilers = spack.compilers.find_new_compilers()
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


def _add_externals_if_missing():
    search_list = [
        # clingo
        spack.repo.path.get('cmake'),
        spack.repo.path.get('bison'),
        # GnuPG
        spack.repo.path.get('gawk')
    ]
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope='bootstrap')


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.use_platform(spack.platforms.real_host()):
    with spack.environment.deactivate_environment():
        with spack.architecture.use_platform(spack.architecture.real_platform()):
            with spack.repo.use_repositories(spack.paths.packages_path):
                with spack.store.use_store(bootstrap_store_path):
                    # Default configuration scopes excluding command line
                    # and builtin but accounting for platform specific scopes
                    config_scopes = _bootstrap_config_scopes()
                    with spack.config.use_configuration(*config_scopes):
                        # We may need to compile code from sources, so ensure we have
                        # compilers for the current platform before switching parts.
                        _add_compilers_if_missing()
                        spack.config.set('bootstrap', user_configuration['bootstrap'])
                        spack.config.set('config', user_configuration['config'])
                        with spack.modules.disable_modules():
                            with spack_python_interpreter():
                                yield


def _read_and_sanitize_configuration():
    """Read the user configuration that needs to be reused for bootstrapping
    and remove the entries that should not be copied over.
    """
    # Read the "config" section but pop the install tree (the entry will not be
    # considered due to the use_store context manager, so it will be confusing
    # to have it in the configuration).
    config_yaml = spack.config.get('config')
    config_yaml.pop('install_tree', None)
    user_configuration = {
        'bootstrap': spack.config.get('bootstrap'),
        'config': config_yaml
    }
    return user_configuration


def store_path():
    """Path to the store used for bootstrapped software"""
    enabled = spack.config.get('bootstrap:enable', True)
@@ -720,69 +511,37 @@ def store_path():
              'Use "spack bootstrap enable" to enable it')
        raise RuntimeError(msg)

    return _store_path()


def _root_path():
    """Root of all the bootstrap related folders"""
    return spack.config.get(
        'bootstrap:root', spack.paths.default_user_bootstrap_path
    bootstrap_root_path = spack.config.get(
        'bootstrap:root', spack.paths.user_bootstrap_path
    )


def _store_path():
    bootstrap_root_path = _root_path()
    return spack.util.path.canonicalize_path(
    bootstrap_store_path = spack.util.path.canonicalize_path(
        os.path.join(bootstrap_root_path, 'store')
    )
    return bootstrap_store_path


def _config_path():
    bootstrap_root_path = _root_path()
    return spack.util.path.canonicalize_path(
        os.path.join(bootstrap_root_path, 'config')
    )
def clingo_root_spec():
    # Construct the root spec that will be used to bootstrap clingo
    spec_str = 'clingo-bootstrap@spack+python'


def _root_spec(spec_str):
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS.
    if str(spack.platforms.host()) == 'darwin':
    if str(spack.architecture.platform()) == 'darwin':
        spec_str += ' %apple-clang'
    else:
        spec_str += ' %gcc'

    target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(target)
    # Add the generic target
    generic_target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(str(generic_target))

    tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str))

    tty.debug('[BOOTSTRAP ROOT SPEC] {0}'.format(spec_str))
    return spec_str
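# Illustrative sketch (not from the diff): the kind of spec string _root_spec
# produces; the exact target depends on what archspec reports for the host.
spec_str = _root_spec('clingo-bootstrap@spack+python')
# On an x86_64 Linux host: 'clingo-bootstrap@spack+python %gcc target=x86_64'
# On macOS the compiler hint becomes %apple-clang instead.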
def clingo_root_spec():
    """Return the root spec used to bootstrap clingo"""
    return _root_spec('clingo-bootstrap@spack+python')


def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    ensure_module_importable_or_raise(
        module='clingo', abstract_spec=clingo_root_spec()
    )


def gnupg_root_spec():
    """Return the root spec used to bootstrap GnuPG"""
    return _root_spec('gnupg@2.3:')


def ensure_gpg_in_path_or_raise():
    """Ensure gpg or gpg2 are in the PATH or raise."""
    ensure_executables_in_path_or_raise(
        executables=['gpg2', 'gpg'], abstract_spec=gnupg_root_spec(),
    )
||||
@@ -49,6 +49,7 @@
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
from llnl.util.tty.log import MultiProcessFd
|
||||
|
||||
import spack.architecture as arch
|
||||
import spack.build_systems.cmake
|
||||
import spack.build_systems.meson
|
||||
import spack.config
|
||||
@@ -56,7 +57,6 @@
|
||||
import spack.main
|
||||
import spack.package
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.schema.environment
|
||||
import spack.store
|
||||
@@ -72,6 +72,7 @@
|
||||
get_path,
|
||||
inspect_path,
|
||||
is_system_path,
|
||||
preserve_environment,
|
||||
system_dirs,
|
||||
validate,
|
||||
)
|
||||
@@ -147,14 +148,6 @@ def __call__(self, *args, **kwargs):
|
||||
return super(MakeExecutable, self).__call__(*args, **kwargs)
|
||||
|
||||
|
||||
def _on_cray():
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.operating_system('default_os')
|
||||
on_cray = str(host_platform) == 'cray'
|
||||
using_cnl = re.match(r'cnl\d+', str(host_os))
|
||||
return on_cray, using_cnl
|
||||
|
||||
|
||||
def clean_environment():
|
||||
# Stuff in here sanitizes the build environment to eliminate
|
||||
# anything the user has set that may interfere. We apply it immediately
|
||||
@@ -178,9 +171,6 @@ def clean_environment():
|
||||
|
||||
env.unset('CMAKE_PREFIX_PATH')
|
||||
|
||||
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
|
||||
env.unset('MAKEFLAGS')
|
||||
|
||||
# Avoid that libraries of build dependencies get hijacked.
|
||||
env.unset('LD_PRELOAD')
|
||||
env.unset('DYLD_INSERT_LIBRARIES')
|
||||
@@ -189,7 +179,9 @@ def clean_environment():
# interference with Spack dependencies.
# CNL requires these variables to be set (or at least some of them,
# depending on the CNL version).
on_cray, using_cnl = _on_cray()
hostarch = arch.Arch(arch.platform(), 'default_os', 'default_target')
on_cray = str(hostarch.platform) == 'cray'
using_cnl = re.match(r'cnl\d+', str(hostarch.os))
if on_cray and not using_cnl:
env.unset('CRAY_LD_LIBRARY_PATH')
for varname in os.environ.keys():
@@ -232,7 +224,7 @@ def clean_environment():
if '/macports/' in p:
env.remove_path('PATH', p)

return env
env.apply_modifications()


def set_compiler_environment_variables(pkg, env):
@@ -765,77 +757,79 @@ def setup_package(pkg, dirty, context='build'):

set_module_variables_for_package(pkg)

# Keep track of env changes from packages separately, since we want to
# issue warnings when packages make "suspicious" modifications.
env_base = EnvironmentModifications() if dirty else clean_environment()
env_mods = EnvironmentModifications()
env = EnvironmentModifications()

if not dirty:
clean_environment()

# setup compilers for build contexts
need_compiler = context == 'build' or (context == 'test' and
pkg.test_requires_compiler)
if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
set_compiler_environment_variables(pkg, env)
set_wrapper_variables(pkg, env)

env_mods.extend(modifications_from_dependencies(
env.extend(modifications_from_dependencies(
pkg.spec, context, custom_mods_only=False))

# architecture specific setup
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
pkg.architecture.platform.setup_platform_environment(pkg, env)

if context == 'build':
pkg.setup_build_environment(env_mods)
pkg.setup_build_environment(env)

if (not dirty) and (not env_mods.is_unset('CPATH')):
if (not dirty) and (not env.is_unset('CPATH')):
tty.debug("A dependency has updated CPATH, this may lead pkg-"
"config to assume that the package is part of the system"
" includes and omit it when invoked with '--cflags'.")
elif context == 'test':
env_mods.extend(
env.extend(
inspect_path(
pkg.spec.prefix,
spack.user_environment.prefix_inspections(pkg.spec.platform),
exclude=is_system_path
)
)
pkg.setup_run_environment(env_mods)
env_mods.prepend_path('PATH', '.')
pkg.setup_run_environment(env)
env.prepend_path('PATH', '.')

# First apply the clean environment changes
env_base.apply_modifications()
# Loading modules, in particular if they are meant to be used outside
# of Spack, can change environment variables that are relevant to the
# build of packages. To avoid a polluted environment, preserve the
# value of a few, selected, environment variables
# With the current ordering of environment modifications, this is strictly
# unnecessary. Modules affecting these variables will be overwritten anyway
with preserve_environment('CC', 'CXX', 'FC', 'F77'):
# All module loads that otherwise would belong in previous
# functions have to occur after the env object has its
# modifications applied. Otherwise the environment modifications
# could undo module changes, such as unsetting LD_LIBRARY_PATH
# after a module changes it.
if need_compiler:
for mod in pkg.compiler.modules:
# Fixes issue https://github.com/spack/spack/issues/3153
if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
load_module("cce")
load_module(mod)

# Load modules on an already clean environment, just before applying Spack's
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
if need_compiler:
for mod in pkg.compiler.modules:
load_module(mod)

# kludge to handle cray libsci being automatically loaded by PrgEnv
# modules on cray platform. Module unload does no damage when
# unnecessary
on_cray, _ = _on_cray()
if on_cray:
# kludge to handle cray libsci being automatically loaded by PrgEnv
# modules on cray platform. Module unload does no damage when
# unnecessary
module('unload', 'cray-libsci')

if target.module_name:
load_module(target.module_name)
if pkg.architecture.target.module_name:
load_module(pkg.architecture.target.module_name)

load_external_modules(pkg)
load_external_modules(pkg)

implicit_rpaths = pkg.compiler.implicit_rpaths()
if implicit_rpaths:
env_mods.set('SPACK_COMPILER_IMPLICIT_RPATHS',
':'.join(implicit_rpaths))
env.set('SPACK_COMPILER_IMPLICIT_RPATHS',
':'.join(implicit_rpaths))

# Make sure nothing's strange about the Spack environment.
validate(env_mods, tty.warn)
env_mods.apply_modifications()

# Return all env modifications we controlled (excluding module related ones)
env_base.extend(env_mods)
return env_base
validate(env, tty.warn)
env.apply_modifications()


def _make_runnable(pkg, env):
@@ -1023,8 +1017,8 @@ def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,

if not kwargs.get('fake', False):
kwargs['unmodified_env'] = os.environ.copy()
kwargs['env_modifications'] = setup_package(
pkg, dirty=kwargs.get('dirty', False), context=context)
setup_package(pkg, dirty=kwargs.get('dirty', False),
context=context)
return_value = function(pkg, kwargs)
child_pipe.send(return_value)
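The two variants of `_setup_pkg_and_run` above differ in whether `setup_package` returns its `EnvironmentModifications` (alongside a pristine `os.environ` snapshot) or applies everything in place. A hedged sketch of the snapshot-and-replay variant, reusing the key names from the hunk:

```python
import os

# Sketch, not the actual caller: rebuild the environment in a child process.
unmodified_env = kwargs['unmodified_env']        # parent's pristine environment
env_modifications = kwargs['env_modifications']  # returned by setup_package

os.environ.clear()
os.environ.update(unmodified_env)        # start from the snapshot
env_modifications.apply_modifications()  # replay Spack's controlled changes
```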
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import itertools
import os
import os.path
import stat
@@ -13,8 +14,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import force_remove, working_dir

from spack.build_environment import InstallError
from spack.directives import depends_on
from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable

@@ -55,24 +54,9 @@ class AutotoolsPackage(PackageBase):
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = 'AutotoolsPackage'

@property
def patch_config_files(self):
"""
Whether or not to update old ``config.guess`` and ``config.sub`` files
distributed with the tarball. This currently only applies to
``ppc64le:``, ``aarch64:``, and ``riscv64:`` target architectures. The
substitutes are taken from the ``gnuconfig`` package, which is
automatically added as a build dependency for these architectures. In
case system versions of these config files are required, the
``gnuconfig`` package can be marked external with a prefix pointing to
the directory containing the system ``config.guess`` and ``config.sub``
files.
"""
return (self.spec.satisfies('target=ppc64le:')
or self.spec.satisfies('target=aarch64:')
or self.spec.satisfies('target=riscv64:'))

#: Whether or not to update ``config.guess`` and ``config.sub`` on old
#: architectures
patch_config_files = True
#: Whether or not to update ``libtool``
#: (currently only for Arm/Clang/Fujitsu compilers)
patch_libtool = True
@@ -99,10 +83,6 @@ def patch_config_files(self):
#: after the installation. If True instead it installs them.
install_libtool_archives = False

depends_on('gnuconfig', type='build', when='target=ppc64le:')
depends_on('gnuconfig', type='build', when='target=aarch64:')
depends_on('gnuconfig', type='build', when='target=riscv64:')

@property
def _removed_la_files_log(self):
"""File containing the list of removed libtool archives"""
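In the property-based variant above, a package that must keep its shipped config files overrides the property; in the attribute variant it simply assigns `False`. A hypothetical package, sketched for the property form:

```python
from spack import *  # standard preamble of a Spack package file

class Mylib(AutotoolsPackage):  # hypothetical package
    """Build that must keep its bundled config.guess/config.sub."""

    @property
    def patch_config_files(self):
        # Opt out of the gnuconfig substitution even on ppc64le/aarch64/riscv64.
        return False
```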
@@ -124,10 +104,12 @@ def _do_patch_config_files(self):
"""Some packages ship with older config.guess/config.sub files and
need to have these updated when installed on a newer architecture.
In particular, config.guess fails for PPC64LE for version prior
to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and
RISC-V (riscv64).
to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64).
"""
if not self.patch_config_files:
if not self.patch_config_files or (
not self.spec.satisfies('target=ppc64le:') and
not self.spec.satisfies('target=aarch64:')
):
return

# TODO: Expand this to select the 'config.sub'-compatible architecture
@@ -137,8 +119,6 @@ def _do_patch_config_files(self):
config_arch = 'ppc64le'
elif self.spec.satisfies('target=aarch64:'):
config_arch = 'aarch64'
elif self.spec.satisfies('target=riscv64:'):
config_arch = 'riscv64'
else:
config_arch = 'local'

@@ -158,69 +138,39 @@ def runs_ok(script_abs_path):

return True

# Get the list of files that need to be patched
to_be_patched = fs.find(self.stage.path, files=['config.sub', 'config.guess'])
# Compute the list of files that need to be patched
search_dir = self.stage.path
to_be_patched = fs.find(
search_dir, files=['config.sub', 'config.guess'], recursive=True
)
to_be_patched = [f for f in to_be_patched if not runs_ok(f)]

# If there are no files to be patched, return early
if not to_be_patched:
return

# Otherwise, require `gnuconfig` to be a build dependency
self._require_build_deps(
pkgs=['gnuconfig'],
spec=self.spec,
err="Cannot patch config files")
# Directories where to search for files to be copied
# over the failing ones
good_file_dirs = ['/usr/share']
if 'automake' in self.spec:
good_file_dirs.insert(0, self.spec['automake'].prefix)

# Get the config files we need to patch (config.sub / config.guess).
# List of files to be found in the directories above
to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
gnuconfig = self.spec['gnuconfig']
gnuconfig_dir = gnuconfig.prefix

# An external gnuconfig may not have a prefix.
if gnuconfig_dir is None:
raise InstallError("Spack could not find substitutes for GNU config "
"files because no prefix is available for the "
"`gnuconfig` package. Make sure you set a prefix "
"path instead of modules for external `gnuconfig`.")

candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)

# For external packages the user may have specified an incorrect prefix,
# otherwise the installation is just corrupt.
if not candidates:
msg = ("Spack could not find `config.guess` and `config.sub` "
"files in the `gnuconfig` prefix `{0}`. This means the "
"`gnuconfig` package is broken").format(gnuconfig_dir)
if gnuconfig.external:
msg += (" or the `gnuconfig` package prefix is misconfigured as"
" an external package")
raise InstallError(msg)

# Filter working substitutes
candidates = [f for f in candidates if runs_ok(f)]
substitutes = {}
for candidate in candidates:
config_file = os.path.basename(candidate)
substitutes[config_file] = candidate
to_be_found.remove(config_file)
for directory in good_file_dirs:
candidates = fs.find(directory, files=to_be_found, recursive=True)
candidates = [f for f in candidates if runs_ok(f)]
for name, good_files in itertools.groupby(
candidates, key=os.path.basename
):
substitutes[name] = next(good_files)
to_be_found.remove(name)

# Check that we found everything we needed
if to_be_found:
msg = """\
Spack could not find working replacements for the following autotools config
files: {0}.

To resolve this problem, please try the following:
1. Try to rebuild with `patch_config_files = False` in the package `{1}`, to
rule out that Spack tries to replace config files not used by the build.
2. Verify that the `gnuconfig` package is up-to-date.
3. On some systems you need to use system-provided `config.guess` and `config.sub`
files. In this case, mark `gnuconfig` as a non-buildable external package,
and set the prefix to the directory containing the `config.guess` and
`config.sub` files.
"""
raise InstallError(msg.format(', '.join(to_be_found), self.name))
msg = 'Failed to find suitable substitutes for {0}'
raise RuntimeError(msg.format(', '.join(to_be_found)))

# Copy the good files over the bad ones
for abs_path in to_be_patched:
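For context, `fs.find` here is `llnl.util.filesystem.find`, which returns the absolute paths of matching file names under a root directory. A small, self-contained sketch (the stage directory is made up):

```python
import llnl.util.filesystem as fs

# Recursively locate candidate scripts below a stage directory:
matches = fs.find('/tmp/my-stage', files=['config.sub', 'config.guess'],
                  recursive=True)
print(matches)  # e.g. ['/tmp/my-stage/pkg-1.0/config.sub', ...]
```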
@@ -302,41 +252,17 @@ def delete_configure_to_force_update(self):
if self.force_autoreconf:
force_remove(self.configure_abs_path)

def _require_build_deps(self, pkgs, spec, err):
"""Require `pkgs` to be direct build dependencies of `spec`. Raises a
RuntimeError with a helpful error message when any dep is missing."""

build_deps = [d.name for d in spec.dependencies(deptype='build')]
missing_deps = [x for x in pkgs if x not in build_deps]

if not missing_deps:
return

# Raise an exception on missing deps.
msg = ("{0}: missing dependencies: {1}.\n\nPlease add "
"the following lines to the package:\n\n"
.format(err, ", ".join(missing_deps)))

for dep in missing_deps:
msg += ("    depends_on('{0}', type='build', when='@{1}')\n"
.format(dep, spec.version))

msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
raise RuntimeError(msg)

def autoreconf(self, spec, prefix):
"""Not needed usually, configure should be already there"""

# If configure exists nothing needs to be done
if os.path.exists(self.configure_abs_path):
return

# Else try to regenerate it, which requires a few build dependencies
self._require_build_deps(
pkgs=['autoconf', 'automake', 'libtool'],
spec=spec,
err="Cannot generate configure")

# Else try to regenerate it
autotools = ['m4', 'autoconf', 'automake', 'libtool']
missing = [x for x in autotools if x not in spec]
if missing:
msg = 'Cannot generate configure: missing dependencies {0}'
raise RuntimeError(msg.format(missing))
tty.msg('Configure script not found: trying to generate it')
tty.warn('*********************************************************')
tty.warn('* If the default procedure fails, consider implementing *')
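The `_require_build_deps` variant builds an error message that asks package authors to declare the missing tools explicitly; the lines it suggests look like the following sketch (the `when=` version is whatever spec triggered the failure — `@develop` here is purely illustrative):

```python
depends_on('autoconf', type='build', when='@develop')
depends_on('automake', type='build', when='@develop')
depends_on('libtool', type='build', when='@develop')
```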
@@ -447,15 +373,14 @@ def _activate_or_not(
name,
activation_word,
deactivation_word,
activation_value=None,
variant=None
activation_value=None
):
"""This function contains the current implementation details of
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
:meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.

Args:
name (str): name of the option that is being activated or not
name (str): name of the variant that is being processed
activation_word (str): the default activation word ('with' in the
case of ``with_or_without``)
deactivation_word (str): the default deactivation word ('without'
@@ -467,8 +392,6 @@ def _activate_or_not(

The special value 'prefix' can also be assigned and will return
``spec[name].prefix`` as activation parameter.
variant (str): name of the variant that is being processed
(if different from option name)

Examples:

@@ -478,7 +401,6 @@ def _activate_or_not(

variant('foo', values=('x', 'y'), description='')
variant('bar', default=True, description='')
variant('ba_z', default=True, description='')

calling this function like:

@@ -488,13 +410,12 @@ def _activate_or_not(
'foo', 'with', 'without', activation_value='prefix'
)
_activate_or_not('bar', 'with', 'without')
_activate_or_not('ba-z', 'with', 'without', variant='ba_z')

will generate the following configuration options:

.. code-block:: console

--with-x=<prefix-to-x> --without-y --with-bar --with-ba-z
--with-x=<prefix-to-x> --without-y --with-bar

for ``<spec-name> foo=x +bar``

@@ -511,37 +432,32 @@ def _activate_or_not(
if activation_value == 'prefix':
activation_value = lambda x: spec[x].prefix

variant = variant or name

# Defensively check that the name passed as argument is among
# the variants
if variant not in self.variants:
if name not in self.variants:
msg = '"{0}" is not a variant of "{1}"'
raise KeyError(msg.format(variant, self.name))
raise KeyError(msg.format(name, self.name))

# Create a list of pairs. Each pair includes a configuration
# option and whether or not that option is activated
variant_desc, _ = self.variants[variant]
if set(variant_desc.values) == set((True, False)):
if set(self.variants[name].values) == set((True, False)):
# BoolValuedVariant carry information about a single option.
# Nonetheless, for uniformity of treatment we'll package them
# in an iterable of one element.
condition = '+{name}'.format(name=variant)
condition = '+{name}'.format(name=name)
options = [(name, condition in spec)]
else:
condition = '{variant}={value}'
condition = '{name}={value}'
# "feature_values" is used to track values which correspond to
# features which can be enabled or disabled as understood by the
# package's build system. It excludes values which have special
# meanings and do not correspond to features (e.g. "none")
feature_values = getattr(
variant_desc.values, 'feature_values', None
) or variant_desc.values
self.variants[name].values, 'feature_values', None
) or self.variants[name].values

options = [
(value,
condition.format(variant=variant,
value=value) in spec)
(value, condition.format(name=name, value=value) in spec)
for value in feature_values
]

@@ -569,7 +485,7 @@ def _default_generator(is_activated):
args.append(line_generator(activated))
return args

def with_or_without(self, name, activation_value=None, variant=None):
def with_or_without(self, name, activation_value=None):
"""Inspects a variant and returns the arguments that activate
or deactivate the selected feature(s) for the configure options.

@@ -595,10 +511,9 @@ def with_or_without(self, name, activation_value=None, variant=None):
Returns:
list of arguments to configure
"""
return self._activate_or_not(name, 'with', 'without', activation_value,
variant)
return self._activate_or_not(name, 'with', 'without', activation_value)

def enable_or_disable(self, name, activation_value=None, variant=None):
def enable_or_disable(self, name, activation_value=None):
"""Same as
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
@@ -616,7 +531,7 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
list of arguments to configure
"""
return self._activate_or_not(
name, 'enable', 'disable', activation_value, variant
name, 'enable', 'disable', activation_value
)

run_after('install')(PackageBase._run_default_install_time_test_callbacks)
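The `variant=` keyword (present on one side of these hunks) covers configure options whose spelling differs from the Spack variant name, dashes versus underscores being the common case, as in the docstring's `ba_z` example. A hedged sketch of a package method using that form:

```python
def configure_args(self):
    args = self.with_or_without('ba-z', variant='ba_z')  # --with-ba-z from +ba_z
    args.extend(self.enable_or_disable('foo'))           # variant name == option name
    return args
```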
@@ -646,6 +561,3 @@ def remove_libtool_archives(self):
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
with open(self._removed_la_files_log, mode='w') as f:
f.write('\n'.join(libtool_files))

# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
run_after('install')(PackageBase.apply_macos_rpath_fixups)

@@ -5,6 +5,7 @@

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
from spack.package import PackageBase


@@ -12,7 +13,7 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.

Maintainers: ax3l, Rombur, davidbeckingsale
Maintainers: ax3l, Rombur
"""

# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -85,9 +86,6 @@ def cuda_flags(arch_list):
# apply to platform=darwin. We currently do not provide conflicts for
# platform=darwin with %apple-clang.

# Linux x86_64 compiler conflicts from here:
# https://gist.github.com/ax3l/9489132

# GCC
# According to
# https://github.com/spack/spack/pull/25054#issuecomment-886531664
@@ -104,87 +102,88 @@ def cuda_flags(arch_list):
# This implies that the last one in the list has to be updated at
# each release of a new cuda minor version.
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
conflicts('%gcc@12:', when='+cuda ^cuda@:11.5.0')
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
conflicts('%clang@13:', when='+cuda ^cuda@:11.5.0')
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4')

# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
conflicts('%pgi@:17,20:', when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
conflicts('%pgi@:17,21:', when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8.0:9.0 target=x86_64:')
conflicts('%clang@:3.7,4.1:', when='+cuda ^cuda@9.1 target=x86_64:')
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
conflicts('%clang@:3.7,8.1:',
when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')

# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
# https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html
# Linux x86_64 compiler conflicts from here:
# https://gist.github.com/ax3l/9489132
with when('~allow-unsupported-compilers'):
conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
conflicts('%pgi@:17,20:', when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
conflicts('%pgi@:17,21:', when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8.0:9.0 target=x86_64:')
conflicts('%clang@:3.7,4.1:', when='+cuda ^cuda@9.1 target=x86_64:')
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
conflicts('%clang@:3.7,8.1:',
when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')

# information prior to CUDA 9 difficult to find
conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
# https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html

# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
# Intel 15.x is compatible with CUDA 7 thru current CUDA
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')
# information prior to CUDA 9 difficult to find
conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')

# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')
# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
# Intel 15.x is compatible with CUDA 7 thru current CUDA
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')

# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')
# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')

# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')

# Make sure cuda_arch can not be used without +cuda
for value in cuda_arch_values:

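The `with when('~allow-unsupported-compilers')` block seen on one side of this hunk is the key structural change: every `conflicts()` beneath it is implicitly conditioned on the variant, so a user can escape the compiler matrix by requesting `+allow-unsupported-compilers`. A minimal sketch of the scoping mechanism, with a made-up constraint in a hypothetical package:

```python
class MyGpuPackage(CudaPackage):  # hypothetical package
    # Inside the block, the condition is AND-ed onto each directive's when=:
    with when('~allow-unsupported-compilers'):
        conflicts('%gcc@99:', when='+cuda')  # illustrative constraint only
```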
@@ -116,9 +116,9 @@ class IntelPackage(PackageBase):
# that satisfies self.spec will be used.
version_years = {
# intel-daal is versioned 2016 and later, no divining is needed
'intel-ipp@9.0:9': 2016,
'intel-mkl@11.3.0:11.3': 2016,
'intel-mpi@5.1:5': 2016,
'intel-ipp@9.0:9.99': 2016,
'intel-mkl@11.3.0:11.3.999': 2016,
'intel-mpi@5.1:5.99': 2016,
}

# Below is the list of possible values for setting auto dispatch functions

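The two sides of this hunk encode the same intent under different range semantics: with inclusive ranges, `@9.0:9` covers every 9.x release, while the `.99`/`.999` spellings emulate that where an upper bound excludes the bare prefix. An illustration, assuming the inclusive semantics (imports as in this diff's own files):

```python
from spack.version import Version, ver

assert Version('9.2.1') in ver('9.0:9')    # ':9' spans all of 9.x
assert Version('10.0') not in ver('9.0:9')
```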
@@ -110,6 +110,3 @@ def installcheck(self):

# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)

# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
run_after('install')(PackageBase.apply_macos_rpath_fixups)

@@ -129,7 +129,7 @@ def import_modules(self):
modules = []
root = os.path.join(
self.prefix,
self.spec['python'].package.config_vars['python_lib']['true']['false'],
self.spec['python'].package.config_vars['python_lib']['false']['false'],
)

# Some Python libraries are packages: collections of modules
@@ -393,15 +393,11 @@ def remove_files_from_view(self, view, merge_map):
self.spec
)
)

to_remove = []
for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst):
continue

if global_view or not path_contains_subdirectory(src, bin_dir):
to_remove.append(dst)
view.remove_file(src, dst)
else:
os.remove(dst)

view.remove_files(to_remove)

@@ -18,9 +18,6 @@ class RubyPackage(PackageBase):
#. :py:meth:`~.RubyPackage.build`
#. :py:meth:`~.RubyPackage.install`
"""

maintainers = ['Kerilk']

#: Phases of a Ruby package
phases = ['build', 'install']

@@ -53,12 +50,8 @@ def install(self, spec, prefix):

gems = glob.glob('*.gem')
if gems:
# if --install-dir is not used, GEM_PATH is deleted from the
# environment, and Gems required to build native extensions will
# not be found. Those extensions are built during `gem install`.
inspect.getmodule(self).gem(
'install', '--norc', '--ignore-dependencies',
'--install-dir', prefix, gems[0])
'install', '--norc', '--ignore-dependencies', gems[0])

# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)

@@ -66,7 +66,7 @@ def import_modules(self):
modules = []
root = os.path.join(
self.prefix,
self.spec['python'].package.config_vars['python_lib']['true']['false'],
self.spec['python'].package.config_vars['python_lib']['false']['false'],
)

# Some Python libraries are packages: collections of modules
@@ -99,9 +99,7 @@ def configure(self, spec, prefix):

args = self.configure_args()

python_include_dir = os.path.basename(
inspect.getmodule(self).python_include_dir
)
python_include_dir = 'python' + str(spec['python'].version.up_to(2))

args.extend([
'--verbose',

@@ -23,8 +23,11 @@ def misc_cache_location():
Currently the ``misc_cache`` stores indexes for virtual dependency
providers and for which packages provide which tags.
"""
path = spack.config.get('config:misc_cache', spack.paths.default_misc_cache_path)
return spack.util.path.canonicalize_path(path)
path = spack.config.get('config:misc_cache')
if not path:
path = os.path.join(spack.paths.user_config_path, 'cache')
path = spack.util.path.canonicalize_path(path)
return path


def _misc_cache():
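Both variants above resolve a configured path with a fallback and canonicalize it; the one-liner leans on the default argument of `spack.config.get`. A sketch with an illustrative default value:

```python
import spack.config
import spack.util.path

# Falls back to the second argument when the key is unset in every scope:
path = spack.config.get('config:misc_cache', '~/.spack/cache')
print(spack.util.path.canonicalize_path(path))  # expands ~ and Spack placeholders
```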
@@ -44,7 +47,7 @@ def fetch_cache_location():
"""
path = spack.config.get('config:source_cache')
if not path:
path = spack.paths.default_fetch_cache_path
path = os.path.join(spack.paths.var_path, "cache")
path = spack.util.path.canonicalize_path(path)
return path


@@ -396,6 +396,9 @@ def append_dep(s, d):
})

for spec in spec_list:
spec.concretize()

# root_spec = get_spec_string(spec)
root_spec = spec

for s in spec.traverse(deptype=all):
@@ -665,35 +668,16 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

# Speed up staging by first fetching binary indices from all mirrors
# (including the per-PR mirror we may have just added above).
try:
bindist.binary_index.update()
except bindist.FetchCacheError as e:
tty.error(e)
bindist.binary_index.update()

staged_phases = {}
try:
for phase in phases:
phase_name = phase['name']
if phase_name == 'specs':
# Anything in the "specs" of the environment are already
# concretized by the block at the top of this method, so we
# only need to find the concrete versions, and then avoid
# re-concretizing them needlessly later on.
concrete_phase_specs = [
concrete for abstract, concrete in env.concretized_specs()
if abstract in env.spec_lists[phase_name]
]
else:
# Any specs lists in other definitions (but not in the
# "specs") of the environment are not yet concretized so we
# have to concretize them explicitly here.
concrete_phase_specs = env.spec_lists[phase_name]
with spack.concretize.disable_compiler_existence_check():
for phase_spec in concrete_phase_specs:
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only)
with spack.concretize.disable_compiler_existence_check():
staged_phases[phase_name] = stage_spec_jobs(
env.spec_lists[phase_name],
check_index_only=check_index_only)
finally:
# Clean up PR mirror if enabled
if pr_mirror_url:
@@ -709,17 +693,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
max_length_needs = 0
max_needs_job = ''

# If this is configured, spack will fail "spack ci generate" if it
# generates any full hash which exists under the broken specs url.
broken_spec_urls = None
if broken_specs_url:
if broken_specs_url.startswith('http'):
# To make checking each spec against the list faster, we require
# a url protocol that allows us to iterate the url in advance.
tty.msg('Cannot use an http(s) url for broken specs, ignoring')
else:
broken_spec_urls = web_util.list_url(broken_specs_url)

before_script, after_script = None, None
for phase in phases:
phase_name = phase['name']
@@ -906,10 +879,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
tty.debug('Pruning spec that does not need to be rebuilt.')
continue

if (broken_spec_urls is not None and
release_spec_full_hash in broken_spec_urls):
known_broken_specs_encountered.append('{0} ({1})'.format(
release_spec, release_spec_full_hash))
# Check if this spec is in our list of known failures, now that
# we know this spec needs a rebuild
if broken_specs_url:
broken_spec_path = url_util.join(
broken_specs_url, release_spec_full_hash)
if web_util.url_exists(broken_spec_path):
known_broken_specs_encountered.append('{0} ({1})'.format(
release_spec, release_spec_full_hash))

if artifacts_root:
job_dependencies.append({
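The `url_exists` variant above probes the mirror once per rebuilt spec, while the `list_url` variant fetches the whole listing up front (hence its restriction against http(s) URLs, which cannot be iterated in advance). A sketch of the per-spec probe; the bucket, hash, and module aliases follow common Spack usage and should be treated as assumptions:

```python
import spack.util.url as url_util
import spack.util.web as web_util

broken_spec_path = url_util.join('s3://my-mirror/broken-specs', 'deadbeef1234')
if web_util.url_exists(broken_spec_path):
    print('spec is on the known-broken list; flag it')
```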
@@ -1027,14 +1004,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'after_script',
]

service_job_retries = {
'max': 2,
'when': [
'runner_system_failure',
'stuck_or_timeout_failure'
]
}

if job_id > 0:
if temp_storage_url_prefix:
# There were some rebuild jobs scheduled, so we will need to
@@ -1054,7 +1023,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
temp_storage_url_prefix)
]
cleanup_job['when'] = 'always'
cleanup_job['retry'] = service_job_retries

output_object['cleanup'] = cleanup_job

@@ -1078,7 +1046,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
index_target_mirror)
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries

if artifacts_root:
final_job['variables'] = {
'SPACK_CONCRETE_ENV_DIR': concrete_env_dir
}

output_object['rebuild-index'] = final_job

@@ -1142,8 +1114,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'echo "All specs already up to date, nothing to rebuild."',
]

noop_job['retry'] = service_job_retries

sorted_output = {'no-specs-to-rebuild': noop_job}

if known_broken_specs_encountered:

@@ -27,7 +27,6 @@
import spack.paths
import spack.spec
import spack.store
import spack.user_environment as uenv
import spack.util.spack_json as sjson
import spack.util.string

@@ -154,7 +153,6 @@ def parse_specs(args, **kwargs):
concretize = kwargs.get('concretize', False)
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)
reuse = kwargs.get('reuse', False)

try:
sargs = args
@@ -163,7 +161,7 @@ def parse_specs(args, **kwargs):
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests, reuse=reuse)  # implies normalize
spec.concretize(tests=tests)  # implies normalize
elif normalize:
spec.normalize(tests=tests)

@@ -261,19 +259,17 @@ def display_specs_as_json(specs, deps=False):
seen = set()
records = []
for spec in specs:
dag_hash = spec.dag_hash()
if dag_hash in seen:
if spec.dag_hash() in seen:
continue
records.append(spec.node_dict_with_hashes())
seen.add(dag_hash)
seen.add(spec.dag_hash())
records.append(spec.to_node_dict())

if deps:
for dep in spec.traverse():
dep_dag_hash = dep.dag_hash()
if dep_dag_hash in seen:
if dep.dag_hash() in seen:
continue
records.append(dep.node_dict_with_hashes())
seen.add(dep_dag_hash)
seen.add(dep.dag_hash())
records.append(dep.to_node_dict())

sjson.dump(records, sys.stdout)

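Both sides of the `display_specs_as_json` hunk implement the same hash-based deduplication; one hoists `dag_hash()` into a local so each spec is hashed once, and serializes with `node_dict_with_hashes` instead of `to_node_dict`. The underlying pattern in plain Python (the helper names are hypothetical stand-ins):

```python
seen, records = set(), []
for item in all_items:        # stands in for the specs and their deps
    key = compute_hash(item)  # hypothetical; computed once per item
    if key in seen:
        continue
    records.append(to_record(item))  # hypothetical serializer
    seen.add(key)
```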
@@ -443,28 +439,6 @@ def format_list(specs):
output.flush()


def filter_loaded_specs(specs):
"""Filter a list of specs returning only those that are
currently loaded."""
hashes = os.environ.get(uenv.spack_loaded_hashes_var, '').split(':')
return [x for x in specs if x.dag_hash() in hashes]


def print_how_many_pkgs(specs, pkg_type=""):
"""Given a list of specs, this will print a message about how many
specs are in that list.

Args:
specs (list): depending on how many items are in this list, choose
the plural or singular form of the word "package"
pkg_type (str): the output string will mention this provided
category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
tty.msg("%s" % spack.util.string.plural(
len(specs), pkg_type + " package"))


def spack_is_git_repo():
"""Ensure that this instance of Spack is a git clone."""
return is_git_repo(spack.paths.prefix)

@@ -12,7 +12,7 @@
import llnl.util.tty.colify as colify
import llnl.util.tty.color as color

import spack.platforms
import spack.architecture as architecture

description = "print architecture information about this machine"
section = "system"
@@ -20,10 +20,6 @@


def setup_parser(subparser):
subparser.add_argument(
'-g', '--generic-target', action='store_true',
help='show the best generic target'
)
subparser.add_argument(
'--known-targets', action='store_true',
help='show a list of all known targets and exit'
@@ -78,32 +74,25 @@ def display_target_group(header, target_group):


def arch(parser, args):
if args.generic_target:
print(archspec.cpu.host().generic)
return

if args.known_targets:
display_targets(archspec.cpu.TARGETS)
return

os_args, target_args = 'default_os', 'default_target'
if args.frontend:
os_args, target_args = 'frontend', 'frontend'
arch = architecture.Arch(architecture.platform(),
'frontend', 'frontend')
elif args.backend:
os_args, target_args = 'backend', 'backend'

host_platform = spack.platforms.host()
host_os = host_platform.operating_system(os_args)
host_target = host_platform.target(target_args)
architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
arch = architecture.Arch(architecture.platform(),
'backend', 'backend')
else:
arch = architecture.Arch(architecture.platform(),
'default_os', 'default_target')

if args.platform:
print(architecture.platform)
print(arch.platform)
elif args.operating_system:
print(architecture.os)
print(arch.os)
elif args.target:
print(architecture.target)
print(arch.target)
else:
print(architecture)
print(arch)

@@ -10,6 +10,7 @@

import llnl.util.tty as tty

import spack.architecture
import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -104,9 +105,6 @@ def setup_parser(subparser):
" instead of default platform and OS")
# This argument is needed by the bootstrapping logic to verify checksums
install.add_argument('--sha256', help=argparse.SUPPRESS)
install.add_argument(
'--only-root', action='store_true', help=argparse.SUPPRESS
)

arguments.add_common_arguments(install, ['specs'])
install.set_defaults(func=installtarball)
@@ -337,13 +335,9 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
specs_from_cli = []
has_errors = False

try:
specs = bindist.update_cache_and_get_specs()
except bindist.FetchCacheError as e:
tty.error(e)

specs = bindist.update_cache_and_get_specs()
if not other_arch:
arch = spack.spec.Spec.default_arch()
arch = spack.architecture.default_arch().to_spec()
specs = [s for s in specs if s.satisfies(arch)]

for pkg in pkgs:
@@ -537,14 +531,9 @@ def install_tarball(spec, args):
if s.external or s.virtual:
tty.warn("Skipping external or virtual package %s" % spec.format())
return

# This argument is used only for bootstrapping specs without signatures,
# since we need to check the sha256 of each tarball
if not args.only_root:
for d in s.dependencies(deptype=('link', 'run')):
tty.msg("Installing buildcache for dependency spec %s" % d)
install_tarball(d, args)

for d in s.dependencies(deptype=('link', 'run')):
tty.msg("Installing buildcache for dependency spec %s" % d)
install_tarball(d, args)
package = spack.repo.get(spec)
if s.concrete and package.installed and not args.force:
tty.warn("Package for spec %s already installed." % spec.format())
@@ -572,13 +561,9 @@ def install_tarball(spec, args):

def listspecs(args):
"""list binary packages available from mirrors"""
try:
specs = bindist.update_cache_and_get_specs()
except bindist.FetchCacheError as e:
tty.error(e)

specs = bindist.update_cache_and_get_specs()
if not args.allarch:
arch = spack.spec.Spec.default_arch()
arch = spack.architecture.default_arch().to_spec()
specs = [s for s in specs if s.satisfies(arch)]

if args.specs:

@@ -14,7 +14,6 @@
import spack.repo
import spack.stage
import spack.util.crypto
from spack.package import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version, ver

@@ -27,16 +26,9 @@ def setup_parser(subparser):
subparser.add_argument(
'--keep-stage', action='store_true',
help="don't clean up staging area when command completes")
sp = subparser.add_mutually_exclusive_group()
sp.add_argument(
subparser.add_argument(
'-b', '--batch', action='store_true',
help="don't ask which versions to checksum")
sp.add_argument(
'-l', '--latest', action='store_true',
help="checksum the latest available version only")
sp.add_argument(
'-p', '--preferred', action='store_true',
help="checksum the preferred version only")
arguments.add_common_arguments(subparser, ['package'])
subparser.add_argument(
'versions', nargs=argparse.REMAINDER,
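The richer variant registers `-b/-l/-p` on an argparse mutually exclusive group, so contradictory combinations are rejected by the parser itself rather than by hand-written checks. A self-contained illustration:

```python
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument('-b', '--batch', action='store_true')
group.add_argument('-l', '--latest', action='store_true')
group.add_argument('-p', '--preferred', action='store_true')

parser.parse_args(['-b'])        # fine
parser.parse_args(['-b', '-l'])  # exits: "argument -l: not allowed with argument -b"
```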
@@ -56,18 +48,15 @@ def checksum(parser, args):
# Get the package we're going to generate checksums for
pkg = spack.repo.get(args.package)

url_dict = {}
if args.versions:
# If the user asked for specific versions, use those
url_dict = {}
for version in args.versions:
version = ver(version)
if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or "
"version ranges. Use unambiguous versions.")
url_dict[version] = pkg.url_for_version(version)
elif args.preferred:
version = preferred_version(pkg)
url_dict = dict([(version, pkg.url_for_version(version))])
else:
# Otherwise, see what versions we can find online
url_dict = pkg.fetch_remote_versions()
@@ -87,7 +76,7 @@ def checksum(parser, args):
version_lines = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, keep_stage=args.keep_stage,
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
latest=args.latest, fetch_options=pkg.fetch_options)
fetch_options=pkg.fetch_options)

print()
print(version_lines)

@@ -7,7 +7,6 @@
import os
import shutil

import llnl.util.filesystem
import llnl.util.tty as tty

import spack.bootstrap
@@ -15,9 +14,9 @@
import spack.cmd.common.arguments as arguments
import spack.cmd.test
import spack.config
import spack.main
import spack.repo
import spack.stage
import spack.util.path
from spack.paths import lib_path, var_path

description = "remove temporary build files and/or downloaded archives"
@@ -28,7 +27,7 @@
class AllClean(argparse.Action):
"""Activates flags -s -d -f -m and -p simultaneously"""
def __call__(self, parser, namespace, values, option_string=None):
parser.parse_args(['-sdfmp'], namespace=namespace)
parser.parse_args(['-sdfmpb'], namespace=namespace)


def setup_parser(subparser):
@@ -49,11 +48,9 @@ def setup_parser(subparser):
help="remove .pyc, .pyo files and __pycache__ folders")
subparser.add_argument(
'-b', '--bootstrap', action='store_true',
help="remove software and configuration needed to bootstrap Spack")
help="remove software needed to bootstrap Spack")
subparser.add_argument(
'-a', '--all', action=AllClean,
help="equivalent to -sdfmp (does not include --bootstrap)",
nargs=0
'-a', '--all', action=AllClean, help="equivalent to -sdfmpb", nargs=0
)
arguments.add_common_arguments(subparser, ['specs'])

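With `-b` folded into `-a` (the `-sdfmpb` variant above), the bootstrap store can be wiped the same way the diff itself drives `spack uninstall`. A hedged sketch via the Python entry point used elsewhere in this diff:

```python
from spack.main import SpackCommand

clean = SpackCommand('clean')
clean('-b')  # remove bootstrapped software via the uninstall path shown below
clean('-a')  # equivalent to -sdfmpb in this variant
```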
@@ -76,11 +73,7 @@ def clean(parser, args):
if args.stage:
tty.msg('Removing all temporary build stages')
spack.stage.purge()
# Temp directory where buildcaches are extracted
extract_tmp = os.path.join(spack.store.layout.root, '.tmp')
if os.path.exists(extract_tmp):
tty.debug('Removing {0}'.format(extract_tmp))
shutil.rmtree(extract_tmp)

if args.downloads:
tty.msg('Removing cached downloads')
spack.caches.fetch_cache.destroy()
@@ -109,9 +102,8 @@ def clean(parser, args):
shutil.rmtree(dname)

if args.bootstrap:
bootstrap_prefix = spack.util.path.canonicalize_path(
spack.config.get('bootstrap:root')
)
msg = 'Removing bootstrapped software and configuration in "{0}"'
tty.msg(msg.format(bootstrap_prefix))
llnl.util.filesystem.remove_directory_contents(bootstrap_prefix)
msg = 'Removing software in "{0}"'
tty.msg(msg.format(spack.bootstrap.store_path()))
with spack.bootstrap.ensure_bootstrap_configuration():
uninstall = spack.main.SpackCommand('uninstall')
uninstall('-a', '-y')

@@ -69,7 +69,7 @@ def _specs(self, **kwargs):

# If an environment is provided, we'll restrict the search to
# only its installed packages.
env = ev.active_environment()
env = ev._active_environment
if env:
kwargs['hashes'] = set(env.all_hashes())

@@ -320,11 +320,3 @@ def add_cdash_args(subparser, add_help):
default=None,
help=cdash_help['buildstamp']
)


@arg
def reuse():
return Args(
'--reuse', action='store_true', default=False,
help='reuse installed dependencies'
)

@@ -18,6 +18,7 @@
import spack.compilers
import spack.config
import spack.spec
from spack.spec import ArchSpec, CompilerSpec

description = "manage compilers"
section = "system"
@@ -77,13 +78,24 @@ def compiler_find(args):
# None signals spack.compiler.find_compilers to use its default logic
paths = args.add_paths or None

# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(paths, scope=None)
# Don't initialize compilers config via compilers.get_compiler_config.
# Just let compiler_find do the entire process and return an empty
# config from all_compilers. Default for any other process is
# init_config=True.
compilers = [c for c in spack.compilers.find_compilers(paths)]
new_compilers = []
for c in compilers:
arch_spec = ArchSpec((None, c.operating_system, c.target))
same_specs = spack.compilers.compilers_for_spec(
c.spec, arch_spec, init_config=False)

if not same_specs:
new_compilers.append(c)

if new_compilers:
spack.compilers.add_compilers_to_config(
new_compilers, scope=args.scope, init_config=False
)
spack.compilers.add_compilers_to_config(new_compilers,
scope=args.scope,
init_config=False)
n = len(new_compilers)
s = 's' if n > 1 else ''

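The one-call `find_new_compilers(paths, scope=None)` variant folds the discover-then-filter loop into a single helper; `scope=None` means the result is compared against every configuration scope. A minimal sketch of that flow (argument spelling follows the diff's own call):

```python
import spack.compilers

# Discover compilers on PATH that no config scope knows about yet:
new = spack.compilers.find_new_compilers(None, scope=None)
if new:
    spack.compilers.add_compilers_to_config(new, scope=None, init_config=False)
```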
@@ -98,7 +110,7 @@ def compiler_find(args):


def compiler_remove(args):
cspec = spack.spec.CompilerSpec(args.compiler_spec)
cspec = CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
if not compilers:
tty.die("No compilers match spec %s" % cspec)
@@ -116,7 +128,7 @@ def compiler_remove(args):

def compiler_info(args):
"""Print info about all compilers matching a spec."""
cspec = spack.spec.CompilerSpec(args.compiler_spec)
cspec = CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)

if not compilers:

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev

description = 'concretize an environment and write a lockfile'
@@ -13,7 +12,6 @@


def setup_parser(subparser):
spack.cmd.common.arguments.add_common_arguments(subparser, ['reuse'])
subparser.add_argument(
'-f', '--force', action='store_true',
help="Re-concretize even if already concretized.")
@@ -36,8 +34,6 @@ def concretize(parser, args):
tests = False

with env.write_transaction():
concretized_specs = env.concretize(
force=args.force, tests=tests, reuse=args.reuse
)
concretized_specs = env.concretize(force=args.force, tests=tests)
ev.display_specs(concretized_specs)
env.write()

@@ -143,10 +143,7 @@ def config_get(args):
"""
scope, section = _get_scope_and_section(args)

if section is not None:
spack.config.config.print_section(section)

elif scope and scope.startswith('env:'):
if scope and scope.startswith('env:'):
config_file = spack.config.config.get_config_filename(scope, section)
if os.path.exists(config_file):
with open(config_file) as f:
@@ -154,6 +151,9 @@ def config_get(args):
else:
tty.die('environment has no %s file' % ev.manifest_name)

elif section is not None:
spack.config.config.print_section(section)

else:
tty.die('`spack config get` requires a section argument '
'or an active environment.')
@@ -170,19 +170,12 @@ def config_edit(args):
With no arguments and an active environment, edit the spack.yaml for
the active environment.
"""
spack_env = os.environ.get(ev.spack_env_var)
if spack_env and not args.scope:
# Don't use the scope object for envs, as `config edit` can be called
# for a malformed environment. Use SPACK_ENV to find spack.yaml.
config_file = ev.manifest_file(spack_env)
else:
# If we aren't editing a spack.yaml file, get config path from scope.
scope, section = _get_scope_and_section(args)
if not scope and not section:
tty.die('`spack config edit` requires a section argument '
'or an active environment.')
config_file = spack.config.config.get_config_filename(scope, section)
scope, section = _get_scope_and_section(args)
if not scope and not section:
tty.die('`spack config edit` requires a section argument '
'or an active environment.')

config_file = spack.config.config.get_config_filename(scope, section)
if args.print_file:
print(config_file)
else:
@@ -433,8 +426,7 @@ def config_prefer_upstream(args):
|
||||
or var_name not in spec.package.variants):
|
||||
continue
|
||||
|
||||
variant_desc, _ = spec.package.variants[var_name]
|
||||
if variant.value != variant_desc.default:
|
||||
if variant.value != spec.package.variants[var_name].default:
|
||||
variants.append(str(variant))
|
||||
variants.sort()
|
||||
variants = ' '.join(variants)
|
||||
|
||||
@@ -5,10 +5,7 @@
import os
import os.path

import llnl.util.tty

import spack.container
import spack.container.images
import spack.monitor

description = ("creates recipes to build images for different"
@@ -19,26 +16,9 @@

def setup_parser(subparser):
    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
    subparser.add_argument(
        '--list-os', action='store_true', default=False,
        help='list all the OS that can be used in the bootstrap phase and exit'
    )
    subparser.add_argument(
        '--last-stage',
        choices=('bootstrap', 'build', 'final'),
        default='final',
        help='last stage in the container recipe'
    )


def containerize(parser, args):
    if args.list_os:
        possible_os = spack.container.images.all_bootstrap_os()
        msg = 'The following operating systems can be used to bootstrap Spack:'
        msg += '\n{0}'.format(' '.join(possible_os))
        llnl.util.tty.msg(msg)
        return

    config_dir = args.env_dir or os.getcwd()
    config_file = os.path.abspath(os.path.join(config_dir, 'spack.yaml'))
    if not os.path.exists(config_file):
@@ -49,12 +29,10 @@ def containerize(parser, args):

    # If we have a monitor request, add monitor metadata to config
    if args.use_monitor:
        config['spack']['monitor'] = {
            "disable_auth": args.monitor_disable_auth,
            "host": args.monitor_host,
            "keep_going": args.monitor_keep_going,
            "prefix": args.monitor_prefix,
            "tags": args.monitor_tags
        }
    recipe = spack.container.recipe(config, last_phase=args.last_stage)
        config['spack']['monitor'] = {"disable_auth": args.monitor_disable_auth,
                                      "host": args.monitor_host,
                                      "keep_going": args.monitor_keep_going,
                                      "prefix": args.monitor_prefix,
                                      "tags": args.monitor_tags}
    recipe = spack.container.recipe(config)
    print(recipe)

@@ -14,9 +14,9 @@
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

import spack.architecture as architecture
import spack.config
import spack.paths
import spack.platforms
from spack.main import get_version
from spack.util.executable import which

@@ -87,15 +87,10 @@ def create_db_tarball(args):


def report(args):
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system('frontend')
    host_target = host_platform.target('frontend')
    architecture = spack.spec.ArchSpec(
        (str(host_platform), str(host_os), str(host_target))
    )
    print('* **Spack:**', get_version())
    print('* **Python:**', platform.python_version())
    print('* **Platform:**', architecture)
    print('* **Platform:**', architecture.Arch(
        architecture.platform(), 'frontend', 'frontend'))
    print('* **Concretizer:**', spack.config.get('config:concretizer'))


@@ -68,8 +68,8 @@ def compare_specs(a, b, to_string=False, color=None):
    # Prepare a solver setup to parse differences
    setup = asp.SpackSolverSetup()

    a_facts = set(t for t in setup.spec_clauses(a, body=True, expand_hashes=True))
    b_facts = set(t for t in setup.spec_clauses(b, body=True, expand_hashes=True))
    a_facts = set(t for t in setup.spec_clauses(a, body=True))
    b_facts = set(t for t in setup.spec_clauses(b, body=True))

    # We want to present them to the user as simple key: values
    intersect = sorted(a_facts.intersection(b_facts))

@@ -6,7 +6,6 @@
import os
import shutil
import sys
import tempfile

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -20,10 +19,8 @@
import spack.cmd.uninstall
import spack.config
import spack.environment as ev
import spack.environment.shell
import spack.schema.env
import spack.util.string as string
from spack.util.environment import EnvironmentModifications

description = "manage virtual environments"
section = "environments"
@@ -71,38 +68,23 @@ def env_activate_setup_parser(subparser):
        const=False, default=True,
        help="do not update PATH etc. with associated view")

    subparser.add_argument(
        '-d', '--dir', action='store_true', default=False,
        help="force spack to treat env as a directory, not a name")
    subparser.add_argument(
        '-p', '--prompt', action='store_true', default=False,
        help="decorate the command line prompt when activating")

    env_options = subparser.add_mutually_exclusive_group()
    env_options.add_argument(
        '--temp', action='store_true', default=False,
        help='create and activate an environment in a temporary directory')
    env_options.add_argument(
        '-d', '--dir', default=None,
        help="activate the environment in this directory")
    env_options.add_argument(
        metavar='env', dest='activate_env', nargs='?', default=None,
    subparser.add_argument(
        metavar='env', dest='activate_env',
        help='name of environment to activate')


def create_temp_env_directory():
    """
    Returns the path of a temporary directory in which to
    create an environment
    """
    return tempfile.mkdtemp(prefix="spack-")


def env_activate(args):
    if not args.activate_env and not args.dir and not args.temp:
        tty.die('spack env activate requires an environment name, directory, or --temp')

    env = args.activate_env
    if not args.shell:
        spack.cmd.common.shell_init_instructions(
            "spack env activate",
            "    eval `spack env activate {sh_arg} [...]`",
            "    eval `spack env activate {sh_arg} %s`" % env,
        )
        return 1

@@ -111,50 +93,27 @@ def env_activate(args):
        tty.die('Calling spack env activate with --env, --env-dir and --no-env '
                'is ambiguous')

    env_name_or_dir = args.activate_env or args.dir
    if ev.exists(env) and not args.dir:
        spack_env = ev.root(env)
        short_name = env
        env_prompt = '[%s]' % env

    # Temporary environment
    if args.temp:
        env = create_temp_env_directory()
        env_path = os.path.abspath(env)
        short_name = os.path.basename(env_path)
        ev.Environment(env).write(regenerate=False)

    # Named environment
    elif ev.exists(env_name_or_dir) and not args.dir:
        env_path = ev.root(env_name_or_dir)
        short_name = env_name_or_dir

    # Environment directory
    elif ev.is_env_dir(env_name_or_dir):
        env_path = os.path.abspath(env_name_or_dir)
        short_name = os.path.basename(env_path)
    elif ev.is_env_dir(env):
        spack_env = os.path.abspath(env)
        short_name = os.path.basename(os.path.abspath(env))
        env_prompt = '[%s]' % short_name

    else:
        tty.die("No such environment: '%s'" % env_name_or_dir)
        tty.die("No such environment: '%s'" % env)

    env_prompt = '[%s]' % short_name
    if spack_env == os.environ.get('SPACK_ENV'):
        tty.debug("Environment %s is already active" % args.activate_env)
        return

    # We only support one active environment at a time, so deactivate the current one.
    if ev.active_environment() is None:
        cmds = ''
        env_mods = EnvironmentModifications()
    else:
        cmds = spack.environment.shell.deactivate_header(shell=args.shell)
        env_mods = spack.environment.shell.deactivate()

    # Activate new environment
    active_env = ev.Environment(env_path)
    cmds += spack.environment.shell.activate_header(
        env=active_env,
        shell=args.shell,
    cmds = ev.activate(
        ev.Environment(spack_env), add_view=args.with_view, shell=args.shell,
        prompt=env_prompt if args.prompt else None
    )
    env_mods.extend(spack.environment.shell.activate(
        env=active_env,
        add_view=args.with_view
    ))
    cmds += env_mods.shell_modifications(args.shell)
    sys.stdout.write(cmds)

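Note: the removed side of env_activate accumulates a single stream of shell code — deactivation of whatever is active, then activation of the new environment — and writes it to stdout for the shell wrapper to eval. A condensed sketch of that accumulate-and-emit pattern (the exported variable and snippets are illustrative placeholders, not Spack's real output):

    import sys

    def activate_commands(currently_active):
        cmds = ''
        if currently_active is not None:
            # hypothetical deactivation snippet for the old environment
            cmds += 'unset SPACK_ENV\n'
        # hypothetical activation snippet for the new environment
        cmds += 'export SPACK_ENV=/path/to/env\n'
        return cmds

    # the caller writes the result for the shell wrapper to eval
    sys.stdout.write(activate_commands(currently_active='old-env'))
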
@@ -188,12 +147,10 @@ def env_deactivate(args):
        tty.die('Calling spack env deactivate with --env, --env-dir and --no-env '
                'is ambiguous')

    if ev.active_environment() is None:
    if 'SPACK_ENV' not in os.environ:
        tty.die('No environment is currently active.')

    cmds = spack.environment.shell.deactivate_header(args.shell)
    env_mods = spack.environment.shell.deactivate()
    cmds += env_mods.shell_modifications(args.shell)
    cmds = ev.deactivate(shell=args.shell)
    sys.stdout.write(cmds)


@@ -5,17 +5,23 @@
from __future__ import print_function

import argparse
import os
import re
import sys
from collections import defaultdict, namedtuple

import six

import llnl.util.filesystem
import llnl.util.tty as tty
import llnl.util.tty.colify as colify

import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.detection
import spack.error
import spack.util.environment
import spack.util.spack_yaml as syaml

description = "manage external packages in Spack configuration"
section = "config"
@@ -47,6 +53,104 @@ def setup_parser(subparser):
    )


def is_executable(path):
    return os.path.isfile(path) and os.access(path, os.X_OK)


def _get_system_executables():
    """Get the paths of all executables available from the current PATH.

    For convenience, this is constructed as a dictionary where the keys are
    the executable paths and the values are the names of the executables
    (i.e. the basename of the executable path).

    There may be multiple paths with the same basename. In this case it is
    assumed there are two different instances of the executable.
    """
    path_hints = spack.util.environment.get_path('PATH')
    search_paths = llnl.util.filesystem.search_paths_for_executables(
        *path_hints)

    path_to_exe = {}
    # Reverse order of search directories so that an exe in the first PATH
    # entry overrides later entries
    for search_path in reversed(search_paths):
        for exe in os.listdir(search_path):
            exe_path = os.path.join(search_path, exe)
            if is_executable(exe_path):
                path_to_exe[exe_path] = exe
    return path_to_exe

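Note: _get_system_executables above can be approximated with the standard library alone. This sketch mirrors its PATH scan and path-to-name mapping (no Spack helpers; assumes a POSIX-style PATH):

    import os

    def system_executables():
        path_to_exe = {}
        # walk PATH entries in reverse so earlier entries win on re-insertion
        for d in reversed(os.environ.get('PATH', '').split(os.pathsep)):
            if not os.path.isdir(d):
                continue
            for name in os.listdir(d):
                full = os.path.join(d, name)
                if os.path.isfile(full) and os.access(full, os.X_OK):
                    path_to_exe[full] = name
        return path_to_exe

    # e.g. {'/usr/bin/cmake': 'cmake', '/usr/bin/gcc': 'gcc', ...}
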
ExternalPackageEntry = namedtuple(
    'ExternalPackageEntry',
    ['spec', 'base_dir'])


def _generate_pkg_config(external_pkg_entries):
    """Generate config according to the packages.yaml schema for a single
    package.

    This does not generate the entire packages.yaml. For example, given some
    external entries for the CMake package, this could return::

        {
            'externals': [{
                'spec': 'cmake@3.17.1',
                'prefix': '/opt/cmake-3.17.1/'
            }, {
                'spec': 'cmake@3.16.5',
                'prefix': '/opt/cmake-3.16.5/'
            }]
        }
    """

    pkg_dict = syaml.syaml_dict()
    pkg_dict['externals'] = []
    for e in external_pkg_entries:
        if not _spec_is_valid(e.spec):
            continue

        external_items = [('spec', str(e.spec)), ('prefix', e.base_dir)]
        if e.spec.external_modules:
            external_items.append(('modules', e.spec.external_modules))

        if e.spec.extra_attributes:
            external_items.append(
                ('extra_attributes',
                 syaml.syaml_dict(e.spec.extra_attributes.items()))
            )

        # external_items.extend(e.spec.extra_attributes.items())
        pkg_dict['externals'].append(
            syaml.syaml_dict(external_items)
        )

    return pkg_dict


def _spec_is_valid(spec):
    try:
        str(spec)
    except spack.error.SpackError:
        # It is assumed here that we can at least extract the package name from
        # the spec so we can look up the implementation of
        # determine_spec_details
        tty.warn('Constructed spec for {0} does not have a string'
                 ' representation'.format(spec.name))
        return False

    try:
        spack.spec.Spec(str(spec))
    except spack.error.SpackError:
        tty.warn('Constructed spec has a string representation but the string'
                 ' representation does not evaluate to a valid spec: {0}'
                 .format(str(spec)))
        return False

    return True


def external_find(args):
    # Construct the list of possible packages to be detected
    packages_to_check = []
@@ -72,9 +176,9 @@ def external_find(args):
    if not args.tags and not packages_to_check:
        packages_to_check = spack.repo.path.all_packages()

    detected_packages = spack.detection.by_executable(packages_to_check)
    new_entries = spack.detection.update_configuration(
        detected_packages, scope=args.scope, buildable=not args.not_buildable
    pkg_to_entries = _get_external_packages(packages_to_check)
    new_entries = _update_pkg_config(
        args.scope, pkg_to_entries, args.not_buildable
    )
    if new_entries:
        path = spack.config.config.get_config_filename(args.scope, 'packages')
@@ -86,6 +190,163 @@ def external_find(args):
    tty.msg('No new external packages detected')

def _group_by_prefix(paths):
    groups = defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
    return groups.items()


def _convert_to_iterable(single_val_or_multiple):
    x = single_val_or_multiple
    if x is None:
        return []
    elif isinstance(x, six.string_types):
        return [x]
    elif isinstance(x, spack.spec.Spec):
        # Specs are iterable, but a single spec should be converted to a list
        return [x]

    try:
        iter(x)
        return x
    except TypeError:
        return [x]

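Note: a toy run of _group_by_prefix above, with invented paths, showing how detected executables are bucketed by their containing directory before spec detection:

    from collections import defaultdict
    import os

    def group_by_prefix(paths):
        groups = defaultdict(set)
        for p in paths:
            groups[os.path.dirname(p)].add(p)
        return groups.items()

    paths = ['/opt/cmake/bin/cmake', '/opt/cmake/bin/ctest', '/usr/bin/gcc']
    for prefix, exes in group_by_prefix(paths):
        print(prefix, sorted(exes))
    # /opt/cmake/bin ['/opt/cmake/bin/cmake', '/opt/cmake/bin/ctest']
    # /usr/bin ['/usr/bin/gcc']
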
def _determine_base_dir(prefix):
    # Given a prefix where an executable is found, assuming that prefix ends
    # with /bin/, strip off the 'bin' directory to get a Spack-compatible
    # prefix
    assert os.path.isdir(prefix)
    if os.path.basename(prefix) == 'bin':
        return os.path.dirname(prefix)


def _get_predefined_externals():
    # Pull from all scopes when looking for preexisting external package
    # entries
    pkg_config = spack.config.get('packages')
    already_defined_specs = set()
    for pkg_name, per_pkg_cfg in pkg_config.items():
        for item in per_pkg_cfg.get('externals', []):
            already_defined_specs.add(spack.spec.Spec(item['spec']))
    return already_defined_specs


def _update_pkg_config(scope, pkg_to_entries, not_buildable):
    predefined_external_specs = _get_predefined_externals()

    pkg_to_cfg, all_new_specs = {}, []
    for pkg_name, ext_pkg_entries in pkg_to_entries.items():
        new_entries = list(
            e for e in ext_pkg_entries
            if (e.spec not in predefined_external_specs))

        pkg_config = _generate_pkg_config(new_entries)
        all_new_specs.extend([
            spack.spec.Spec(x['spec']) for x in pkg_config.get('externals', [])
        ])
        if not_buildable:
            pkg_config['buildable'] = False
        pkg_to_cfg[pkg_name] = pkg_config

    pkgs_cfg = spack.config.get('packages', scope=scope)

    pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
    spack.config.set('packages', pkgs_cfg, scope=scope)

    return all_new_specs

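Note: _update_pkg_config merges the generated per-package config into the existing packages config before writing it back. A rough sketch of the shape of that merge, using plain dicts and a naive stand-in for spack.config.merge_yaml (package names and paths here are made up):

    existing = {'cmake': {'externals': [{'spec': 'cmake@3.16.5',
                                         'prefix': '/usr'}]}}
    new = {'cmake': {'externals': [{'spec': 'cmake@3.20.0',
                                    'prefix': '/opt/cmake'}],
                     'buildable': False}}

    merged = dict(existing)
    for pkg, cfg in new.items():
        # naive stand-in: merge_yaml actually merges lists/dicts recursively
        merged.setdefault(pkg, {}).update(cfg)
    print(merged['cmake']['buildable'])  # False
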
def _get_external_packages(packages_to_check, system_path_to_exe=None):
    if not system_path_to_exe:
        system_path_to_exe = _get_system_executables()

    exe_pattern_to_pkgs = defaultdict(list)
    for pkg in packages_to_check:
        if hasattr(pkg, 'executables'):
            for exe in pkg.executables:
                exe_pattern_to_pkgs[exe].append(pkg)

    pkg_to_found_exes = defaultdict(set)
    for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
        compiled_re = re.compile(exe_pattern)
        for path, exe in system_path_to_exe.items():
            if compiled_re.search(exe):
                for pkg in pkgs:
                    pkg_to_found_exes[pkg].add(path)

    pkg_to_entries = defaultdict(list)
    resolved_specs = {}  # spec -> exe found for the spec

    for pkg, exes in pkg_to_found_exes.items():
        if not hasattr(pkg, 'determine_spec_details'):
            tty.warn("{0} must define 'determine_spec_details' in order"
                     " for Spack to detect externally-provided instances"
                     " of the package.".format(pkg.name))
            continue

        # TODO: iterate through this in a predetermined order (e.g. by package
        # name) to get repeatable results when there are conflicts. Note that
        # if we take the prefixes returned by _group_by_prefix, then consider
        # them in the order that they appear in PATH, this should be sufficient
        # to get repeatable results.
        for prefix, exes_in_prefix in _group_by_prefix(exes):
            # TODO: multiple instances of a package can live in the same
            # prefix, and a package implementation can return multiple specs
            # for one prefix, but without additional details (e.g. about the
            # naming scheme which differentiates them), the spec won't be
            # usable.
            specs = _convert_to_iterable(
                pkg.determine_spec_details(prefix, exes_in_prefix))

            if not specs:
                tty.debug(
                    'The following executables in {0} were decidedly not '
                    'part of the package {1}: {2}'
                    .format(prefix, pkg.name, ', '.join(
                        _convert_to_iterable(exes_in_prefix)))
                )

            for spec in specs:
                pkg_prefix = _determine_base_dir(prefix)

                if not pkg_prefix:
                    tty.debug("{0} does not end with a 'bin/' directory: it"
                              " cannot be added as a Spack package"
                              .format(prefix))
                    continue

                if spec in resolved_specs:
                    prior_prefix = ', '.join(
                        _convert_to_iterable(resolved_specs[spec]))

                    tty.debug(
                        "Executables in {0} and {1} are both associated"
                        " with the same spec {2}"
                        .format(prefix, prior_prefix, str(spec)))
                    continue
                else:
                    resolved_specs[spec] = prefix

                try:
                    spec.validate_detection()
                except Exception as e:
                    msg = ('"{0}" has been detected on the system but will '
                           'not be added to packages.yaml [reason={1}]')
                    tty.warn(msg.format(spec, str(e)))
                    continue

                if spec.external_path:
                    pkg_prefix = spec.external_path

                pkg_to_entries[pkg.name].append(
                    ExternalPackageEntry(spec=spec, base_dir=pkg_prefix))

    return pkg_to_entries


def external_list(args):
    # Trigger a read of all packages, might take a long time.
    list(spack.repo.path.all_packages())

@@ -76,6 +76,10 @@ def fetch(parser, args):
        if args.missing and package.installed:
            continue

        # Do not attempt to fetch externals (they're local)
        if package.spec.external:
            continue

        package.do_fetch()

    package = spack.repo.get(spec)

@@ -6,6 +6,7 @@
from __future__ import print_function

import copy
import os
import sys

import llnl.util.lang
@@ -17,7 +18,9 @@
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.user_environment as uenv
from spack.database import InstallStatuses
from spack.util.string import plural

description = "list and search installed packages"
section = "basic"
@@ -238,7 +241,8 @@ def _find(parser, args):
        results = [x for x in results if x.name in packages_with_tags]

    if args.loaded:
        results = spack.cmd.filter_loaded_specs(results)
        hashes = os.environ.get(uenv.spack_loaded_hashes_var, '').split(':')
        results = [x for x in results if x.dag_hash() in hashes]

    # Display the result
    if args.json:
@@ -247,10 +251,7 @@ def _find(parser, args):
    if not args.format:
        if env:
            display_env(env, args, decorator)

        if sys.stdout.isatty() and args.groups:
            pkg_type = "loaded" if args.loaded else "installed"
            spack.cmd.print_how_many_pkgs(results, pkg_type)

        tty.msg("%s" % plural(len(results), 'installed package'))
        cmd.display_specs(
            results, args, decorator=decorator, all_headers=True)

@@ -17,7 +17,6 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
from spack.package import preferred_version

description = 'get detailed information on a particular package'
section = 'basic'
@@ -57,7 +56,7 @@ def variant(s):
class VariantFormatter(object):
    def __init__(self, variants):
        self.variants = variants
        self.headers = ('Name [Default]', 'When', 'Allowed values', 'Description')
        self.headers = ('Name [Default]', 'Allowed values', 'Description')

        # Formats
        fmt_name = '{0} [{1}]'
@@ -68,11 +67,9 @@ def __init__(self, variants):
        self.column_widths = [len(x) for x in self.headers]

        # Expand columns based on max line lengths
        for k, e in variants.items():
            v, w = e
        for k, v in variants.items():
            candidate_max_widths = (
                len(fmt_name.format(k, self.default(v))),  # Name [Default]
                len(str(w)),
                len(v.allowed_values),  # Allowed values
                len(v.description)  # Description
            )
@@ -80,29 +77,26 @@ def __init__(self, variants):
            self.column_widths = (
                max(self.column_widths[0], candidate_max_widths[0]),
                max(self.column_widths[1], candidate_max_widths[1]),
                max(self.column_widths[2], candidate_max_widths[2]),
                max(self.column_widths[3], candidate_max_widths[3])
                max(self.column_widths[2], candidate_max_widths[2])
            )

        # Don't let name or possible values be less than max widths
        _, cols = tty.terminal_size()
        max_name = min(self.column_widths[0], 30)
        max_when = min(self.column_widths[1], 30)
        max_vals = min(self.column_widths[2], 20)
        max_vals = min(self.column_widths[1], 20)

        # allow the description column to extend as wide as the terminal.
        max_description = min(
            self.column_widths[3],
            self.column_widths[2],
            # min width 70 cols, 14 cols of margins and column spacing
            max(cols, 70) - max_name - max_vals - 14,
        )
        self.column_widths = (max_name, max_when, max_vals, max_description)
        self.column_widths = (max_name, max_vals, max_description)

        # Compute the format
        self.fmt = "%%-%ss%%-%ss%%-%ss%%s" % (
        self.fmt = "%%-%ss%%-%ss%%s" % (
            self.column_widths[0] + 4,
            self.column_widths[1] + 4,
            self.column_widths[2] + 4
            self.column_widths[1] + 4
        )

    def default(self, v):
@@ -120,27 +114,21 @@ def lines(self):
        underline = tuple([w * "=" for w in self.column_widths])
        yield '    ' + self.fmt % underline
        yield ''
        for k, e in sorted(self.variants.items()):
            v, w = e
        for k, v in sorted(self.variants.items()):
            name = textwrap.wrap(
                '{0} [{1}]'.format(k, self.default(v)),
                width=self.column_widths[0]
            )
            if len(w) == 1:
                w = w[0]
                if w == spack.spec.Spec():
                    w = '--'
            when = textwrap.wrap(str(w), width=self.column_widths[1])
            allowed = v.allowed_values.replace('True, False', 'on, off')
            allowed = textwrap.wrap(allowed, width=self.column_widths[2])
            allowed = textwrap.wrap(allowed, width=self.column_widths[1])
            description = []
            for d_line in v.description.split('\n'):
                description += textwrap.wrap(
                    d_line,
                    width=self.column_widths[3]
                    width=self.column_widths[2]
                )
            for t in zip_longest(
                name, when, allowed, description, fillvalue=''
                name, allowed, description, fillvalue=''
            ):
                yield "    " + self.fmt % t

@@ -203,38 +191,29 @@ def print_text_info(pkg):
        color.cprint('')
        color.cprint(section_title('Safe versions:  '))
        color.cprint(version('    None'))
        color.cprint('')
        color.cprint(section_title('Deprecated versions:  '))
        color.cprint(version('    None'))
    else:
        pad = padder(pkg.versions, 4)

        preferred = preferred_version(pkg)
        # Here we sort first on the fact that a version is marked
        # as preferred in the package, then on the fact that the
        # version is not develop, then lexicographically
        key_fn = lambda v: (pkg.versions[v].get('preferred', False),
                            not v.isdevelop(),
                            v)
        preferred = sorted(pkg.versions, key=key_fn).pop()
        url = ''
        if pkg.has_code:
            url = fs.for_package_version(pkg, preferred)

        line = version('    {0}'.format(pad(preferred))) + color.cescape(url)
        color.cprint(line)
        color.cprint('')
        color.cprint(section_title('Safe versions:  '))

        safe = []
        deprecated = []
        for v in reversed(sorted(pkg.versions)):
            if pkg.has_code:
                url = fs.for_package_version(pkg, v)
            if pkg.versions[v].get('deprecated', False):
                deprecated.append((v, url))
            else:
                safe.append((v, url))

        for title, vers in [('Safe', safe), ('Deprecated', deprecated)]:
            color.cprint('')
            color.cprint(section_title('{0} versions:  '.format(title)))
            if not vers:
                color.cprint(version('    None'))
                continue

        for v, url in vers:
            if not pkg.versions[v].get('deprecated', False):
                if pkg.has_code:
                    url = fs.for_package_version(pkg, v)
                line = version('    {0}'.format(pad(v))) + color.cescape(url)
                color.cprint(line)

@@ -243,7 +222,7 @@ def print_text_info(pkg):

    formatter = VariantFormatter(pkg.variants)
    for line in formatter.lines:
        color.cprint(color.cescape(line))
        color.cprint(line)

    if hasattr(pkg, 'phases') and pkg.phases:
        color.cprint('')

@@ -78,7 +78,7 @@ def setup_parser(subparser):
    subparser.add_argument(
        '-u', '--until', type=str, dest='until', default=None,
        help="phase to stop after when installing (default None)")
    arguments.add_common_arguments(subparser, ['jobs', 'reuse'])
    arguments.add_common_arguments(subparser, ['jobs'])
    subparser.add_argument(
        '--overwrite', action='store_true',
        help="reinstall an existing spec, even if it has dependents")
@@ -338,7 +338,7 @@ def get_tests(specs):

    if not args.only_concrete:
        with env.write_transaction():
            concretized_specs = env.concretize(tests=tests, reuse=args.reuse)
            concretized_specs = env.concretize(tests=tests)
            ev.display_specs(concretized_specs)

            # save view regeneration for later, so that we only do it
@@ -392,8 +392,7 @@ def get_tests(specs):

    try:
        specs = spack.cmd.parse_specs(
            args.spec, concretize=True, tests=tests, reuse=args.reuse
        )
            args.spec, concretize=True, tests=tests)
    except SpackError as e:
        tty.debug(e)
        reporter.concretization_report(e.message)

@@ -16,6 +16,7 @@
import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.dependency
import spack.repo
from spack.version import VersionList
@@ -56,6 +57,8 @@ def setup_parser(subparser):
        '-v', '--virtuals', action='store_true', default=False,
        help='include virtual packages in list')

    arguments.add_common_arguments(subparser, ['tags'])


def filter_by_name(pkgs, args):
    """
@@ -218,13 +221,9 @@ def head(n, span_id, title, anchor=None):

    out.write('<dt>Homepage:</dt>\n')
    out.write('<dd><ul class="first last simple">\n')

    if pkg.homepage:
        out.write(('<li>'
                   '<a class="reference external" href="%s">%s</a>'
                   '</li>\n') % (pkg.homepage, escape(pkg.homepage, True)))
    else:
        out.write('No homepage\n')
    out.write(('<li>'
               '<a class="reference external" href="%s">%s</a>'
               '</li>\n') % (pkg.homepage, escape(pkg.homepage, True)))
    out.write('</ul></dd>\n')

    out.write('<dt>Spack package:</dt>\n')
@@ -274,6 +273,13 @@ def list(parser, args):
    # Filter the set appropriately
    sorted_packages = filter_by_name(pkgs, args)

    # Filter by tags
    if args.tags:
        packages_with_tags = set(
            spack.repo.path.packages_with_tags(*args.tags))
        sorted_packages = set(sorted_packages) & packages_with_tags
        sorted_packages = sorted(sorted_packages)

    if args.update:
        # change output stream if user asked for update
        if os.path.exists(args.update):

@@ -7,7 +7,6 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
import spack.environment as ev
import spack.store
import spack.user_environment as uenv
@@ -21,7 +20,8 @@
def setup_parser(subparser):
    """Parser is only constructed so that this prints a nice help
       message with -h. """
    arguments.add_common_arguments(subparser, ['constraint'])
    arguments.add_common_arguments(
        subparser, ['recurse_dependencies', 'installed_specs'])

    shells = subparser.add_mutually_exclusive_group()
    shells.add_argument(
@@ -53,29 +53,14 @@ def setup_parser(subparser):
        the dependencies"""
    )

    subparser.add_argument(
        '--list',
        action='store_true',
        default=False,
        help="show loaded packages: same as `spack find --loaded`"
    )


def load(parser, args):
    env = ev.active_environment()

    if args.list:
        results = spack.cmd.filter_loaded_specs(args.specs())
        if sys.stdout.isatty():
            spack.cmd.print_how_many_pkgs(results, "loaded")
        spack.cmd.display_specs(results)
        return

    specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
             for spec in spack.cmd.parse_specs(args.constraint)]
             for spec in spack.cmd.parse_specs(args.specs)]

    if not args.shell:
        specs_str = ' '.join(args.constraint) or "SPECS"
        specs_str = ' '.join(args.specs) or "SPECS"
        spack.cmd.common.shell_init_instructions(
            "spack load",
            "    eval `spack load {sh_arg} %s`" % specs_str,

@@ -56,8 +56,8 @@ def setup_parser(subparser):
        help="build directory for a spec "
             "(requires it to be staged first)")
    directories.add_argument(
        '-e', '--env', action='store', dest='location_env', nargs='?', metavar="name",
        default=False, help="location of the named or current environment")
        '-e', '--env', action='store', dest='location_env',
        help="location of an environment managed by spack")

    arguments.add_common_arguments(subparser, ['spec'])

@@ -71,17 +71,10 @@ def location(parser, args):
        print(spack.paths.prefix)
        return

    # no -e corresponds to False, -e without arg to None, -e name to the string name.
    if args.location_env is not False:
        if args.location_env is None:
            # Get current environment path
            spack.cmd.require_active_env('location -e')
            path = ev.active_environment().path
        else:
            # Get named environment path
            if not ev.exists(args.location_env):
                tty.die("no such environment: '%s'" % args.location_env)
            path = ev.root(args.location_env)
    if args.location_env:
        path = ev.root(args.location_env)
        if not os.path.isdir(path):
            tty.die("no such environment: '%s'" % args.location_env)
        print(path)
        return

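Note: the comment deleted above documents a three-state argparse option. With nargs='?' the parsed value distinguishes "flag absent" (the default), "flag given bare" (the const, which is None unless overridden), and "flag given with a value". A minimal demonstration (a hypothetical parser, not Spack's):

    import argparse

    p = argparse.ArgumentParser()
    p.add_argument('-e', '--env', dest='location_env',
                   nargs='?', default=False, const=None)

    print(p.parse_args([]).location_env)               # False: no -e at all
    print(p.parse_args(['-e']).location_env)           # None: bare -e
    print(p.parse_args(['-e', 'myenv']).location_env)  # 'myenv'
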
@@ -44,8 +44,7 @@ def setup_parser(subparser):

    # Below are arguments w.r.t. spec display (like spack spec)
    arguments.add_common_arguments(
        subparser, ['long', 'very_long', 'install_status', 'reuse']
    )
        subparser, ['long', 'very_long', 'install_status'])
    subparser.add_argument(
        '-y', '--yaml', action='store_const', dest='format', default=None,
        const='yaml', help='print concrete spec as yaml')
@@ -104,14 +103,16 @@ def solve(parser, args):

    # dump generated ASP program
    result = asp.solve(
        specs, dump=dump, models=models, timers=args.timers, stats=args.stats,
        reuse=args.reuse,
        specs, dump=dump, models=models, timers=args.timers, stats=args.stats
    )
    if 'solutions' not in dump:
        return

    # die if no solution was found
    result.raise_if_unsat()
    # TODO: we need to be able to provide better error messages than this
    if not result.satisfiable:
        result.print_cores()
        tty.die("Unsatisfiable spec.")

    # dump the solutions as concretized specs
    if 'solutions' in dump:
@@ -120,21 +121,13 @@ def solve(parser, args):
        tty.msg("Best of %d considered solutions." % result.nmodels)
        tty.msg("Optimization Criteria:")

        maxlen = max(len(s[2]) for s in result.criteria)
        maxlen = max(len(s) for s in result.criteria)
        color.cprint(
            "@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
            "@*{ Priority Criterion %sValue}" % ((maxlen - 10) * " ")
        )

        fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
        for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
            color.cprint(
                fmt % (
                    i,
                    name,
                    "-" if build_idx is None else opt[idx],
                    opt[idx] if build_idx is None else opt[build_idx],
                )
            )
        for i, (name, val) in enumerate(zip(result.criteria, opt)):
            fmt = " @K{%%-8d} %%-%ds%%5d" % maxlen
            color.cprint(fmt % (i + 1, name, val))
        print()

        for spec in result.specs:

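Note: the two sides of this hunk assume different shapes for result.criteria — 3-tuples of cost indices plus a name versus bare names zipped with the optimization vector by position. A toy illustration of both layouts (all values invented):

    opt = [0, 3, 7]

    # newer shape: (index into opt, build index or None, criterion name)
    criteria_new = [(0, 1, 'version badness'), (2, None, 'deprecated versions')]
    for i, (idx, build_idx, name) in enumerate(criteria_new, 1):
        installed = '-' if build_idx is None else opt[idx]
        to_build = opt[idx] if build_idx is None else opt[build_idx]
        print(i, name, installed, to_build)

    # older shape: names only, aligned with opt by position
    criteria_old = ['version badness', 'deprecated versions']
    for i, (name, val) in enumerate(zip(criteria_old, opt)):
        print(i + 1, name, val)
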
@@ -28,8 +28,7 @@ def setup_parser(subparser):
    spack help --spec
    """
    arguments.add_common_arguments(
        subparser, ['long', 'very_long', 'install_status', 'reuse']
    )
        subparser, ['long', 'very_long', 'install_status'])
    subparser.add_argument(
        '-y', '--yaml', action='store_const', dest='format', default=None,
        const='yaml', help='print concrete spec as YAML')
@@ -65,7 +64,7 @@ def spec(parser, args):
    name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
    fmt = '{@version}{%compiler}{compiler_flags}{variants}{arch=architecture}'
    install_status_fn = spack.spec.Spec.install_status
    tree_kwargs = {
    kwargs = {
        'cover': args.cover,
        'format': name_fmt + fmt,
        'hashlen': None if args.very_long else 7,
@@ -82,15 +81,11 @@ def spec(parser, args):
    if not args.specs:
        tty.die("spack spec requires at least one spec")

    concretize_kwargs = {
        'reuse': args.reuse
    }

    for spec in spack.cmd.parse_specs(args.specs):
        # With -y, just print YAML to output.
        if args.format:
            if spec.name in spack.repo.path or spec.virtual:
                spec.concretize(**concretize_kwargs)
                spec.concretize()

            # The user can specify the hash type to use
            hash_type = getattr(ht, args.hash_type)
@@ -103,13 +98,13 @@ def spec(parser, args):
            continue

        with tree_context():
            tree_kwargs['hashes'] = False  # Always False for input spec
            kwargs['hashes'] = False  # Always False for input spec
            print("Input spec")
            print("--------------------------------")
            print(spec.tree(**tree_kwargs))
            print(spec.tree(**kwargs))

            tree_kwargs['hashes'] = args.long or args.very_long
            kwargs['hashes'] = args.long or args.very_long
            print("Concretized")
            print("--------------------------------")
            spec.concretize(**concretize_kwargs)
            print(spec.tree(**tree_kwargs))
            spec.concretize()
            print(spec.tree(**kwargs))

@@ -1,107 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import six

import llnl.util.tty as tty
import llnl.util.tty.colify as colify

import spack.repo
import spack.store
import spack.tag

description = "Show package tags and associated packages"
section = "basic"
level = "long"


def report_tags(category, tags):
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()

    if isatty:
        num = len(tags)
        fmt = '{0} package tag'.format(category)
        buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))

    if tags:
        colify.colify(tags, output=buffer, tty=isatty, indent=4)
    else:
        buffer.write("    None\n")
    print(buffer.getvalue())


def setup_parser(subparser):
    subparser.epilog = (
        "Tags from known packages will be used if no tags are provided on "
        "the command\nline. If tags are provided, packages with at least one "
        "will be reported.\n\nYou are not allowed to provide tags and use "
        "'--all' at the same time."
    )
    subparser.add_argument(
        '-i', '--installed', action='store_true', default=False,
        help="show information for installed packages only"
    )
    subparser.add_argument(
        '-a', '--all', action='store_true', default=False,
        help="show packages for all available tags"
    )
    subparser.add_argument(
        'tag',
        nargs='*',
        help="show packages with the specified tag"
    )


def tags(parser, args):
    # Disallow combining all option with (positional) tags to avoid confusion
    if args.all and args.tag:
        tty.die("Use the '--all' option OR provide tag(s) on the command line")

    # Provide a nice, simple message if database is empty
    if args.installed and not spack.environment.installed_specs():
        tty.msg("No installed packages")
        return

    # unique list of available tags
    available_tags = sorted(spack.repo.path.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return

    show_packages = args.tag or args.all

    # Only report relevant, available tags if no packages are to be shown
    if not show_packages:
        if not args.installed:
            report_tags("available", available_tags)
        else:
            tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True)
            tags = tag_pkgs.keys() if tag_pkgs else []
            report_tags("installed", tags)
        return

    # Report packages associated with tags
    buffer = six.StringIO()
    isatty = sys.stdout.isatty()

    tags = args.tag if args.tag else available_tags
    tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
    missing = 'No installed packages' if args.installed else 'None'
    for tag in sorted(tag_pkgs):
        # TODO: Remove the sorting once we're sure noone has an old
        # TODO: tag cache since it can accumulate duplicates.
        packages = sorted(list(set(tag_pkgs[tag])))
        if isatty:
            buffer.write("{0}:\n".format(tag))

        if packages:
            colify.colify(packages, output=buffer, tty=isatty, indent=4)
        else:
            buffer.write("    {0}\n".format(missing))
        buffer.write("\n")
    print(buffer.getvalue())
@@ -89,12 +89,6 @@ def setup_parser(subparser):
        "-a", "--all", action="store_true", dest="list_all",
        help="list all packages with tests (not just installed)")

    list_parser.add_argument(
        'tag',
        nargs='*',
        help="limit packages to those with all listed tags"
    )

    # Find
    find_parser = sp.add_parser('find', description=test_find.__doc__,
                                help=first_line(test_find.__doc__))
@@ -143,12 +137,6 @@ def test_run(args):
    If no specs are listed, run tests for all packages in the current
    environment or all installed packages if there is no active environment.
    """
    if args.alias:
        suites = spack.install_test.get_named_test_suites(args.alias)
        if suites:
            tty.die('Test suite "{0}" already exists. Try another alias.'
                    .format(args.alias))

    # cdash help option
    if args.help_cdash:
        parser = argparse.ArgumentParser(
@@ -220,25 +208,15 @@ def has_test_method(pkg):

def test_list(args):
    """List installed packages with available tests."""
    tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag \
        else set()

    def has_test_and_tags(pkg_class):
        return has_test_method(pkg_class) and \
            (not args.tag or pkg_class.name in tagged)

    if args.list_all:
        report_packages = [
        all_packages_with_tests = [
            pkg_class.name
            for pkg_class in spack.repo.path.all_package_classes()
            if has_test_and_tags(pkg_class)
            if has_test_method(pkg_class)
        ]

        if sys.stdout.isatty():
            filtered = ' tagged' if args.tag else ''
            tty.msg("{0}{1} packages with tests.".
                    format(len(report_packages), filtered))
        colify.colify(report_packages)
        tty.msg("%d packages with tests." % len(all_packages_with_tests))
        colify.colify(all_packages_with_tests)
        return

    # TODO: This can be extended to have all of the output formatting options
@@ -247,7 +225,7 @@ def has_test_and_tags(pkg_class):
    hashes = env.all_hashes() if env else None

    specs = spack.store.db.query(hashes=hashes)
    specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
    specs = list(filter(lambda s: has_test_method(s.package_class), specs))

    spack.cmd.display_specs(specs, long=True)


@@ -24,7 +24,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v%d.%d" % spack.spack_version_info[:2]
tutorial_branch = "releases/v0.16"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

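Note: the newer side of the tutorial hunk derives the branch name from Spack's own version tuple instead of hard-coding it; the expression is ordinary printf-style formatting:

    # spack_version_info is a tuple like (0, 17, 0); only major.minor is used
    spack_version_info = (0, 17, 0)  # illustrative value
    tutorial_branch = "releases/v%d.%d" % spack_version_info[:2]
    print(tutorial_branch)  # releases/v0.17
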
@@ -16,6 +16,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs

import spack.architecture
import spack.compilers
import spack.error
import spack.spec

@@ -20,11 +20,11 @@
import llnl.util.lang
import llnl.util.tty as tty

import spack.architecture
import spack.compiler
import spack.config
import spack.error
import spack.paths
import spack.platforms
import spack.spec
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
@@ -192,12 +192,15 @@ def all_compiler_specs(scope=None, init_config=True):


def find_compilers(path_hints=None):
    """Return the list of compilers found in the paths given as arguments.
    """Returns the list of compilers found in the paths given as arguments.

    Args:
        path_hints (list or None): list of path hints where to look for.
            A sensible default based on the ``PATH`` environment variable
            will be used if the value is None

    Returns:
        List of compilers found
    """
    if path_hints is None:
        path_hints = get_path('PATH')
@@ -239,30 +242,6 @@ def remove_errors(item):
    )


def find_new_compilers(path_hints=None, scope=None):
    """Same as ``find_compilers`` but return only the compilers that are not
    already in compilers.yaml.

    Args:
        path_hints (list or None): list of path hints where to look for.
            A sensible default based on the ``PATH`` environment variable
            will be used if the value is None
        scope (str): scope to look for a compiler. If None consider the
            merged configuration.
    """
    compilers = find_compilers(path_hints)
    compilers_not_in_config = []
    for c in compilers:
        arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
        same_specs = compilers_for_spec(
            c.spec, arch_spec, scope=scope, init_config=False
        )
        if not same_specs:
            compilers_not_in_config.append(c)

    return compilers_not_in_config


def supported_compilers():
    """Return a set of names of compilers supported by Spack.

@@ -310,9 +289,8 @@ def all_compilers(scope=None):


@_auto_compiler_spec
def compilers_for_spec(
    compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
def compilers_for_spec(compiler_spec, arch_spec=None, scope=None,
                       use_cache=True, init_config=True):
    """This gets all compilers that satisfy the supplied CompilerSpec.
       Returns an empty list if none are found.
    """
@@ -519,7 +497,7 @@ def all_os_classes():
    """
    classes = []

    platform = spack.platforms.host()
    platform = spack.architecture.platform()
    for os_class in platform.operating_sys.values():
        classes.append(os_class)

@@ -31,10 +31,10 @@
import llnl.util.tty as tty

import spack.abi
import spack.architecture
import spack.compilers
import spack.environment
import spack.error
import spack.platforms
import spack.repo
import spack.spec
import spack.target
@@ -269,7 +269,7 @@ def concretize_architecture(self, spec):
            new_plat = spack.platforms.by_name(platform_spec.architecture.platform)
        else:
            # If no platform anywhere in this spec, grab the default
            new_plat = spack.platforms.host()
            new_plat = spack.architecture.platform()

        # Get nearest spec with relevant platform and an os
        # Generally, same algorithm as finding platform, except we only
@@ -384,8 +384,7 @@ def concretize_variants(self, spec):
        changed = False
        preferred_variants = PackagePrefs.preferred_variants(spec.name)
        pkg_cls = spec.package_class
        for name, entry in pkg_cls.variants.items():
            variant, when = entry
        for name, variant in pkg_cls.variants.items():
            var = spec.variants.get(name, None)
            if var and '*' in var:
                # remove variant wildcard before concretizing
@@ -393,16 +392,12 @@ def concretize_variants(self, spec):
                # multivalue variant, a concrete variant cannot have the value
                # wildcard, and a wildcard does not constrain a variant
                spec.variants.pop(name)
            if name not in spec.variants and any(spec.satisfies(w)
                                                 for w in when):
            if name not in spec.variants:
                changed = True
                if name in preferred_variants:
                    spec.variants[name] = preferred_variants.get(name)
                else:
                    spec.variants[name] = variant.make_default()
            if name in spec.variants and not any(spec.satisfies(w)
                                                 for w in when):
                raise vt.InvalidVariantForSpecError(name, when, spec)

        return changed

@@ -732,12 +727,12 @@ def concretize_specs_together(*abstract_specs, **kwargs):

def _concretize_specs_together_new(*abstract_specs, **kwargs):
    import spack.solver.asp
    concretization_kwargs = {
        'tests': kwargs.get('tests', False),
        'reuse': kwargs.get('reuse', False)
    }
    result = spack.solver.asp.solve(abstract_specs, **concretization_kwargs)
    result.raise_if_unsat()
    result = spack.solver.asp.solve(abstract_specs)

    if not result.satisfiable:
        result.print_cores()
        tty.die("Unsatisfiable spec.")

    return [s.copy() for s in result.specs]


@@ -772,15 +767,10 @@ def make_concretization_repository(abstract_specs):
    abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
    concretization_repository = make_concretization_repository(abstract_specs)

    concretization_kwargs = {
        'tests': kwargs.get('tests', False),
        'reuse': kwargs.get('reuse', False)
    }

    with spack.repo.additional_repository(concretization_repository):
        # Spec from a helper package that depends on all the abstract_specs
        concretization_root = spack.spec.Spec('concretizationroot')
        concretization_root.concretize(**concretization_kwargs)
        concretization_root.concretize(tests=kwargs.get('tests', False))
        # Retrieve the direct dependencies
        concrete_specs = [
            concretization_root[spec.name].copy() for spec in abstract_specs

@@ -47,9 +47,9 @@
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp

import spack.architecture
import spack.compilers
import spack.paths
import spack.platforms
import spack.schema
import spack.schema.bootstrap
import spack.schema.compilers
@@ -84,9 +84,22 @@
all_schemas.update(dict((key, spack.schema.env.schema)
                        for key in spack.schema.env.keys))

#: Path to the default configuration
configuration_defaults_path = (
    'defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')
#: Builtin paths to configuration files in Spack
configuration_paths = (
    # Default configuration scope is the lowest-level scope. These are
    # versioned with Spack and can be overridden by systems, sites or users
    ('defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')),

    # System configuration is per machine.
    # No system-level configs should be checked into spack by default
    ('system', os.path.join(spack.paths.system_etc_path, 'spack')),

    # Site configuration is per spack instance, for sites or projects
    # No site-level configs should be checked into spack by default.
    ('site', os.path.join(spack.paths.etc_path, 'spack')),

    # User configuration can override both spack defaults and site config
    ('user', spack.paths.user_config_path)
)

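Note: the removed side builds the scope list dynamically (see the _config() hunk below) so the system and user scopes can be dropped when SPACK_DISABLE_LOCAL_CONFIG is set. A condensed sketch of that selection logic, with placeholder paths:

    import os

    def config_scopes(defaults, system, site, user):
        scopes = [('defaults', defaults)]
        local_ok = 'SPACK_DISABLE_LOCAL_CONFIG' not in os.environ
        if local_ok:
            scopes.append(('system', system))
        scopes.append(('site', site))
        if local_ok:
            scopes.append(('user', user))
        return scopes

    print(config_scopes('/spack/etc/defaults', '/etc/spack',
                        '/spack/etc', '~/.spack'))
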
#: Hard-coded default values for some key configuration options.
|
||||
@@ -751,7 +764,7 @@ def override(path_or_scope, value=None):
|
||||
|
||||
def _add_platform_scope(cfg, scope_type, name, path):
|
||||
"""Add a platform-specific subdirectory for the current platform."""
|
||||
platform = spack.platforms.host().name
|
||||
platform = spack.architecture.platform().name
|
||||
plat_name = '%s/%s' % (name, platform)
|
||||
plat_path = os.path.join(path, platform)
|
||||
cfg.push_scope(scope_type(plat_name, plat_path))
|
||||
@@ -790,37 +803,8 @@ def _config():
|
||||
cfg = Configuration()
|
||||
|
||||
# first do the builtin, hardcoded defaults
|
||||
builtin = InternalConfigScope('_builtin', config_defaults)
|
||||
cfg.push_scope(builtin)
|
||||
|
||||
# Builtin paths to configuration files in Spack
|
||||
configuration_paths = [
|
||||
# Default configuration scope is the lowest-level scope. These are
|
||||
# versioned with Spack and can be overridden by systems, sites or users
|
||||
configuration_defaults_path,
|
||||
]
|
||||
|
||||
disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
|
||||
|
||||
# System configuration is per machine.
|
||||
# This is disabled if user asks for no local configuration.
|
||||
if not disable_local_config:
|
||||
configuration_paths.append(
|
||||
('system', spack.paths.system_config_path),
|
||||
)
|
||||
|
||||
# Site configuration is per spack instance, for sites or projects
|
||||
# No site-level configs should be checked into spack by default.
|
||||
configuration_paths.append(
|
||||
('site', os.path.join(spack.paths.etc_path, 'spack')),
|
||||
)
|
||||
|
||||
# User configuration can override both spack defaults and site config
|
||||
# This is disabled if user asks for no local configuration.
|
||||
if not disable_local_config:
|
||||
configuration_paths.append(
|
||||
('user', spack.paths.user_config_path)
|
||||
)
|
||||
defaults = InternalConfigScope('_builtin', config_defaults)
|
||||
cfg.push_scope(defaults)
|
||||
|
||||
# add each scope and its platform-specific directory
|
||||
for name, path in configuration_paths:
|
||||
@@ -1067,36 +1051,22 @@ def get_valid_type(path):
    path given, the priority order is ``list``, ``dict``, ``str``, ``bool``,
    ``int``, ``float``.
    """
    types = {
        'array': list,
        'object': syaml.syaml_dict,
        'string': str,
        'boolean': bool,
        'integer': int,
        'number': float
    }

    components = process_config_path(path)
    section = components[0]

    # Use None to construct the test data
    test_data = None
    for component in reversed(components):
        test_data = {component: test_data}

    try:
        validate(test_data, section_schemas[section])
    except (ConfigFormatError, AttributeError) as e:
        jsonschema_error = e.validation_error
        if jsonschema_error.validator == 'type':
            return types[jsonschema_error.validator_value]()
        elif jsonschema_error.validator == 'anyOf':
            for subschema in jsonschema_error.validator_value:
                anyof_type = subschema.get('type')
                if anyof_type is not None:
                    return types[anyof_type]()
        else:
            return type(None)
    for type in (list, syaml.syaml_dict, str, bool, int, float):
        try:
            ret = type()
            test_data = ret
            for component in reversed(components):
                test_data = {component: test_data}
            validate(test_data, section_schemas[section])
            return ret
        except (ConfigFormatError, AttributeError):
            # This type won't validate, try the next one.
            # We also catch AttributeError because the undefined dict ordering
            # in python 3.5 can cause the validator to raise an AttributeError
            # instead of a ConfigFormatError.
            pass
    raise ConfigError("Cannot determine valid type for path '%s'." % path)

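As a quick illustration of what the rewritten loop returns, a hedged sketch (the section paths are illustrative; the actual return values depend on the schemas shipped with the checkout being diffed here):

    # hypothetical: the first *empty instance* that validates is returned
    get_valid_type('config:install_tree')    # may return '' (str validates)
    get_valid_type('packages:all:variants')  # may return '' or [] per schema
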
@@ -1315,7 +1285,6 @@ class ConfigFormatError(ConfigError):
    def __init__(self, validation_error, data, filename=None, line=None):
        # spack yaml has its own file/line marks -- try to find them
        # we prioritize these over the inputs
        self.validation_error = validation_error
        mark = self._get_mark(validation_error, data)
        if mark:
            filename = mark.name

@@ -1,53 +1,6 @@
{
  "images": {
    "alpine:3": {
      "bootstrap": {
        "template": "container/alpine_3.dockerfile"
      },
      "os_package_manager": "apk"
    },
    "amazonlinux:2": {
      "bootstrap": {
        "template": "container/amazonlinux_2.dockerfile"
      },
      "os_package_manager": "yum_amazon"
    },
    "centos:8": {
      "bootstrap": {
        "template": "container/centos_8.dockerfile"
      },
      "os_package_manager": "yum"
    },
    "centos:7": {
      "bootstrap": {
        "template": "container/centos_7.dockerfile"
      },
      "os_package_manager": "yum",
      "build": "spack/centos7",
      "build_tags": {
        "develop": "latest"
      }
    },
    "nvidia/cuda:11.2.1": {
      "bootstrap": {
        "template": "container/cuda_11_2_1.dockerfile",
        "image": "nvidia/cuda:11.2.1-devel"
      },
      "final": {
        "image": "nvidia/cuda:11.2.1-base"
      },
      "os_package_manager": "apt"
    },
    "ubuntu:20.04": {
      "bootstrap": {
        "template": "container/ubuntu_2004.dockerfile"
      },
      "os_package_manager": "apt"
    },
    "ubuntu:18.04": {
      "bootstrap": {
        "template": "container/ubuntu_1804.dockerfile"
      },
      "os_package_manager": "apt",
      "build": "spack/ubuntu-bionic",
      "build_tags": {
@@ -55,22 +8,29 @@
      }
    },
    "ubuntu:16.04": {
      "bootstrap": {
        "template": "container/ubuntu_1604.dockerfile"
      },
      "os_package_manager": "apt",
      "build": "spack/ubuntu-xenial",
      "build_tags": {
        "develop": "latest"
      }
    },
    "centos:7": {
      "os_package_manager": "yum",
      "environment": [],
      "build": "spack/centos7",
      "build_tags": {
        "develop": "latest"
      }
    },
    "centos:6": {
      "os_package_manager": "yum",
      "build": "spack/centos6",
      "build_tags": {
        "develop": "latest"
      }
    }
  },
  "os_package_managers": {
    "apk": {
      "update": "apk update",
      "install": "apk add --no-cache",
      "clean": "true"
    },
    "apt": {
      "update": "apt-get -yqq update && apt-get -yqq upgrade",
      "install": "apt-get -yqq install",
@@ -80,11 +40,6 @@
      "update": "yum update -y && yum install -y epel-release && yum update -y",
      "install": "yum install -y",
      "clean": "rm -rf /var/cache/yum && yum clean all"
    },
    "yum_amazon": {
      "update": "yum update -y && amazon-linux-extras install epel -y",
      "install": "yum install -y",
      "clean": "rm -rf /var/cache/yum && yum clean all"
    }
  }
}

@@ -2,15 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Manages the details on the images used in the various stages."""
"""Manages the details on the images used in the build and the run stage."""
import json
import os.path
import sys

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.util.executable as executable

#: Global variable used to cache in memory the content of images.json
_data = None
@@ -45,12 +39,18 @@ def build_info(image, spack_version):
    # Don't handle error here, as a wrong image should have been
    # caught by the JSON schema
    image_data = data()["images"][image]
    build_image = image_data.get('build', None)
    if not build_image:
        return None, None
    build_image = image_data['build']

    # Translate version from git to docker if necessary
    build_tag = image_data['build_tags'].get(spack_version, spack_version)
    # Try to check if we have a tag for this Spack version
    try:
        # Translate version from git to docker if necessary
        build_tag = image_data['build_tags'].get(spack_version, spack_version)
    except KeyError:
        msg = ('the image "{0}" has no tag for Spack version "{1}" '
               '[valid versions are {2}]')
        msg = msg.format(build_image, spack_version,
                         ', '.join(image_data['build_tags'].keys()))
        raise ValueError(msg)

    return build_image, build_tag

@@ -70,11 +70,6 @@ def os_package_manager_for(image):
    return name


def all_bootstrap_os():
    """Return a list of all the OS that can be used to bootstrap Spack"""
    return list(data()['images'])


def commands_for(package_manager):
    """Returns the commands used to update system repositories, install
    system packages and clean afterwards.
@@ -87,47 +82,3 @@ def commands_for(package_manager):
    """
    info = data()["os_package_managers"][package_manager]
    return info['update'], info['install'], info['clean']


def bootstrap_template_for(image):
    return data()["images"][image]["bootstrap"]["template"]


def _verify_ref(url, ref, enforce_sha):
    # Do a checkout in a temporary directory
    msg = 'Cloning "{0}" to verify ref "{1}"'.format(url, ref)
    tty.info(msg, stream=sys.stderr)
    git = executable.which('git', required=True)
    with fs.temporary_dir():
        git('clone', '-q', url, '.')
        sha = git('rev-parse', '-q', ref + '^{commit}',
                  output=str, error=os.devnull, fail_on_error=False)
        if git.returncode:
            msg = '"{0}" is not a valid reference for "{1}"'
            raise RuntimeError(msg.format(sha, url))

        if enforce_sha:
            ref = sha.strip()

    return ref


def checkout_command(url, ref, enforce_sha, verify):
    """Return the checkout command to be used in the bootstrap phase.

    Args:
        url (str): url of the Spack repository
        ref (str): either a branch name, a tag or a commit sha
        enforce_sha (bool): if true turns every branch name or tag into
            the corresponding commit sha
        verify (bool): if true performs a clone to check that the ref
            is valid before returning the command
    """
    url = url or 'https://github.com/spack/spack.git'
    ref = ref or 'develop'
    enforce_sha, verify = bool(enforce_sha), bool(verify)
    # If we want to enforce a sha or verify the ref we need
    # to checkout the repository locally
    if enforce_sha or verify:
        ref = _verify_ref(url, ref, enforce_sha)

    command = 'git clone {0} . && git checkout {1} '.format(url, ref)
    return command

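To make the contract of `checkout_command` concrete, a hedged usage sketch (the output shown is illustrative; with `enforce_sha` or `verify` set to true a real clone is performed by `_verify_ref` first):

    # hypothetical call relying on the documented defaults
    cmd = checkout_command(None, None, enforce_sha=False, verify=False)
    # cmd == 'git clone https://github.com/spack/spack.git . && git checkout develop '
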
@@ -12,14 +12,7 @@
import spack.schema.env
import spack.tengine as tengine
import spack.util.spack_yaml as syaml
from spack.container.images import (
    bootstrap_template_for,
    build_info,
    checkout_command,
    commands_for,
    data,
    os_package_manager_for,
)
from spack.container.images import build_info, commands_for, os_package_manager_for

#: Caches all the writers that are currently supported
_writer_factory = {}
@@ -38,94 +31,23 @@ def _decorator(factory):
    return _decorator


def create(configuration, last_phase=None):
def create(configuration):
    """Returns a writer that conforms to the configuration passed as input.

    Args:
        configuration (dict): how to generate the current recipe
        last_phase (str): last phase to be printed or None to print them all
        configuration: how to generate the current recipe
    """
    name = ev.config_dict(configuration)['container']['format']
    return _writer_factory[name](configuration, last_phase)
    return _writer_factory[name](configuration)


def recipe(configuration, last_phase=None):
def recipe(configuration):
    """Returns a recipe that conforms to the configuration passed as input.

    Args:
        configuration (dict): how to generate the current recipe
        last_phase (str): last phase to be printed or None to print them all
        configuration: how to generate the current recipe
    """
    return create(configuration, last_phase)()


def _stage_base_images(images_config):
    """Return a tuple with the base images to be used at the various stages.

    Args:
        images_config (dict): configuration under container:images
    """
    # If we have custom base images, just return them verbatim.
    build_stage = images_config.get('build', None)
    if build_stage:
        final_stage = images_config['final']
        return None, build_stage, final_stage

    # Check the operating system: this will be the base of the bootstrap
    # stage, if there, and of the final stage.
    operating_system = images_config.get('os', None)

    # Check the OS is mentioned in the internal data stored in a JSON file
    images_json = data()['images']
    if not any(os_name == operating_system for os_name in images_json):
        msg = ('invalid operating system name "{0}". '
               '[Allowed values are {1}]')
        msg = msg.format(operating_system, ', '.join(data()['images']))
        raise ValueError(msg)

    # Retrieve the build stage
    spack_info = images_config['spack']
    if isinstance(spack_info, dict):
        build_stage = 'bootstrap'
    else:
        spack_version = images_config['spack']
        image_name, tag = build_info(operating_system, spack_version)
        build_stage = 'bootstrap'
        if image_name:
            build_stage = ':'.join([image_name, tag])

    # Retrieve the bootstrap stage
    bootstrap_stage = None
    if build_stage == 'bootstrap':
        bootstrap_stage = images_json[operating_system]['bootstrap'].get(
            'image', operating_system
        )

    # Retrieve the final stage
    final_stage = images_json[operating_system].get(
        'final', {'image': operating_system}
    )['image']

    return bootstrap_stage, build_stage, final_stage


def _spack_checkout_config(images_config):
    spack_info = images_config['spack']

    url = 'https://github.com/spack/spack.git'
    ref = 'develop'
    resolve_sha, verify = False, False

    # Config specific values may override defaults
    if isinstance(spack_info, dict):
        url = spack_info.get('url', url)
        ref = spack_info.get('ref', ref)
        resolve_sha = spack_info.get('resolve_sha', resolve_sha)
        verify = spack_info.get('verify', verify)
    else:
        ref = spack_info

    return url, ref, resolve_sha, verify
    return create(configuration)()


class PathContext(tengine.Context):
@@ -133,34 +55,41 @@ class PathContext(tengine.Context):
    install software in a common location and make it available
    directly via PATH.
    """
    def __init__(self, config, last_phase):
    def __init__(self, config):
        self.config = ev.config_dict(config)
        self.container_config = self.config['container']

        # Operating system tag as written in the configuration file
        self.operating_system_key = self.container_config['images'].get('os')
        # Get base images and verify the OS
        bootstrap, build, final = _stage_base_images(
            self.container_config['images']
        )
        self.bootstrap_image = bootstrap
        self.build_image = build
        self.final_image = final

        # Record the last phase
        self.last_phase = last_phase

    @tengine.context_property
    def run(self):
        """Information related to the run image."""
        images_config = self.container_config['images']

        # Check if we have custom images
        image = images_config.get('final', None)
        # If not use the base OS image
        if image is None:
            image = images_config['os']

        Run = collections.namedtuple('Run', ['image'])
        return Run(image=self.final_image)
        return Run(image=image)

    @tengine.context_property
    def build(self):
        """Information related to the build image."""
        images_config = self.container_config['images']

        # Check if we have custom images
        image = images_config.get('build', None)

        # If not select the correct build image based on OS and Spack version
        if image is None:
            operating_system = images_config['os']
            spack_version = images_config['spack']
            image_name, tag = build_info(operating_system, spack_version)
            image = ':'.join([image_name, tag])

        Build = collections.namedtuple('Build', ['image'])
        return Build(image=self.build_image)
        return Build(image=image)

    @tengine.context_property
    def strip(self):
@@ -284,39 +213,6 @@ def extra_instructions(self):
    def labels(self):
        return self.container_config.get('labels', {})

    @tengine.context_property
    def bootstrap(self):
        """Information related to the bootstrap image."""
        images_config = self.container_config['images']
        bootstrap_recipe = None
        if self.bootstrap_image:
            config_args = _spack_checkout_config(images_config)
            command = checkout_command(*config_args)
            template_path = bootstrap_template_for(self.operating_system_key)
            env = tengine.make_environment()
            context = {"bootstrap": {
                "image": self.bootstrap_image,
                "spack_checkout": command
            }}
            bootstrap_recipe = env.get_template(template_path).render(**context)

        Bootstrap = collections.namedtuple('Bootstrap', ['image', 'recipe'])
        return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

    @tengine.context_property
    def render_phase(self):
        render_bootstrap = bool(self.bootstrap_image)
        render_build = not (self.last_phase == 'bootstrap')
        render_final = self.last_phase in (None, 'final')
        Render = collections.namedtuple(
            'Render', ['bootstrap', 'build', 'final']
        )
        return Render(
            bootstrap=render_bootstrap,
            build=render_build,
            final=render_final
        )

    def __call__(self):
        """Returns the recipe as a string"""
        env = tengine.make_environment()

@@ -1024,7 +1024,12 @@ def _write(self, type, value, traceback):
            raise

    def _read(self):
        """Re-read Database from the data in the set location. This does no locking."""
        """Re-read Database from the data in the set location.

        This does no locking, with one exception: it will automatically
        try to regenerate a missing DB if local. This requires taking a
        write lock.
        """
        if os.path.isfile(self._index_path):
            current_verifier = ''
            if _use_uuid:
@@ -1044,6 +1049,12 @@ def _read(self):
                "No database index file is present, and upstream"
                " databases cannot generate an index file")

        # The file doesn't exist, try to traverse the directory.
        # reindex() takes its own write lock, so no lock here.
        with lk.WriteTransaction(self.lock):
            self._write(None, None, None)
        self.reindex(spack.store.layout)

    def _add(
            self,
            spec,

@@ -1,14 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_executable, executables_in_path

__all__ = [
    'DetectedPackage',
    'by_executable',
    'executables_in_path',
    'executable_prefix',
    'update_configuration'
]
@@ -1,177 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Define a common data structure to represent external packages and a
function to update packages.yaml given a list of detected packages.

Ideally, each detection method should be placed in a specific subpackage
and implement at least a function that returns a list of DetectedPackage
objects. The update in packages.yaml can then be done using the function
provided here.

The module also contains other functions that might be useful across different
detection mechanisms.
"""
import collections
import os
import os.path

import six

import llnl.util.tty

import spack.config
import spack.spec
import spack.util.spack_yaml

#: Information on a package that has been detected
DetectedPackage = collections.namedtuple(
    'DetectedPackage', ['spec', 'prefix']
)


def _externals_in_packages_yaml():
    """Return all the specs mentioned as externals in packages.yaml"""
    packages_yaml = spack.config.get('packages')
    already_defined_specs = set()
    for pkg_name, package_configuration in packages_yaml.items():
        for item in package_configuration.get('externals', []):
            already_defined_specs.add(spack.spec.Spec(item['spec']))
    return already_defined_specs


def _pkg_config_dict(external_pkg_entries):
    """Generate a package specific config dict according to the packages.yaml schema.

    This does not generate the entire packages.yaml. For example, given some
    external entries for the CMake package, this could return::

        {
            'externals': [{
                'spec': 'cmake@3.17.1',
                'prefix': '/opt/cmake-3.17.1/'
            }, {
                'spec': 'cmake@3.16.5',
                'prefix': '/opt/cmake-3.16.5/'
            }]
        }
    """
    pkg_dict = spack.util.spack_yaml.syaml_dict()
    pkg_dict['externals'] = []
    for e in external_pkg_entries:
        if not _spec_is_valid(e.spec):
            continue

        external_items = [('spec', str(e.spec)), ('prefix', e.prefix)]
        if e.spec.external_modules:
            external_items.append(('modules', e.spec.external_modules))

        if e.spec.extra_attributes:
            external_items.append(
                ('extra_attributes',
                 spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()))
            )

        # external_items.extend(e.spec.extra_attributes.items())
        pkg_dict['externals'].append(
            spack.util.spack_yaml.syaml_dict(external_items)
        )

    return pkg_dict


def _spec_is_valid(spec):
    try:
        str(spec)
    except spack.error.SpackError:
        # It is assumed here that we can at least extract the package name from
        # the spec so we can look up the implementation of
        # determine_spec_details
        msg = 'Constructed spec for {0} does not have a string representation'
        llnl.util.tty.warn(msg.format(spec.name))
        return False

    try:
        spack.spec.Spec(str(spec))
    except spack.error.SpackError:
        llnl.util.tty.warn(
            'Constructed spec has a string representation but the string'
            ' representation does not evaluate to a valid spec: {0}'
            .format(str(spec))
        )
        return False

    return True


def is_executable(file_path):
    """Return True if the path passed as argument is that of an executable"""
    return os.path.isfile(file_path) and os.access(file_path, os.X_OK)


def _convert_to_iterable(single_val_or_multiple):
    x = single_val_or_multiple
    if x is None:
        return []
    elif isinstance(x, six.string_types):
        return [x]
    elif isinstance(x, spack.spec.Spec):
        # Specs are iterable, but a single spec should be converted to a list
        return [x]

    try:
        iter(x)
        return x
    except TypeError:
        return [x]


def executable_prefix(executable_dir):
    """Given a directory where an executable is found, guess the prefix
    (i.e. the "root" directory of that installation) and return it.

    Args:
        executable_dir: directory where an executable is found
    """
    # Given a prefix where an executable is found, assuming that prefix
    # contains /bin/, strip off the 'bin' directory to get a Spack-compatible
    # prefix
    assert os.path.isdir(executable_dir)

    components = executable_dir.split(os.sep)
    if 'bin' not in components:
        return None
    idx = components.index('bin')
    return os.sep.join(components[:idx])
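
A couple of hedged examples of the prefix-guessing rule above (paths are hypothetical):

    executable_prefix('/opt/cmake-3.17.1/bin')  # -> '/opt/cmake-3.17.1'
    executable_prefix('/opt/tools/libexec')     # -> None, no 'bin' component
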
def update_configuration(detected_packages, scope=None, buildable=True):
    """Add the packages passed as arguments to packages.yaml

    Args:
        detected_packages (dict): DetectedPackage objects to be added,
            keyed by package name
        scope (str): configuration scope where to add the detected packages
        buildable (bool): whether the detected packages are buildable or not
    """
    predefined_external_specs = _externals_in_packages_yaml()
    pkg_to_cfg, all_new_specs = {}, []
    for package_name, entries in detected_packages.items():
        new_entries = [
            e for e in entries if (e.spec not in predefined_external_specs)
        ]

        pkg_config = _pkg_config_dict(new_entries)
        all_new_specs.extend([
            spack.spec.Spec(x['spec']) for x in pkg_config.get('externals', [])
        ])
        if buildable is False:
            pkg_config['buildable'] = False
        pkg_to_cfg[package_name] = pkg_config

    pkgs_cfg = spack.config.get('packages', scope=scope)

    pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
    spack.config.set('packages', pkgs_cfg, scope=scope)

    return all_new_specs
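
A hedged sketch of how a detection backend would feed this function (the package, spec and prefix are illustrative):

    # hypothetical: register one detected cmake as a non-buildable external
    detected = {'cmake': [DetectedPackage(spec=spack.spec.Spec('cmake@3.17.1'),
                                          prefix='/opt/cmake-3.17.1')]}
    update_configuration(detected, scope='user', buildable=False)
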
@@ -1,149 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Detection of software installed in the system based on paths inspections
and running executables.
"""
import collections
import os
import os.path
import re

import llnl.util.filesystem
import llnl.util.tty

import spack.util.environment

from .common import (
    DetectedPackage,
    _convert_to_iterable,
    executable_prefix,
    is_executable,
)


def executables_in_path(path_hints=None):
    """Get the paths of all executables available from the current PATH.

    For convenience, this is constructed as a dictionary where the keys are
    the executable paths and the values are the names of the executables
    (i.e. the basename of the executable path).

    There may be multiple paths with the same basename. In this case it is
    assumed there are two different instances of the executable.

    Args:
        path_hints (list): list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable.
    """
    path_hints = path_hints or spack.util.environment.get_path('PATH')
    search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)

    path_to_exe = {}
    # Reverse order of search directories so that an exe in the first PATH
    # entry overrides later entries
    for search_path in reversed(search_paths):
        for exe in os.listdir(search_path):
            exe_path = os.path.join(search_path, exe)
            if is_executable(exe_path):
                path_to_exe[exe_path] = exe
    return path_to_exe


def _group_by_prefix(paths):
    groups = collections.defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
    return groups.items()


def by_executable(packages_to_check, path_hints=None):
    """Return the packages that have been detected on the system, searching
    by path. The result is keyed by package name.

    Args:
        packages_to_check (list): list of packages to be detected
        path_hints (list): list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable.
    """
    path_to_exe_name = executables_in_path(path_hints=path_hints)
    exe_pattern_to_pkgs = collections.defaultdict(list)
    for pkg in packages_to_check:
        if hasattr(pkg, 'executables'):
            for exe in pkg.executables:
                exe_pattern_to_pkgs[exe].append(pkg)

    pkg_to_found_exes = collections.defaultdict(set)
    for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
        compiled_re = re.compile(exe_pattern)
        for path, exe in path_to_exe_name.items():
            if compiled_re.search(exe):
                for pkg in pkgs:
                    pkg_to_found_exes[pkg].add(path)

    pkg_to_entries = collections.defaultdict(list)
    resolved_specs = {}  # spec -> exe found for the spec

    for pkg, exes in pkg_to_found_exes.items():
        if not hasattr(pkg, 'determine_spec_details'):
            llnl.util.tty.warn(
                "{0} must define 'determine_spec_details' in order"
                " for Spack to detect externally-provided instances"
                " of the package.".format(pkg.name))
            continue

        for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
            # TODO: multiple instances of a package can live in the same
            # prefix, and a package implementation can return multiple specs
            # for one prefix, but without additional details (e.g. about the
            # naming scheme which differentiates them), the spec won't be
            # usable.
            specs = _convert_to_iterable(
                pkg.determine_spec_details(prefix, exes_in_prefix)
            )

            if not specs:
                llnl.util.tty.debug(
                    'The following executables in {0} were decidedly not '
                    'part of the package {1}: {2}'
                    .format(prefix, pkg.name, ', '.join(
                        _convert_to_iterable(exes_in_prefix)))
                )

            for spec in specs:
                pkg_prefix = executable_prefix(prefix)

                if not pkg_prefix:
                    msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
                    llnl.util.tty.debug(msg.format(prefix))
                    continue

                if spec in resolved_specs:
                    prior_prefix = ', '.join(
                        _convert_to_iterable(resolved_specs[spec]))

                    llnl.util.tty.debug(
                        "Executables in {0} and {1} are both associated"
                        " with the same spec {2}"
                        .format(prefix, prior_prefix, str(spec)))
                    continue
                else:
                    resolved_specs[spec] = prefix

                try:
                    spec.validate_detection()
                except Exception as e:
                    msg = ('"{0}" has been detected on the system but will '
                           'not be added to packages.yaml [reason={1}]')
                    llnl.util.tty.warn(msg.format(spec, str(e)))
                    continue

                if spec.external_path:
                    pkg_prefix = spec.external_path

                pkg_to_entries[pkg.name].append(
                    DetectedPackage(spec=spec, prefix=pkg_prefix)
                )

    return pkg_to_entries
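
A hedged usage sketch of the detection entry point (package names and paths are illustrative; real callers pass package classes obtained from the repo):

    # hypothetical
    pkgs = [spack.repo.path.get_pkg_class(name) for name in ('cmake', 'ninja')]
    entries = by_executable(pkgs, path_hints=['/usr/bin', '/opt/tools/bin'])
    # entries maps each detected package name to a list of DetectedPackage tuples
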
@@ -89,9 +89,6 @@ def make_when_spec(value):
    value indicating when a directive should be applied.

    """
    if isinstance(value, spack.spec.Spec):
        return value

    # Unsatisfiable conditions are discarded by the caller, and never
    # added to the package class
    if value is False:
@@ -244,23 +241,17 @@ def _wrapper(*args, **kwargs):
            if DirectiveMeta._when_constraints_from_context:
                # Check that directives not yet supporting the when= argument
                # are not used inside the context manager
                if decorated_function.__name__ == 'version':
                if decorated_function.__name__ in ('version', 'variant'):
                    msg = ('directive "{0}" cannot be used within a "when"'
                           ' context since it does not support a "when=" '
                           'argument')
                    msg = msg.format(decorated_function.__name__)
                    raise DirectiveError(msg)

                when_constraints = [
                    spack.spec.Spec(x) for x in
                when_spec_from_context = ' '.join(
                    DirectiveMeta._when_constraints_from_context
                ]
                if kwargs.get('when'):
                    when_constraints.append(spack.spec.Spec(kwargs['when']))
                when_spec = spack.spec.merge_abstract_anonymous_specs(
                    *when_constraints
                )

                when_spec = kwargs.get('when', '') + ' ' + when_spec_from_context
                kwargs['when'] = when_spec

            # If any of the arguments are executors returned by a
@@ -562,8 +553,7 @@ def variant(
        description='',
        values=None,
        multi=None,
        validator=None,
        when=None):
        validator=None):
    """Define a variant for the package. Packager can specify a default
    value as well as a text description.

@@ -582,8 +572,6 @@ def variant(
            logic. It receives the package name, the variant name and a tuple
            of values and should raise an instance of SpackError if the group
            doesn't meet the additional constraints
        when (spack.spec.Spec, bool): optional condition on which the
            variant applies

    Raises:
        DirectiveError: if arguments passed to the directive are invalid
@@ -643,23 +631,14 @@ def _raise_default_not_set(pkg):
    description = str(description).strip()

    def _execute_variant(pkg):
        when_spec = make_when_spec(when)
        when_specs = [when_spec]

        if not re.match(spack.spec.identifier_re, name):
            directive = 'variant'
            msg = "Invalid variant name in {0}: '{1}'"
            raise DirectiveError(directive, msg.format(pkg.name, name))

        if name in pkg.variants:
            # We accumulate when specs, but replace the rest of the variant
            # with the newer values
            _, orig_when = pkg.variants[name]
            when_specs += orig_when

        pkg.variants[name] = (spack.variant.Variant(
        pkg.variants[name] = spack.variant.Variant(
            name, default, description, values, multi, validator
        ), when_specs)
        )
    return _execute_variant


@@ -6,7 +6,6 @@
import errno
import glob
import os
import re
import shutil
import tempfile
from contextlib import contextmanager
@@ -89,8 +88,8 @@ def __init__(self, root, **kwargs):
        self.manifest_file_name = 'install_manifest.json'

    @property
    def hidden_file_regexes(self):
        return (re.escape(self.metadata_dir),)
    def hidden_file_paths(self):
        return (self.metadata_dir,)

    def relative_path_for_spec(self, spec):
        _check_concrete(spec)

@@ -9,7 +9,6 @@
import re
import shutil
import sys
import time

import ruamel.yaml as yaml
import six
@@ -17,27 +16,22 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.color import colorize

import spack.bootstrap
import spack.compilers
import spack.concretize
import spack.config
import spack.error
import spack.hash_types as ht
import spack.hooks
import spack.paths
import spack.repo
import spack.schema.env
import spack.spec
import spack.stage
import spack.store
import spack.subprocess_context
import spack.user_environment as uenv
import spack.util.cpus
import spack.util.environment
import spack.util.hash
import spack.util.lock as lk
import spack.util.parallel
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
@@ -91,7 +85,7 @@
valid_environment_name_re = r'^\w[\w-]*$'

#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 3
lockfile_format_version = 2

# Magic names
# The name of the standalone spec list in the manifest yaml
@@ -102,16 +96,6 @@
default_view_link = 'all'


def installed_specs():
    """
    Returns the specs of packages installed in the active environment or None
    if no packages are installed.
    """
    env = spack.environment.active_environment()
    hashes = env.all_hashes() if env else None
    return spack.store.db.query(hashes=hashes)


def valid_env_name(name):
    return re.match(valid_environment_name_re, name)

@@ -124,7 +108,9 @@ def validate_env_name(name):
    return name


def activate(env, use_env_repo=False):
def activate(
    env, use_env_repo=False, add_view=True, shell='sh', prompt=None
):
    """Activate an environment.

    To activate an environment, we add its configuration scope to the
@@ -135,53 +121,149 @@ def activate(env, use_env_repo=False):
        env (Environment): the environment to activate
        use_env_repo (bool): use the packages exactly as they appear in the
            environment's repository
        add_view (bool): generate commands to add view to path variables
        shell (str): One of `sh`, `csh`, `fish`.
        prompt (str): string to add to the users prompt, or None

    Returns:
        str: Shell commands to activate environment.

    TODO: environment to use the activated spack environment.
    """
    global _active_environment

    # Fail early to avoid ending in an invalid state
    if not isinstance(env, Environment):
        raise TypeError("`env` should be of type {0}".format(Environment.__name__))

    # Check if we need to reinitialize the store due to pushing the configuration
    # below.
    store_before_pushing = spack.config.get('config:install_tree')
    prepare_config_scope(env)
    store_after_pushing = spack.config.get('config:install_tree')
    if store_before_pushing != store_after_pushing:
        # Hack to store the state of the store before activation
        env.store_token = spack.store.reinitialize()

    if use_env_repo:
        spack.repo.path.put_first(env.repo)

    tty.debug("Using environment '%s'" % env.name)

    # Do this last, because setting up the config must succeed first.
    _active_environment = env
    prepare_config_scope(_active_environment)
    if use_env_repo:
        spack.repo.path.put_first(_active_environment.repo)

    tty.debug("Using environment '%s'" % _active_environment.name)

    # Construct the commands to run
    cmds = ''
    if shell == 'csh':
        # TODO: figure out how to make color work for csh
        cmds += 'setenv SPACK_ENV %s;\n' % env.path
        cmds += 'alias despacktivate "spack env deactivate";\n'
        if prompt:
            cmds += 'if (! $?SPACK_OLD_PROMPT ) '
            cmds += 'setenv SPACK_OLD_PROMPT "${prompt}";\n'
            cmds += 'set prompt="%s ${prompt}";\n' % prompt
    elif shell == 'fish':
        if os.getenv('TERM') and 'color' in os.getenv('TERM') and prompt:
            prompt = colorize('@G{%s} ' % prompt, color=True)

        cmds += 'set -gx SPACK_ENV %s;\n' % env.path
        cmds += 'function despacktivate;\n'
        cmds += '    spack env deactivate;\n'
        cmds += 'end;\n'
        #
        # NOTE: We're not changing the fish_prompt function (which is fish's
        # solution to the PS1 variable) here. This is a bit fiddly, and easy to
        # screw up => spend time researching a solution. Feedback welcome.
        #
    else:
        if os.getenv('TERM') and 'color' in os.getenv('TERM') and prompt:
            prompt = colorize('@G{%s} ' % prompt, color=True)

        cmds += 'export SPACK_ENV=%s;\n' % env.path
        cmds += "alias despacktivate='spack env deactivate';\n"
        if prompt:
            cmds += 'if [ -z ${SPACK_OLD_PS1+x} ]; then\n'
            cmds += '    if [ -z ${PS1+x} ]; then\n'
            cmds += "        PS1='$$$$';\n"
            cmds += '    fi;\n'
            cmds += '    export SPACK_OLD_PS1="${PS1}";\n'
            cmds += 'fi;\n'
            cmds += 'export PS1="%s ${PS1}";\n' % prompt

    #
    # NOTE in the fish-shell: Path variables are a special kind of variable
    # used to support colon-delimited path lists including PATH, CDPATH,
    # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
    # become PATH variables.
    #
    try:
        if add_view and default_view_name in env.views:
            with spack.store.db.read_transaction():
                cmds += env.add_default_view_to_shell(shell)
    except (spack.repo.UnknownPackageError,
            spack.repo.UnknownNamespaceError) as e:
        tty.error(e)
        tty.die(
            'Environment view is broken due to a missing package or repo.\n',
            '  To activate without views enabled, activate with:\n',
            '    spack env activate -V {0}\n'.format(env.name),
            '  To remove it and resolve the issue, '
            'force concretize with the command:\n',
            '    spack -e {0} concretize --force'.format(env.name))

    return cmds

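Since `activate` now returns shell code rather than mutating the calling process, the command layer is expected to print its output for the shell wrapper to eval; a hedged sketch (the environment name is illustrative):

    # hypothetical caller
    env = spack.environment.read('myenv')
    print(activate(env, shell='sh', prompt='[myenv]'))  # eval'd by setup-env.sh
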
def deactivate():
    """Undo any configuration or repo settings modified by ``activate()``."""
def deactivate(shell='sh'):
    """Undo any configuration or repo settings modified by ``activate()``.

    Arguments:
        shell (str): One of `sh`, `csh`, `fish`. Shell style to use.

    Returns:
        str: shell commands for `shell` to undo environment variables

    """
    global _active_environment

    if not _active_environment:
        return

    # If we attached a store token on activation, restore the previous state
    # and consume the token
    if hasattr(_active_environment, 'store_token'):
        spack.store.restore(_active_environment.store_token)
        delattr(_active_environment, 'store_token')
    deactivate_config_scope(_active_environment)

    # use _repo so we only remove if a repo was actually constructed
    if _active_environment._repo:
        spack.repo.path.remove(_active_environment._repo)

    tty.debug("Deactivated environment '%s'" % _active_environment.name)
    cmds = ''
    if shell == 'csh':
        cmds += 'unsetenv SPACK_ENV;\n'
        cmds += 'if ( $?SPACK_OLD_PROMPT ) '
        cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
        cmds += 'unsetenv SPACK_OLD_PROMPT;\n'
        cmds += 'unalias despacktivate;\n'
    elif shell == 'fish':
        cmds += 'set -e SPACK_ENV;\n'
        cmds += 'functions -e despacktivate;\n'
        #
        # NOTE: Not changing fish_prompt (above) => no need to restore it here.
        #
    else:
        cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
        cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
        cmds += 'fi;\n'
        cmds += 'unalias despacktivate;\n'
        cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
        cmds += '    if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
        cmds += '        unset PS1; export PS1;\n'
        cmds += '    else\n'
        cmds += '        export PS1="$SPACK_OLD_PS1";\n'
        cmds += '    fi;\n'
        cmds += '    unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
        cmds += 'fi;\n'

    try:
        if default_view_name in _active_environment.views:
            with spack.store.db.read_transaction():
                cmds += _active_environment.rm_default_view_from_shell(shell)
    except (spack.repo.UnknownPackageError,
            spack.repo.UnknownNamespaceError) as e:
        tty.warn(e)
        tty.warn('Could not fully deactivate view due to missing package '
                 'or repo, shell environment may be corrupt.')

    tty.debug("Deactivated environment '%s'" % _active_environment.name)
    _active_environment = None

    return cmds


def active_environment():
    """Returns the active environment when there is any"""
@@ -499,10 +581,6 @@ def regenerate(self, all_specs, roots):
            tty.warn(msg)


def _create_environment(*args, **kwargs):
    return Environment(*args, **kwargs)


class Environment(object):
    def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
        """Create a new environment.
@@ -523,9 +601,6 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
            directory.
        """
        self.path = os.path.abspath(path)
        self.init_file = init_file
        self.with_view = with_view
        self.keep_relative = keep_relative

        self.txlock = lk.Lock(self._transaction_lock_path)

@@ -568,11 +643,6 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
        # If with_view is None, then defer to the view settings determined by
        # the manifest file

    def __reduce__(self):
        return _create_environment, (
            self.path, self.init_file, self.with_view, self.keep_relative
        )

    def _rewrite_relative_paths_on_relocation(self, init_file_dir):
        """When initializing the environment from a manifest file and we plan
        to store the environment in a different directory, we have to rewrite
@@ -1035,7 +1105,7 @@ def is_develop(self, spec):
        """Returns true when the spec is built from local sources"""
        return spec.name in self.dev_specs

    def concretize(self, force=False, tests=False, reuse=False):
    def concretize(self, force=False, tests=False):
        """Concretize user_specs in this environment.

        Only concretizes specs that haven't been concretized yet unless
@@ -1049,8 +1119,6 @@ def concretize(self, force=False, tests=False, reuse=False):
                already concretized
            tests (bool or list or set): False to run no tests, True to test
                all packages, or a list of package names to run tests for some
            reuse (bool): if True try to maximize reuse of already installed
                specs, if False don't account for installation status.

        Returns:
            List of specs that have been concretized. Each entry is a tuple of
@@ -1064,15 +1132,14 @@ def concretize(self, force=False, tests=False, reuse=False):

        # Pick the right concretization strategy
        if self.concretization == 'together':
            return self._concretize_together(tests=tests, reuse=reuse)

            return self._concretize_together(tests=tests)
        if self.concretization == 'separately':
            return self._concretize_separately(tests=tests, reuse=reuse)
            return self._concretize_separately(tests=tests)

        msg = 'concretization strategy not implemented [{0}]'
        raise SpackEnvironmentError(msg.format(self.concretization))

    def _concretize_together(self, tests=False, reuse=False):
    def _concretize_together(self, tests=False):
        """Concretization strategy that concretizes all the specs
        in the same DAG.
        """
@@ -1105,14 +1172,13 @@ def _concretize_together(self, tests=False, reuse=False):
        self.specs_by_hash = {}

        concrete_specs = spack.concretize.concretize_specs_together(
            *self.user_specs, tests=tests, reuse=reuse
        )
            *self.user_specs, tests=tests)
        concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
        for abstract, concrete in concretized_specs:
            self._add_concrete_spec(abstract, concrete)
        return concretized_specs

    def _concretize_separately(self, tests=False, reuse=False):
    def _concretize_separately(self, tests=False):
        """Concretization strategy that concretizes separately one
        user spec after the other.
        """
@@ -1131,62 +1197,14 @@ def _concretize_separately(self, tests=False, reuse=False):
            self._add_concrete_spec(s, concrete, new=False)

        # Concretize any new user specs that we haven't concretized yet
        arguments, root_specs = [], []
        concretized_specs = []
        for uspec, uspec_constraints in zip(
            self.user_specs, self.user_specs.specs_as_constraints
        ):
                self.user_specs, self.user_specs.specs_as_constraints):
            if uspec not in old_concretized_user_specs:
                root_specs.append(uspec)
                arguments.append((uspec_constraints, tests, reuse))

        # Ensure we don't try to bootstrap clingo in parallel
        if spack.config.get('config:concretizer') == 'clingo':
            with spack.bootstrap.ensure_bootstrap_configuration():
                spack.bootstrap.ensure_clingo_importable_or_raise()

        # Ensure all the indexes have been built or updated, since
        # otherwise the processes in the pool may timeout on waiting
        # for a write lock. We do this indirectly by retrieving the
        # provider index, which should in turn trigger the update of
        # all the indexes if there's any need for that.
        _ = spack.repo.path.provider_index

        # Ensure we have compilers in compilers.yaml to avoid that
        # processes try to write the config file in parallel
        _ = spack.compilers.get_compiler_config()

        # Early return if there is nothing to do
        if len(arguments) == 0:
            return []

        # Solve the environment in parallel on Linux
        start = time.time()
        max_processes = min(
            len(arguments),  # Number of specs
            16  # Cap on 16 cores
        )

        # TODO: revisit this print as soon as darwin is parallel too
        msg = 'Starting concretization'
        if sys.platform != 'darwin':
            pool_size = spack.util.parallel.num_processes(max_processes=max_processes)
            if pool_size > 1:
                msg = msg + ' pool with {0} processes'.format(pool_size)
        tty.msg(msg)

        concretized_root_specs = spack.util.parallel.parallel_map(
            _concretize_task, arguments, max_processes=max_processes,
            debug=tty.is_debug()
        )

        finish = time.time()
        tty.msg('Environment concretized in %.2f seconds.' % (finish - start))
        results = []
        for abstract, concrete in zip(root_specs, concretized_root_specs):
            self._add_concrete_spec(abstract, concrete)
            results.append((abstract, concrete))

        return results
                concrete = _concretize_from_constraints(uspec_constraints, tests=tests)
                self._add_concrete_spec(uspec, concrete)
                concretized_specs.append((uspec, concrete))
        return concretized_specs

    def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
        """Concretize and add a single spec to the environment.
@@ -1315,18 +1333,12 @@ def _env_modifications_for_default_view(self, reverse=False):

        return all_mods, errors

    def add_default_view_to_env(self, env_mod):
        """
        Collect the environment modifications to activate an environment using the
        default view. Removes duplicate paths.
    def add_default_view_to_shell(self, shell):
        env_mod = spack.util.environment.EnvironmentModifications()

        Args:
            env_mod (spack.util.environment.EnvironmentModifications): the environment
                modifications object that is modified.
        """
        if default_view_name not in self.views:
            # No default view to add to shell
            return env_mod
            return env_mod.shell_modifications(shell)

        env_mod.extend(uenv.unconditional_environment_modifications(
            self.default_view))
@@ -1341,20 +1353,14 @@ def add_default_view_to_env(self, env_mod):
        for env_var in env_mod.group_by_name():
            env_mod.prune_duplicate_paths(env_var)

        return env_mod
        return env_mod.shell_modifications(shell)

    def rm_default_view_from_env(self, env_mod):
        """
        Collect the environment modifications to deactivate an environment using the
        default view. Reverses the action of ``add_default_view_to_env``.
    def rm_default_view_from_shell(self, shell):
        env_mod = spack.util.environment.EnvironmentModifications()

        Args:
            env_mod (spack.util.environment.EnvironmentModifications): the environment
                modifications object that is modified.
        """
        if default_view_name not in self.views:
            # No default view to add to shell
            return env_mod
            return env_mod.shell_modifications(shell)

        env_mod.extend(uenv.unconditional_environment_modifications(
            self.default_view).reversed())
@@ -1362,7 +1368,7 @@ def rm_default_view_from_env(self, env_mod):
        mods, _ = self._env_modifications_for_default_view(reverse=True)
        env_mod.extend(mods)

        return env_mod
        return env_mod.shell_modifications(shell)

    def _add_concrete_spec(self, spec, concrete, new=True):
        """Called when a new concretized spec is added to the environment.
@@ -1689,12 +1695,12 @@ def _to_lockfile_dict(self):
        concrete_specs = {}
        for spec in self.specs_by_hash.values():
            for s in spec.traverse():
                build_hash = s.build_hash()
                if build_hash not in concrete_specs:
                dag_hash_all = s.build_hash()
                if dag_hash_all not in concrete_specs:
                    spec_dict = s.to_node_dict(hash=ht.build_hash)
                    # Assumes no legacy formats, since this was just created.
                    spec_dict[ht.dag_hash.name] = s.dag_hash()
                    concrete_specs[build_hash] = spec_dict
                    concrete_specs[dag_hash_all] = spec_dict

        hash_spec_list = zip(
            self.concretized_order, self.concretized_user_specs)
@@ -1705,7 +1711,6 @@ def _to_lockfile_dict(self):
            '_meta': {
                'file-type': 'spack-lockfile',
                'lockfile-version': lockfile_format_version,
                'specfile-version': spack.spec.specfile_format_version
            },

            # users specs + hashes are the 'roots' of the environment
@@ -1736,18 +1741,13 @@ def _read_lockfile_dict(self, d):
        root_hashes = set(self.concretized_order)

        specs_by_hash = {}
        for build_hash, node_dict in json_specs_by_hash.items():
            spec = Spec.from_node_dict(node_dict)
            if d['_meta']['lockfile-version'] > 1:
                # Build hash is stored as a key, but not as part of the node dict
                # To ensure build hashes are not recomputed, we reattach here
                setattr(spec, ht.build_hash.attr, build_hash)
            specs_by_hash[build_hash] = spec
        for dag_hash, node_dict in json_specs_by_hash.items():
            specs_by_hash[dag_hash] = Spec.from_node_dict(node_dict)

        for build_hash, node_dict in json_specs_by_hash.items():
        for dag_hash, node_dict in json_specs_by_hash.items():
            for _, dep_hash, deptypes, _ in (
                    Spec.dependencies_from_node_dict(node_dict)):
                specs_by_hash[build_hash]._add_dependency(
                specs_by_hash[dag_hash]._add_dependency(
                    specs_by_hash[dep_hash], deptypes)

        # If we are reading an older lockfile format (which uses dag hashes
@@ -1926,7 +1926,7 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
        written = os.path.exists(self.manifest_path)
        if changed or not written:
            self.raw_yaml = copy.deepcopy(self.yaml)
            with fs.write_tmp_and_move(os.path.realpath(self.manifest_path)) as f:
            with fs.write_tmp_and_move(self.manifest_path) as f:
                _write_yaml(self.yaml, f)

    def __enter__(self):
@@ -1992,7 +1992,7 @@ def _tree_to_display(spec):
            print('')


def _concretize_from_constraints(spec_constraints, tests=False, reuse=False):
def _concretize_from_constraints(spec_constraints, tests=False):
    # Accept only valid constraints from list and concretize spec
    # Get the named spec even if out of order
    root_spec = [s for s in spec_constraints if s.name]
@@ -2011,7 +2011,7 @@ def _concretize_from_constraints(spec_constraints, tests=False, reuse=False):
        if c not in invalid_constraints:
            s.constrain(c)
        try:
            return s.concretized(tests=tests, reuse=reuse)
            return s.concretized(tests=tests)
        except spack.spec.InvalidDependencyError as e:
            invalid_deps_string = ['^' + d for d in e.invalid_deps]
            invalid_deps = [c for c in spec_constraints
@@ -2030,12 +2030,6 @@ def _concretize_from_constraints(spec_constraints, tests=False, reuse=False):
    invalid_constraints.extend(inv_variant_constraints)


def _concretize_task(packed_arguments):
    spec_constraints, tests, reuse = packed_arguments
    with tty.SuppressOutput(msg_enabled=False):
        return _concretize_from_constraints(spec_constraints, tests, reuse)


def make_repo_path(root):
    """Make a RepoPath from the repo subdirectories in an environment."""
    path = spack.repo.RepoPath()
@@ -2153,17 +2147,14 @@ def is_latest_format(manifest):


@contextlib.contextmanager
def no_active_environment():
    """Deactivate the active environment for the duration of the context. Has no
    effect when there is no active environment."""
    env = active_environment()
def deactivate_environment():
    """Deactivate an active environment for the duration of the context."""
    global _active_environment
    current, _active_environment = _active_environment, None
    try:
        deactivate()
        yield
    finally:
        # TODO: we don't handle `use_env_repo` here.
        if env:
            activate(env)
        _active_environment = current


class SpackEnvironmentError(spack.error.SpackError):

@@ -1,59 +0,0 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from .environment import (
|
||||
Environment,
|
||||
SpackEnvironmentError,
|
||||
activate,
|
||||
active,
|
||||
active_environment,
|
||||
all_environment_names,
|
||||
all_environments,
|
||||
config_dict,
|
||||
create,
|
||||
deactivate,
|
||||
default_manifest_yaml,
|
||||
default_view_name,
|
||||
display_specs,
|
||||
exists,
|
||||
installed_specs,
|
||||
is_env_dir,
|
||||
is_latest_format,
|
||||
lockfile_name,
|
||||
manifest_file,
|
||||
manifest_name,
|
||||
no_active_environment,
|
||||
read,
|
||||
root,
|
||||
spack_env_var,
|
||||
update_yaml,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'Environment',
|
||||
'SpackEnvironmentError',
|
||||
'activate',
|
||||
'active',
|
||||
'active_environment',
|
||||
'all_environment_names',
|
||||
'all_environments',
|
||||
'config_dict',
|
||||
'create',
|
||||
'deactivate',
|
||||
'default_manifest_yaml',
|
||||
'default_view_name',
|
||||
'display_specs',
|
||||
'exists',
|
||||
'installed_specs',
|
||||
'is_env_dir',
|
||||
'is_latest_format',
|
||||
'lockfile_name',
|
||||
'manifest_file',
|
||||
'manifest_name',
|
||||
'no_active_environment',
|
||||
'read',
|
||||
'root',
|
||||
'spack_env_var',
|
||||
'update_yaml',
|
||||
]
|
||||
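The re-export list above means downstream code only ever needs the package namespace. A usage sketch, assuming a Spack checkout on sys.path:

import spack.environment as ev

env = ev.active_environment()
if env is None:
    print('no environment is active')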
@@ -1,165 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.tty as tty
from llnl.util.tty.color import colorize

import spack.environment as ev
import spack.repo
import spack.store
from spack.util.environment import EnvironmentModifications


def activate_header(env, shell, prompt=None):
    # Construct the commands to run
    cmds = ''
    if shell == 'csh':
        # TODO: figure out how to make color work for csh
        cmds += 'setenv SPACK_ENV %s;\n' % env.path
        cmds += 'alias despacktivate "spack env deactivate";\n'
        if prompt:
            cmds += 'if (! $?SPACK_OLD_PROMPT ) '
            cmds += 'setenv SPACK_OLD_PROMPT "${prompt}";\n'
            cmds += 'set prompt="%s ${prompt}";\n' % prompt
    elif shell == 'fish':
        if 'color' in os.getenv('TERM', '') and prompt:
            prompt = colorize('@G{%s} ' % prompt, color=True)

        cmds += 'set -gx SPACK_ENV %s;\n' % env.path
        cmds += 'function despacktivate;\n'
        cmds += '    spack env deactivate;\n'
        cmds += 'end;\n'
        #
        # NOTE: We're not changing the fish_prompt function (which is fish's
        # solution to the PS1 variable) here. This is a bit fiddly, and easy to
        # screw up => spend time researching a solution. Feedback welcome.
        #
    else:
        if 'color' in os.getenv('TERM', '') and prompt:
            prompt = colorize('@G{%s} ' % prompt, color=True)

        cmds += 'export SPACK_ENV=%s;\n' % env.path
        cmds += "alias despacktivate='spack env deactivate';\n"
        if prompt:
            cmds += 'if [ -z ${SPACK_OLD_PS1+x} ]; then\n'
            cmds += '    if [ -z ${PS1+x} ]; then\n'
            cmds += "        PS1='$$$$';\n"
            cmds += '    fi;\n'
            cmds += '    export SPACK_OLD_PS1="${PS1}";\n'
            cmds += 'fi;\n'
            cmds += 'export PS1="%s ${PS1}";\n' % prompt

    return cmds


def deactivate_header(shell):
    cmds = ''
    if shell == 'csh':
        cmds += 'unsetenv SPACK_ENV;\n'
        cmds += 'if ( $?SPACK_OLD_PROMPT ) '
        cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
        cmds += 'unsetenv SPACK_OLD_PROMPT;\n'
        cmds += 'unalias despacktivate;\n'
    elif shell == 'fish':
        cmds += 'set -e SPACK_ENV;\n'
        cmds += 'functions -e despacktivate;\n'
        #
        # NOTE: Not changing fish_prompt (above) => no need to restore it here.
        #
    else:
        cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
        cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
        cmds += 'fi;\n'
        cmds += 'unalias despacktivate;\n'
        cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
        cmds += '    if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
        cmds += '        unset PS1; export PS1;\n'
        cmds += '    else\n'
        cmds += '        export PS1="$SPACK_OLD_PS1";\n'
        cmds += '    fi;\n'
        cmds += '    unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
        cmds += 'fi;\n'

    return cmds
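For the POSIX-shell branch of `activate_header`, the emitted snippet is just a few export/alias lines. A condensed, standalone rendering of that branch (prompt save/restore elided; names hypothetical):

def sh_activate_snippet(env_path, prompt=None):
    cmds = 'export SPACK_ENV=%s;\n' % env_path
    cmds += "alias despacktivate='spack env deactivate';\n"
    if prompt:
        cmds += 'export PS1="%s ${PS1}";\n' % prompt
    return cmds

print(sh_activate_snippet('/path/to/env', '[myenv]'))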
def activate(env, use_env_repo=False, add_view=True):
    """
    Activate an environment and append environment modifications

    To activate an environment, we add its configuration scope to the
    existing Spack configuration, and we set active to the current
    environment.

    Arguments:
        env (spack.environment.Environment): the environment to activate
        use_env_repo (bool): use the packages exactly as they appear in the
            environment's repository
        add_view (bool): generate commands to add view to path variables

    Returns:
        spack.util.environment.EnvironmentModifications: Environment variables
        modifications to activate environment.
    """
    ev.activate(env, use_env_repo=use_env_repo)

    env_mods = EnvironmentModifications()

    #
    # NOTE in the fish-shell: Path variables are a special kind of variable
    # used to support colon-delimited path lists including PATH, CDPATH,
    # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
    # become PATH variables.
    #
    try:
        if add_view and ev.default_view_name in env.views:
            with spack.store.db.read_transaction():
                env.add_default_view_to_env(env_mods)
    except (spack.repo.UnknownPackageError,
            spack.repo.UnknownNamespaceError) as e:
        tty.error(e)
        tty.die(
            'Environment view is broken due to a missing package or repo.\n',
            '  To activate without views enabled, activate with:\n',
            '    spack env activate -V {0}\n'.format(env.name),
            '  To remove it and resolve the issue, '
            'force concretize with the command:\n',
            '    spack -e {0} concretize --force'.format(env.name))

    return env_mods
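A caller consumes the returned `EnvironmentModifications` by applying it to the current process. A usage sketch; `read` comes from the re-exports shown earlier, and `apply_modifications` is assumed to be the method Spack exposes for this:

import spack.environment as ev

env = ev.read('myenv')               # hypothetical environment name
mods = activate(env, add_view=True)
mods.apply_modifications()           # mutate os.environ accordingly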
def deactivate():
    """
    Deactivate an environment and collect corresponding environment modifications.

    Note: unloads the environment in its current state, not in the state it was
    loaded in, meaning that specs that were removed from the spack environment
    after activation are not unloaded.

    Returns:
        spack.util.environment.EnvironmentModifications: Environment variables
        modifications to deactivate environment.
    """
    env_mods = EnvironmentModifications()
    active = ev.active_environment()

    if active is None:
        return env_mods

    if ev.default_view_name in active.views:
        try:
            with spack.store.db.read_transaction():
                active.rm_default_view_from_env(env_mods)
        except (spack.repo.UnknownPackageError,
                spack.repo.UnknownNamespaceError) as e:
            tty.warn(e)
            tty.warn('Could not fully deactivate view due to missing package '
                     'or repo, shell environment may be corrupt.')

    ev.deactivate()

    return env_mods
@@ -117,24 +117,11 @@ class SpecError(SpackError):


class UnsatisfiableSpecError(SpecError):
    """
    Raised when a spec conflicts with package constraints.

    For original concretizer, provide the requirement that was violated when
    raising.
    """
    def __init__(self, provided, required=None, constraint_type=None, conflicts=None):
        # required is only set by the original concretizer.
        # clingo concretizer handles error messages differently.
        if required is not None:
            assert not conflicts  # can't mix formats
            super(UnsatisfiableSpecError, self).__init__(
                "%s does not satisfy %s" % (provided, required))
        else:
            indented = ['  %s\n' % conflict for conflict in conflicts]
            conflict_msg = ''.join(indented)
            msg = '%s is unsatisfiable, conflicts are:\n%s' % (provided, conflict_msg)
            super(UnsatisfiableSpecError, self).__init__(msg)
    """Raised when a spec conflicts with package constraints.

    Provide the requirement that was violated when raising."""
    def __init__(self, provided, required, constraint_type):
        super(UnsatisfiableSpecError, self).__init__(
            "%s does not satisfy %s" % (provided, required))
        self.provided = provided
        self.required = required
        self.constraint_type = constraint_type
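The conflicts branch of the constructor reduces to plain message assembly. Extracted as a standalone helper (hypothetical name, mirrors the formatting above):

def unsatisfiable_message(provided, conflicts):
    indented = ['  %s\n' % c for c in conflicts]
    return '%s is unsatisfiable, conflicts are:\n%s' % (provided, ''.join(indented))

print(unsatisfiable_message('hdf5@1.12', ['hdf5 ^mpich', 'hdf5 ^openmpi']))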
@@ -48,11 +48,11 @@
import spack.util.crypto as crypto
import spack.util.pattern as pattern
import spack.util.url as url_util
import spack.util.web
import spack.version
import spack.util.web as web_util
from spack.util.compression import decompressor_for, extension
from spack.util.executable import CommandNotFoundError, which
from spack.util.string import comma_and, quote
from spack.version import Version, ver

#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -350,8 +350,8 @@ def _existing_url(self, url):
        else:
            # Telling urllib to check if url is accessible
            try:
                url, headers, response = spack.util.web.read_from_url(url)
            except spack.util.web.SpackWebError:
                url, headers, response = web_util.read_from_url(url)
            except web_util.SpackWebError:
                msg = "Urllib fetch failed to verify url {0}".format(url)
                raise FailedDownloadError(url, msg)
            return (response.getcode() is None or response.getcode() == 200)
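The final line of `_existing_url` encodes the rule: a `None` return from `getcode()` (non-HTTP schemes) or HTTP 200 counts as "exists". The same check with only the Python 3 standard library (a sketch, not Spack's web utilities):

from urllib.error import URLError
from urllib.request import urlopen

def url_exists(url, timeout=10):
    try:
        response = urlopen(url, timeout=timeout)
    except URLError:
        return False
    code = response.getcode()
    return code is None or code == 200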
@@ -380,8 +380,8 @@ def _fetch_urllib(self, url):

        # Run urllib but grab the mime type from the http headers
        try:
            url, headers, response = spack.util.web.read_from_url(url)
        except spack.util.web.SpackWebError as e:
            url, headers, response = web_util.read_from_url(url)
        except web_util.SpackWebError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
@@ -571,7 +571,7 @@ def archive(self, destination):
        if not self.archive_file:
            raise NoArchiveFileError("Cannot call archive() before fetching.")

        spack.util.web.push_to_url(
        web_util.push_to_url(
            self.archive_file,
            destination,
            keep_original=True)
@@ -750,7 +750,7 @@ def __init__(self, **kwargs):
    @property
    def go_version(self):
        vstring = self.go('version', output=str).split(' ')[2]
        return spack.version.Version(vstring)
        return Version(vstring)

    @property
    def go(self):
@@ -843,7 +843,7 @@ def version_from_git(git_exe):
        """
        version_output = git_exe('--version', output=str)
        m = re.search(GitFetchStrategy.git_version_re, version_output)
        return spack.version.Version(m.group(1))
        return Version(m.group(1))

    @property
    def git(self):
@@ -852,7 +852,7 @@ def git(self):

            # Disable advice for a quieter fetch
            # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
            if self.git_version >= spack.version.Version('1.7.2'):
            if self.git_version >= Version('1.7.2'):
                self._git.add_default_arg('-c')
                self._git.add_default_arg('advice.detachedHead=false')
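`version_from_git` is a regex over `git --version` output. In isolation, with a generic pattern standing in for `GitFetchStrategy.git_version_re` (whose exact form is not shown here):

import re
import subprocess

def git_version():
    # e.g. "git version 2.34.1" -> "2.34.1"
    out = subprocess.run(['git', '--version'],
                         capture_output=True, text=True).stdout
    match = re.search(r'git version (\S+)', out)
    return match.group(1) if match else None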
@@ -895,52 +895,25 @@ def fetch(self):
            tty.debug('Already fetched {0}'.format(self.stage.source_path))
            return

        self.clone(commit=self.commit, branch=self.branch, tag=self.tag)

    def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
        """
        Clone a repository to a path.

        This method handles cloning from git, but does not require a stage.

        Arguments:
            dest (str or None): The path into which the code is cloned. If None,
                requires a stage and uses the stage's source path.
            commit (str or None): A commit to fetch from the remote. Only one of
                commit, branch, and tag may be non-None.
            branch (str or None): A branch to fetch from the remote.
            tag (str or None): A tag to fetch from the remote.
            bare (bool): Execute a "bare" git clone (--bare option to git)
        """
        # Default to spack source path
        dest = dest or self.stage.source_path
        tty.debug('Cloning git repository: {0}'.format(self._repo_info()))

        git = self.git
        debug = spack.config.get('config:debug')

        if bare:
            # We don't need to worry about which commit/branch/tag is checked out
            clone_args = ['clone', '--bare']
            if not debug:
                clone_args.append('--quiet')
            clone_args.extend([self.url, dest])
            git(*clone_args)
        elif commit:
        if self.commit:
            # Need to do a regular clone and check out everything if
            # they asked for a particular commit.
            debug = spack.config.get('config:debug')

            clone_args = ['clone', self.url]
            if not debug:
                clone_args.insert(1, '--quiet')
            with temp_cwd():
                git(*clone_args)
                repo_name = get_single_file('.')
                if self.stage:
                    self.stage.srcdir = repo_name
                shutil.move(repo_name, dest)
                self.stage.srcdir = repo_name
                shutil.move(repo_name, self.stage.source_path)

            with working_dir(dest):
                checkout_args = ['checkout', commit]
            with working_dir(self.stage.source_path):
                checkout_args = ['checkout', self.commit]
                if not debug:
                    checkout_args.insert(1, '--quiet')
                git(*checkout_args)
@@ -948,18 +921,18 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
        else:
            # Can be more efficient if not checking out a specific commit.
            args = ['clone']
            if not debug:
            if not spack.config.get('config:debug'):
                args.append('--quiet')

            # If we want a particular branch ask for it.
            if branch:
                args.extend(['--branch', branch])
            elif tag and self.git_version >= spack.version.ver('1.8.5.2'):
                args.extend(['--branch', tag])
            if self.branch:
                args.extend(['--branch', self.branch])
            elif self.tag and self.git_version >= ver('1.8.5.2'):
                args.extend(['--branch', self.tag])

            # Try to be efficient if we're using a new enough git.
            # This checks out only one branch's history
            if self.git_version >= spack.version.ver('1.7.10'):
            if self.git_version >= ver('1.7.10'):
                if self.get_full_repo:
                    args.append('--no-single-branch')
                else:
@@ -969,7 +942,7 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
                # Yet more efficiency: only download a 1-commit deep
                # tree, if the in-use git and protocol permit it.
                if (not self.get_full_repo) and \
                        self.git_version >= spack.version.ver('1.7.1') and \
                        self.git_version >= ver('1.7.1') and \
                        self.protocol_supports_shallow_clone():
                    args.extend(['--depth', '1'])

@@ -977,15 +950,14 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
                git(*args)

                repo_name = get_single_file('.')
                if self.stage:
                    self.stage.srcdir = repo_name
                shutil.move(repo_name, dest)
                self.stage.srcdir = repo_name
                shutil.move(repo_name, self.stage.source_path)

            with working_dir(dest):
            with working_dir(self.stage.source_path):
                # For tags, be conservative and check them out AFTER
                # cloning. Later git versions can do this with clone
                # --branch, but older ones fail.
                if tag and self.git_version < spack.version.ver('1.8.5.2'):
                if self.tag and self.git_version < ver('1.8.5.2'):
                    # pull --tags returns a "special" error code of 1 in
                    # older versions that we have to ignore.
                    # see: https://github.com/git/git/commit/19d122b
@@ -999,7 +971,7 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
                    git(*co_args)

        if self.submodules_delete:
            with working_dir(dest):
            with working_dir(self.stage.source_path):
                for submodule_to_delete in self.submodules_delete:
                    args = ['rm', submodule_to_delete]
                    if not spack.config.get('config:debug'):
@@ -1008,7 +980,7 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):

        # Init submodules if the user asked for them.
        if self.submodules:
            with working_dir(dest):
            with working_dir(self.stage.source_path):
                args = ['submodule', 'update', '--init', '--recursive']
                if not spack.config.get('config:debug'):
                    args.insert(1, '--quiet')
|
||||
|
||||
basename = os.path.basename(parsed_url.path)
|
||||
|
||||
with working_dir(self.stage.path):
|
||||
_, headers, stream = spack.util.web.read_from_url(self.url)
|
||||
|
||||
with open(basename, 'wb') as f:
|
||||
shutil.copyfileobj(stream, f)
|
||||
|
||||
content_type = spack.util.web.get_header(headers, 'Content-type')
|
||||
|
||||
if content_type == 'text/html':
|
||||
warn_content_type_mismatch(self.archive_file or "the archive")
|
||||
|
||||
if self.stage.save_filename:
|
||||
os.rename(
|
||||
os.path.join(self.stage.path, basename),
|
||||
self.stage.save_filename)
|
||||
|
||||
if not self.archive_file:
|
||||
raise FailedDownloadError(self.url)
|
||||
|
||||
|
||||
@fetcher
|
||||
class GCSFetchStrategy(URLFetchStrategy):
|
||||
"""FetchStrategy that pulls from a GCS bucket."""
|
||||
url_attr = 'gs'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
try:
|
||||
super(GCSFetchStrategy, self).__init__(*args, **kwargs)
|
||||
except ValueError:
|
||||
if not kwargs.get('url'):
|
||||
raise ValueError(
|
||||
"GCSFetchStrategy requires a url for fetching.")
|
||||
|
||||
@_needs_stage
|
||||
def fetch(self):
|
||||
import spack.util.web as web_util
|
||||
if self.archive_file:
|
||||
tty.debug('Already downloaded {0}'.format(self.archive_file))
|
||||
return
|
||||
|
||||
parsed_url = url_util.parse(self.url)
|
||||
if parsed_url.scheme != 'gs':
|
||||
raise FetchError(
|
||||
'GCSFetchStrategy can only fetch from gs:// urls.')
|
||||
|
||||
tty.debug('Fetching {0}'.format(self.url))
|
||||
|
||||
basename = os.path.basename(parsed_url.path)
|
||||
|
||||
with working_dir(self.stage.path):
|
||||
_, headers, stream = web_util.read_from_url(self.url)
|
||||
|
||||
@@ -1579,15 +1502,8 @@ def for_package_version(pkg, version):
|
||||
|
||||
check_pkg_attributes(pkg)
|
||||
|
||||
if not isinstance(version, spack.version.Version):
|
||||
version = spack.version.Version(version)
|
||||
|
||||
# if it's a commit, we must use a GitFetchStrategy
|
||||
if version.is_commit and hasattr(pkg, "git"):
|
||||
# Populate the version with comparisons to other commits
|
||||
version.generate_commit_lookup(pkg)
|
||||
fetcher = GitFetchStrategy(git=pkg.git, commit=str(version))
|
||||
return fetcher
|
||||
if not isinstance(version, Version):
|
||||
version = Version(version)
|
||||
|
||||
# If it's not a known version, try to extrapolate one by URL
|
||||
if version not in pkg.versions:
|
||||
|
||||
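`version.is_commit` is what routes a 40-character SHA to `GitFetchStrategy` instead of URL extrapolation. Conceptually it is a hex-string check (a simplification of what spack.version actually does):

import re

COMMIT_RE = re.compile(r'^[0-9a-f]{40}$')

def looks_like_commit(version_string):
    return bool(COMMIT_RE.match(version_string))

assert looks_like_commit('0d092d671f' + '0' * 30)
assert not looks_like_commit('1.8.5.2')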
@@ -19,6 +19,7 @@

import spack.config
import spack.projections
import spack.relocate
import spack.schema.projections
import spack.spec
import spack.store
@@ -60,8 +61,8 @@ def view_copy(src, dst, view, spec=None):

    Use spec and view to generate relocations
    """
    shutil.copy2(src, dst)
    if spec and not spec.external:
    shutil.copyfile(src, dst)
    if spec:
        # Not metadata, we have to relocate it

        # Get information on where to relocate from/to
@@ -72,20 +73,16 @@ def view_copy(src, dst, view, spec=None):
        # will have the old sbang location in their shebangs.
        # TODO: Not sure which one to use...
        import spack.hooks.sbang as sbang

        # Break a package include cycle
        import spack.relocate

        orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root)
        new_sbang = sbang.sbang_shebang_line()

        prefix_to_projection = OrderedDict({
            spec.prefix: view.get_projection_for_spec(spec)})
            spec.prefix: view.get_projection_for_spec(spec),
            spack.paths.spack_root: view._root})

        for dep in spec.traverse():
            if not dep.external:
                prefix_to_projection[dep.prefix] = \
                    view.get_projection_for_spec(dep)
            prefix_to_projection[dep.prefix] = \
                view.get_projection_for_spec(dep)

        if spack.relocate.is_binary(dst):
            spack.relocate.relocate_text_bin(
@@ -99,11 +96,6 @@ def view_copy(src, dst, view, spec=None):
                files=[dst],
                prefixes=prefix_to_projection
            )
        try:
            stat = os.stat(src)
            os.chown(dst, stat.st_uid, stat.st_gid)
        except OSError:
            tty.debug('Can\'t change the permissions for %s' % dst)
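For non-binary files, `prefix_to_projection` drives a straightforward text substitution. Stripped to its essence (an illustration, not spack.relocate; longest-prefix-first ordering is a deliberate choice so nested prefixes do not clobber each other):

def relocate_text_file(path, prefix_to_projection):
    ordered = sorted(prefix_to_projection.items(),
                     key=lambda kv: -len(kv[0]))
    with open(path, encoding='utf-8', errors='surrogateescape') as f:
        text = f.read()
    for old, new in ordered:
        text = text.replace(old, new)
    with open(path, 'w', encoding='utf-8', errors='surrogateescape') as f:
        f.write(text)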
def view_func_parser(parsed_name):
@@ -408,7 +400,7 @@ def merge(self, spec, ignore=None):

        ignore = ignore or (lambda f: False)
        ignore_file = match_predicate(
            self.layout.hidden_file_regexes, ignore)
            self.layout.hidden_file_paths, ignore)

        # check for dir conflicts
        conflicts = tree.find_dir_conflicts(view_dst, ignore_file)
@@ -434,7 +426,7 @@ def unmerge(self, spec, ignore=None):

        ignore = ignore or (lambda f: False)
        ignore_file = match_predicate(
            self.layout.hidden_file_regexes, ignore)
            self.layout.hidden_file_paths, ignore)

        merge_map = tree.get_file_map(view_dst, ignore_file)
        pkg.remove_files_from_view(self, merge_map)
@@ -442,7 +434,11 @@ def unmerge(self, spec, ignore=None):
        # now unmerge the directory tree
        tree.unmerge_directories(view_dst, ignore_file)

    def remove_files(self, files):
    def remove_file(self, src, dest):
        if not os.path.lexists(dest):
            tty.warn("Tried to remove %s which does not exist" % dest)
            return

        def needs_file(spec, file):
            # convert the file we want to remove to a source in this spec
            projection = self.get_projection_for_spec(spec)
@@ -461,23 +457,16 @@ def needs_file(spec, file):
                manifest = {}
            return test_path in manifest

        specs = self.get_all_specs()
        # remove if dest is not owned by any other package in the view
        # This will only be false if two packages are merged into a prefix
        # and have a conflicting file

        for file in files:
            if not os.path.lexists(file):
                tty.warn("Tried to remove %s which does not exist" % file)
                continue

            # remove if file is not owned by any other package in the view
            # This will only be false if two packages are merged into a prefix
            # and have a conflicting file

            # check all specs for whether they own the file. That includes the spec
            # we are currently removing, as we remove files before unlinking the
            # metadata directory.
            if len([s for s in specs if needs_file(s, file)]) <= 1:
                tty.debug("Removing file " + file)
                os.remove(file)
        # check all specs for whether they own the file. That includes the spec
        # we are currently removing, as we remove files before unlinking the
        # metadata directory.
        if len([s for s in self.get_all_specs()
                if needs_file(s, dest)]) <= 1:
            os.remove(dest)

    def check_added(self, spec):
        assert spec.concrete
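Old and new removal logic share one invariant: delete only when at most one spec still claims the file, since the spec being removed may itself appear in the count. As a standalone predicate (names hypothetical):

def safe_to_remove(dest, all_specs, needs_file):
    """True when no *other* spec in the view still owns `dest`."""
    owners = [s for s in all_specs if needs_file(s, dest)]
    return len(owners) <= 1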
@@ -1,26 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import six.moves.urllib.response as urllib_response

import spack.util.url as url_util
import spack.util.web as web_util


def gcs_open(req, *args, **kwargs):
    """Open a reader stream to a blob object on GCS
    """
    import spack.util.gcs as gcs_util

    url = url_util.parse(req.get_full_url())
    gcsblob = gcs_util.GCSBlob(url)

    if not gcsblob.exists():
        raise web_util.SpackWebError('GCS blob {0} does not exist'.format(
            gcsblob.blob_path))
    stream = gcsblob.get_blob_byte_stream()
    headers = gcsblob.get_blob_headers()

    return urllib_response.addinfourl(stream, headers, url)
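`gcs_open` is designed to be registered with urllib, which dispatches a URL's scheme to a handler method named `<scheme>_open`. A minimal Python 3 sketch of that wiring (the handler body is a placeholder for the function above):

import urllib.request

class GSHandler(urllib.request.BaseHandler):
    def gs_open(self, req):  # method name required by urllib's dispatch
        raise NotImplementedError('delegate to gcs_open(req) here')

opener = urllib.request.build_opener(GSHandler())
# opener.open('gs://bucket/blob') now routes through GSHandler.gs_open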
@@ -44,13 +44,6 @@ def attr(self):
    deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')


#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
    deptype=('build', 'link', 'run', 'test'),
    package_hash=False,
    name='process_hash'
)

#: Full hash used in build pipelines to determine when to rebuild packages.
full_hash = SpecHashDescriptor(
    deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')