Compare commits

4 Commits

Author SHA1 Message Date
eugeneswalker
a260bf038d boost: fix v1.78 build with oneapi (#30463) 2022-05-04 10:41:58 -07:00
Sergey Kosukhin
e0045a9daa cray platform: unload cray-mpich (#29898) 2022-04-12 08:50:20 -07:00
Tom Vander Aa
783615cacf intel-oneapi-mpi: set I_MPI_ROOT in setup_dependent_build_environment (#28890)
* setting I_MPI_ROOT is sometimes needed for find_package(MPI) in cmake to work
2022-02-15 08:23:04 -08:00
Thomas Madlener
a91621c6e1 fastjet add v3.3.4, fjcontrib add v1.045 (#22667) 2022-02-15 08:23:00 -08:00
2415 changed files with 15155 additions and 38891 deletions

View File

@@ -16,29 +16,19 @@ body:
attributes:
label: Steps to reproduce the issue
description: |
Fill in the console output from the exact spec you are trying to build.
value: |
Fill in the exact spec you are trying to build and the relevant part of the error message
placeholder: |
```console
$ spack spec -I <spec>
$ spack install <spec>
...
```
- type: textarea
id: error
attributes:
label: Error message
description: |
Please post the error message from spack inside the `<details>` tag below:
value: |
<details><summary>Error message</summary><pre>
...
</pre></details>
validations:
required: true
- type: textarea
id: information
attributes:
label: Information on your system
description: Please include the output of `spack debug report`.
description: Please include the output of `spack debug report`
validations:
required: true
- type: markdown

View File

@@ -31,7 +31,7 @@ jobs:
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -43,7 +43,7 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
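These workflow steps exercise Spack's bootstrapping path: untrusting the binary-buildcache source forces clingo to be bootstrapped through the remaining sources. A minimal sketch of inspecting and toggling the sources by hand from a checkout of this revision (source names match the bootstrap.yaml shown later in this diff):

```console
$ source share/spack/setup-env.sh
$ spack bootstrap list                     # show configured sources and their trust status
$ spack bootstrap untrust github-actions   # disable the binary buildcache source
$ spack -d solve zlib                      # clingo now has to be bootstrapped from sources
$ spack bootstrap trust github-actions     # re-enable it afterwards
```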
@@ -61,15 +61,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Work around CVE-2022-24765
run: |
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
# a breaking behavior. See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
git config --global --add safe.directory /__w/spack/spack
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -81,7 +73,7 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -98,15 +90,7 @@ jobs:
apt-get install -y \
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Work around CVE-2022-24765
run: |
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
# a breaking behavior. See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
git config --global --add safe.directory /__w/spack/spack
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -134,7 +118,7 @@ jobs:
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -143,7 +127,7 @@ jobs:
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -154,12 +138,12 @@ jobs:
- name: Install dependencies
run: |
brew install cmake bison@2.7 tree
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack external find --not-buildable cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -168,13 +152,13 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
steps:
- name: Install dependencies
run: |
brew install tree
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Bootstrap clingo
@@ -188,10 +172,10 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Setup repo and non-root user
@@ -218,15 +202,7 @@ jobs:
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Work around CVE-2022-24765
run: |
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
# a breaking behavior. See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
git config --global --add safe.directory /__w/spack/spack
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- uses: actions/checkout@v2
- name: Setup repo and non-root user
run: |
git --version
@@ -255,15 +231,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Work around CVE-2022-24765
run: |
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
# a breaking behavior. See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
git config --global --add safe.directory /__w/spack/spack
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- uses: actions/checkout@v2
- name: Setup repo and non-root user
run: |
git --version
@@ -276,7 +244,7 @@ jobs:
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack -d gpg list
tree ~/.spack/bootstrap/store/
@@ -288,7 +256,7 @@ jobs:
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- uses: actions/checkout@v2
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -304,11 +272,11 @@ jobs:
brew install gawk tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
- uses: actions/checkout@v2
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap untrust github-actions-v0.2
spack bootstrap untrust github-actions
spack -d gpg list
tree ~/.spack/bootstrap/store/

View File

@@ -13,8 +13,6 @@ on:
paths:
- '.github/workflows/build-containers.yml'
- 'share/spack/docker/*'
- 'share/templates/container/*'
- 'lib/spack/spack/container/*'
# Let's also build & tag Spack containers on releases.
release:
types: [published]
@@ -31,21 +29,15 @@ jobs:
# A matrix of Dockerfile paths, associated tags, and which architectures
# they support.
matrix:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:18.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
[centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
[leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
[ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
[ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le']]
name: Build ${{ matrix.dockerfile[0] }}
steps:
- name: Checkout
uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Set Container Tag Normal (Nightly)
run: |
@@ -60,26 +52,14 @@ jobs:
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
echo "versioned=${versioned}" >> $GITHUB_ENV
- name: Generate the Dockerfile
env:
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
- name: Check ${{ matrix.dockerfile[1] }} Exists
run: |
.github/workflows/generate_spack_yaml_containerize.sh
. share/spack/setup-env.sh
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
exit 1;
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
with:
name: dockerfiles
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
@@ -87,7 +67,7 @@ jobs:
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -95,19 +75,17 @@ jobs:
- name: Log in to DockerHub
if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # @v2
- name: Build & Deploy ${{ matrix.dockerfile[1] }}
uses: docker/build-push-action@7f9d37fa544684fb73bfe4835ed7214c255ce02b # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
file: share/spack/docker/${{matrix.dockerfile[1]}}
platforms: ${{ matrix.dockerfile[2] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: |
spack/${{ env.container }}
spack/${{ env.versioned }}

View File

@@ -1,7 +0,0 @@
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();
if ($proc.ExitCode -ne 0) {
Write-Warning "$_ exited with status code $($proc.ExitCode)"
}

View File

@@ -1,9 +0,0 @@
#!/bin/bash
(echo "spack:" \
&& echo " specs: []" \
&& echo " container:" \
&& echo " format: docker" \
&& echo " images:" \
&& echo " os: \"${SPACK_YAML_OS}\"" \
&& echo " spack:" \
&& echo " ref: ${GITHUB_REF}") > spack.yaml

View File

@@ -24,8 +24,8 @@ jobs:
name: gcc with clang
runs-on: macos-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: 3.9
- name: spack install
@@ -39,8 +39,8 @@ jobs:
runs-on: macos-latest
timeout-minutes: 700
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: 3.9
- name: spack install
@@ -52,8 +52,8 @@ jobs:
name: scipy, mpl, pd
runs-on: macos-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: 3.9
- name: spack install

View File

@@ -1,11 +0,0 @@
# (c) 2021 Lawrence Livermore National Laboratory
Set-Location spack
git config --global user.email "spack@example.com"
git config --global user.name "Test User"
if ($(git branch --show-current) -ne "develop")
{
git branch develop origin/develop
}

View File

@@ -1,4 +0,0 @@
param ($systemFolder, $shortcut)
$start = [System.Environment]::GetFolderPath("$systemFolder")
Invoke-Item "$start\Programs\Spack\$shortcut"

View File

@@ -15,10 +15,10 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install Python Packages
run: |
pip install --upgrade pip
@@ -31,12 +31,12 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools types-six
@@ -57,7 +57,7 @@ jobs:
packages: ${{ steps.filter.outputs.packages }}
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
@@ -96,7 +96,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
concretizer: ['clingo']
include:
- python-version: 2.7
@@ -106,10 +106,10 @@ jobs:
- python-version: 3.9
concretizer: original
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -162,7 +162,7 @@ jobs:
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,${{ matrix.concretizer }}
@@ -171,12 +171,12 @@ jobs:
needs: [ validate, style, changes ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install System packages
run: |
sudo apt-get -y update
@@ -200,7 +200,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: shelltests,linux
@@ -218,7 +218,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -237,12 +237,12 @@ jobs:
needs: [ validate, style, changes ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install System packages
run: |
sudo apt-get -y update
@@ -274,7 +274,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,clingo
@@ -286,10 +286,10 @@ jobs:
matrix:
python-version: [3.8]
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -320,7 +320,7 @@ jobs:
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
fi
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
files: ./coverage.xml
@@ -331,10 +331,10 @@ jobs:
needs: [ validate, style, changes ]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
with:
python-version: '3.10'
python-version: 3.9
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
@@ -350,7 +350,7 @@ jobs:
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,audits

View File

@@ -1,188 +0,0 @@
name: windows tests
on:
push:
branches:
- develop
- releases/**
pull_request:
branches:
- develop
- releases/**
defaults:
run:
shell:
powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
jobs:
validate:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python Packages
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade vermin
- name: vermin (Spack's Core)
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/lib/spack/spack/ spack/lib/spack/llnl/ spack/bin/
- name: vermin (Repositories)
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
- name: Run style tests
run: |
spack style
- name: Verify license headers
run: |
python spack\bin\spack license verify
unittest:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
- name: Unit Test
run: |
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
unittest-cmd:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
- name: Command Unit Test
run: |
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
spack unit-test lib/spack/spack/test/cmd --verbose
buildtest:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Build Test
run: |
spack compiler find
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
spack external find cmake
spack external find ninja
spack install abseil-cpp
generate-installer-test:
needs: [ validate, style ]
runs-on: windows-latest
steps:
- name: Disable Windows Symlinks
run: |
git config --global core.symlinks false
shell:
powershell
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Add Light and Candle to Path
run: |
$env:WIX >> $GITHUB_PATH
- name: Run Installer
run: |
.\spack\share\spack\qa\setup_spack.ps1
spack make-installer -s spack -g SILENT pkg
echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
env:
ProgressPreference: SilentlyContinue
- uses: actions/upload-artifact@v3
with:
name: Windows Spack Installer Bundle
path: ${{ env.installer_root }}\pkg\Spack.exe
- uses: actions/upload-artifact@v3
with:
name: Windows Spack Installer
path: ${{ env.installer_root}}\pkg\Spack.msi
execute-installer:
needs: generate-installer-test
runs-on: windows-latest
defaults:
run:
shell: pwsh
steps:
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
- name: Setup installer directory
run: |
mkdir -p spack_installer
echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
- uses: actions/download-artifact@v3
with:
name: Windows Spack Installer Bundle
path: ${{ env.spack_installer }}
- name: Execute Bundled Installer
run: |
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();
$LASTEXITCODE
env:
ProgressPreference: SilentlyContinue
- uses: actions/download-artifact@v3
with:
name: Windows Spack Installer
path: ${{ env.spack_installer }}
- name: Execute MSI
run: |
$proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();
$LASTEXITCODE

View File

@@ -1,19 +1,3 @@
# v0.17.2 (2022-04-13)
### Spack bugfixes
* Fix --reuse with upstreams set in an environment (#29680)
* config add: fix parsing of validator error to infer type from oneOf (#29475)
* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
* Use Spec.constrain to construct spec lists for stacks (#28783)
* Fix bug occurring when searching for inherited patches in packages (#29574)
* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)
# v0.17.1 (2021-12-23)
### Spack Bugfixes

View File

@@ -1,20 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import subprocess
import sys
def getpywin():
try:
import win32con # noqa
except ImportError:
subprocess.check_call(
[sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
subprocess.check_call(
[sys.executable, "-m", "pip", "-q", "install", "pywin32"])
if __name__ == '__main__':
getpywin()

View File

@@ -1,223 +0,0 @@
:: Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
:: Spack Project Developers. See the top-level COPYRIGHT file for details.
::
:: SPDX-License-Identifier: (Apache-2.0 OR MIT)
::#######################################################################
::
:: This file is part of Spack and sets up the spack environment for batch,
:: This includes environment modules and lmod support,
:: and it also puts spack in your path. The script also checks that at least
:: module support exists, and provides suggestions if it doesn't. Source
:: it like this:
::
:: . /path/to/spack/install/spack_cmd.bat
::
@echo off
set spack=%SPACK_ROOT%\bin\spack
::#######################################################################
:: This is a wrapper around the spack command that forwards calls to
:: 'spack load' and 'spack unload' to shell functions. This in turn
:: allows them to be used to invoke environment modules functions.
::
:: 'spack load' is smarter than just 'load' because it converts its
:: arguments into a unique Spack spec that is then passed to module
:: commands. This allows the user to use packages without knowing all
:: their installation details.
::
:: e.g., rather than requiring a full spec for libelf, the user can type:
::
:: spack load libelf
::
:: This will first find the available libelf module file and use a
:: matching one. If there are two versions of libelf, the user would
:: need to be more specific, e.g.:
::
:: spack load libelf@0.8.13
::
:: This is very similar to how regular spack commands work and it
:: avoids the need to come up with a user-friendly naming scheme for
:: spack module files.
::#######################################################################
:_sp_shell_wrapper
set "_sp_flags="
set "_sp_args="
set "_sp_subcommand="
setlocal enabledelayedexpansion
:: commands have the form '[flags] [subcommand] [args]'
:: flags will always start with '-', e.g. --help or -V
:: subcommands will never start with '-'
:: everything after the subcommand is an arg
for %%x in (%*) do (
set t="%%~x"
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
:: We already have a subcommand, processing args now
set "_sp_args=!_sp_args! !t!"
) else (
set "_sp_flags=!_sp_flags! !t!"
shift
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
shift
) else (
set "_sp_args=!_sp_args! !t!"
shift
)
)
:: --help, -h and -V flags don't require further output parsing.
:: If we encounter, execute and exit
if defined _sp_flags (
if NOT "%_sp_flags%"=="%_sp_flags:-h=%" (
python "%spack%" %_sp_flags%
exit /B 0
) else if NOT "%_sp_flags%"=="%_sp_flags:--help=%" (
python "%spack%" %_sp_flags%
exit /B 0
) else if NOT "%_sp_flags%"=="%_sp_flags:-V=%" (
python "%spack%" %_sp_flags%
exit /B 0
)
)
:: pass parsed variables outside of local scope. Need to do
:: this because delayedexpansion can only be set by setlocal
echo %_sp_flags%>flags
echo %_sp_args%>args
echo %_sp_subcommand%>subcmd
endlocal
set /p _sp_subcommand=<subcmd
set /p _sp_flags=<flags
set /p _sp_args=<args
set str_subcommand=%_sp_subcommand:"='%
set str_flags=%_sp_flags:"='%
set str_args=%_sp_args:"='%
if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
if "%str_args%"=="ECHO is off." (set "_sp_args=")
del subcmd
del flags
del args
:: Filter out some commands. For any others, just run the command.
if "%_sp_subcommand%" == "cd" (
goto :case_cd
) else if "%_sp_subcommand%" == "env" (
goto :case_env
) else if "%_sp_subcommand%" == "load" (
goto :case_load
) else if "%_sp_subcommand%" == "unload" (
goto :case_load
) else (
goto :default_case
)
::#######################################################################
:case_cd
:: Check for --help or -h
:: TODO: This is not exactly the same as setup-env.
:: In setup-env, '--help' or '-h' must follow the cd
:: Here, they may be anywhere in the args
if defined _sp_args (
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
python "%spack%" cd -h
goto :end_switch
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
python "%spack%" cd -h
goto :end_switch
)
)
for /F "tokens=* USEBACKQ" %%F in (
`python "%spack%" location %_sp_args%`) do (
set "LOC=%%F"
)
for %%Z in ("%LOC%") do if EXIST %%~sZ\NUL (cd /d "%LOC%")
goto :end_switch
:case_env
:: If no args or args contain --bat or -h/--help: just execute.
if NOT defined _sp_args (
goto :default_case
)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
for /f "tokens=* USEBACKQ" %%I in (
`call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
) do %%I
) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
for /f "tokens=* USEBACKQ" %%I in (
`call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
) do %%I
) else (
goto :default_case
)
goto :end_switch
:case_load
:: If args contain --sh, --csh, or -h/--help: just execute.
if defined _sp_args (
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
)
)
for /f "tokens=* USEBACKQ" %%I in (
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
)
goto :end_switch
:case_unload
goto :case_load
:default_case
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
goto :end_switch
:end_switch
exit /B %ERRORLEVEL%
::########################################################################
:: Prepends directories to path, if they exist.
:: pathadd /path/to/dir # add to PATH
:: or pathadd OTHERPATH /path/to/dir # add to OTHERPATH
::########################################################################
:_spack_pathadd
set "_pa_varname=PATH"
set "_pa_new_path=%~1"
if NOT "%~2" == "" (
set "_pa_varname=%~1"
set "_pa_new_path=%~2"
)
set "_pa_oldvalue=%_pa_varname%"
for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
if defined %_pa_oldvalue% (
set "_pa_varname=%_pa_new_path%:%_pa_oldvalue%"
) else (
set "_pa_varname=%_pa_new_path%"
)
)
exit /b 0
:: set module system roots
:_sp_multi_pathadd
for %%I in (%~2) do (
for %%Z in (%_sp_compatible_sys_types%) do (
:pathadd "%~1" "%%I\%%Z"
)
)
exit /B %ERRORLEVEL%

View File

@@ -1,72 +0,0 @@
@ECHO OFF
setlocal EnableDelayedExpansion
:: (c) 2021 Lawrence Livermore National Laboratory
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
::
:: source_dir/spack/bin/spack_cmd.bat
::
pushd %~dp0..
set SPACK_ROOT=%CD%
pushd %CD%\..
set spackinstdir=%CD%
popd
:: Check if Python is on the PATH
if not defined python_pf_ver (
(for /f "delims=" %%F in ('where python.exe') do (
set "python_pf_ver=%%F"
goto :found_python
) ) 2> NUL
)
:found_python
if not defined python_pf_ver (
:: If not, look for Python from the Spack installer
:get_builtin
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
set "python_ver=%%g")) 2> NUL
if not defined python_ver (
echo Python was not found on your system.
echo Please install Python or add Python to your PATH.
) else (
set "py_path=!spackinstdir!\!python_ver!"
set "py_exe=!py_path!\python.exe"
)
goto :exitpoint
) else (
:: Python is already on the path
set "py_exe=!python_pf_ver!"
(for /F "tokens=* USEBACKQ" %%F in (
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
goto :exitpoint
)
:exitpoint
set "PATH=%SPACK_ROOT%\bin\;%PATH%"
if defined py_path (
set "PATH=%py_path%;%PATH%"
)
if defined py_exe (
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
"%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
)
set "EDITOR=notepad"
DOSKEY spacktivate=spack env activate $*
@echo **********************************************************************
@echo ** Spack Package Manager
@echo **********************************************************************
IF "%1"=="" GOTO CONTINUE
set
GOTO:EOF
:continue
set PROMPT=[spack] %PROMPT%
%comspec% /k

View File

@@ -1,10 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
$Env:SPACK_PS1_PATH="$PSScriptRoot\..\share\spack\setup-env.ps1"
& (Get-Process -Id $pid).Path -NoExit {
. $Env:SPACK_PS1_PATH ;
Push-Location $ENV:SPACK_ROOT
}

View File

@@ -9,24 +9,15 @@ bootstrap:
# may not be able to bootstrap all of the software that Spack needs,
# depending on its type.
sources:
- name: 'github-actions-v0.2'
type: buildcache
description: |
Buildcache generated from a public workflow using Github Actions.
The sha256 checksum of binaries is checked before installation.
info:
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
homepage: https://github.com/spack/spack-bootstrap-mirrors
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
- name: 'github-actions-v0.1'
- name: 'github-actions'
type: buildcache
description: |
Buildcache generated from a public workflow using Github Actions.
The sha256 checksum of binaries is checked before installation.
info:
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
homepage: https://github.com/spack/spack-bootstrap-mirrors
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
# This method is just Spack bootstrapping the software it needs from sources.
# It has been added here so that users can selectively disable bootstrapping
# from sources by "untrusting" it.
@@ -37,5 +28,5 @@ bootstrap:
trusted:
# By default we trust bootstrapping from sources and from binaries
# produced on Github via the workflow
github-actions-v0.2: true
github-actions: true
spack-install: true
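With this file in place, trust can also be toggled per source from the command line instead of editing the YAML; a short sketch using the source names defined above:

```console
$ spack bootstrap list                    # shows github-actions and spack-install
$ spack bootstrap untrust spack-install   # stop bootstrapping from sources
$ spack bootstrap trust spack-install     # turn it back on
```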

View File

@@ -1,17 +0,0 @@
# -------------------------------------------------------------------------
# This is the default spack configuration file.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing
# `$SPACK_ROOT/etc/spack/concretizer.yaml`, `~/.spack/concretizer.yaml`,
# or by adding a `concretizer:` section to an environment.
# -------------------------------------------------------------------------
concretizer:
# Whether to consider installed packages or packages from buildcaches when
# concretizing specs. If `true`, we'll try to use as many installs/binaries
# as possible, rather than building. If `false`, we'll always give you a fresh
# concretization.
reuse: false

View File

@@ -155,17 +155,14 @@ config:
# The concretization algorithm to use in Spack. Options are:
#
# 'original': Spack's original greedy, fixed-point concretizer. This
# algorithm can make decisions too early and will not backtrack
# sufficiently for many specs.
#
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
# backtracking and optimization for user preferences. Spack will
# try to bootstrap the logic solver, if not already available.
#
# 'original': Spack's original greedy, fixed-point concretizer. This
# algorithm can make decisions too early and will not backtrack
# sufficiently for many specs. This will soon be deprecated in
# favor of clingo.
#
# See `concretizer.yaml` for more settings you can fine-tune when
# using clingo.
concretizer: clingo
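A quick way to confirm which concretizer is active is to concretize something and watch the debug output; a sketch:

```console
$ spack -d solve zlib    # debug log reports the clingo solver setup/bootstrap
$ spack spec -I zlib     # print the resulting concretized spec
```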

View File

@@ -35,10 +35,13 @@ modules:
# These are configurations for the module set named "default"
default:
# These values are defaulted in the code. They are not defaulted here so
# that we can enable backwards compatibility with the old syntax more
# easily (old value is in the config yaml, config:module_roots)
# Where to install modules
roots:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
# roots:
# tcl: $spack/share/spack/modules
# lmod: $spack/share/spack/lmod
# What type of modules to use
enable:
- tcl

View File

@@ -1,5 +0,0 @@
config:
locks: false
concretizer: original
build_stage::
- '$spack/.staging'

View File

@@ -194,9 +194,9 @@ Reusing installed dependencies
.. warning::
The ``--reuse`` option described here will become the default installation
method in the next Spack version, and you will be able to get the current
behavior by using ``spack install --fresh``.
The ``--reuse`` option described here is experimental, and it will
likely be replaced with a different option and configuration settings
in the next Spack release.
By default, when you run ``spack install``, Spack tries to build a new
version of the package you asked for, along with updated versions of
@@ -216,9 +216,6 @@ the ``mpich`` will be built with the installed versions, if possible.
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
will be reused and what will be built before you install.
You can configure Spack to use the ``--reuse`` behavior by default in
``concretizer.yaml``.
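A short sketch of the two modes side by side (``mpich`` is only an example package):

.. code-block:: console

   $ spack install --reuse mpich   # prefer already-installed dependencies
   $ spack install --fresh mpich   # concretize from scratch, ignoring installs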
.. _cmd-spack-uninstall:
^^^^^^^^^^^^^^^^^^^
@@ -1283,7 +1280,7 @@ Normally users don't have to bother specifying the architecture if they
are installing software for their current host, as in that case the
values will be detected automatically. If you need fine-grained control
over which packages use which targets (or over *all* packages' default
target), see :ref:`package-preferences`.
target), see :ref:`concretization-preferences`.
.. admonition:: Cray machines
@@ -1723,8 +1720,8 @@ Activating Extensions in a View
Another way to use extensions is to create a view, which merges the
python installation along with the extensions into a single prefix.
See :ref:`configuring_environment_views` for a more in-depth description
of views.
See :ref:`filesystem-views` for a more in-depth description of views and
:ref:`cmd-spack-view` for usage of the ``spack view`` command.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions Globally

View File

@@ -5,9 +5,9 @@
.. _build-settings:
================================
Package Settings (packages.yaml)
================================
===================
Build Customization
===================
Spack allows you to customize how your software is built through the
``packages.yaml`` file. Using it, you can make Spack prefer particular
@@ -209,49 +209,11 @@ Specific limitations include:
then Spack will not add a new external entry (``spack config blame packages``
can help locate all external entries).
.. _concretizer-options:
.. _concretization-preferences:
----------------------
Concretizer options
----------------------
``packages.yaml`` gives the concretizer preferences for specific packages,
but you can also use ``concretizer.yaml`` to customize aspects of the
algorithm it uses to select the dependencies you install:
.. code-block:: yaml
concretizer:
# Whether to consider installed packages or packages from buildcaches when
# concretizing specs. If `true`, we'll try to use as many installs/binaries
# as possible, rather than building. If `false`, we'll always give you a fresh
# concretization.
reuse: false
^^^^^^^^^^^^^^^^
``reuse``
^^^^^^^^^^^^^^^^
This controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use ``spack install --reuse`` to enable reuse for a single installation,
and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
enabled by default.
.. note::
``reuse: false`` is the current default, but ``reuse: true`` will be the default
in the next Spack release. You will still be able to use ``spack install --fresh``
to get the old behavior.
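One way to flip the default without editing ``concretizer.yaml`` by hand is the ``spack config`` interface (a sketch, assuming the ``concretizer`` section is addressable like other config sections):

.. code-block:: console

   $ spack config add concretizer:reuse:true
   $ spack config get concretizer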
.. _package-preferences:
-------------------
Package Preferences
-------------------
--------------------------
Concretization Preferences
--------------------------
Spack can be configured to prefer certain compilers, package
versions, dependencies, and variants during concretization.
@@ -307,7 +269,6 @@ concretization rules. A provider lists a value that packages may
``depend_on`` (e.g, MPI) and a list of rules for fulfilling that
dependency.
.. _package_permissions:
-------------------

View File

@@ -51,7 +51,6 @@ on these ideas for each distinct build system that Spack supports:
build_systems/perlpackage
build_systems/pythonpackage
build_systems/rpackage
build_systems/racketpackage
build_systems/rubypackage
.. toctree::

View File

@@ -433,7 +433,7 @@ For example:
.. code-block:: python
variant('profiler', when='@2.0:')
config_args += self.with_or_without('profiler')
config_args += self.with_or_without('profiler)
will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
below ``2.0``.

View File

@@ -159,85 +159,6 @@ and CMake simply ignores the empty command line argument. For example the following
will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CMake arguments provided by Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The following default arguments are controlled by Spack:
``CMAKE_INSTALL_PREFIX``
------------------------
Is set to the package's install directory.
``CMAKE_PREFIX_PATH``
---------------------
CMake finds dependencies through calls to ``find_package()``, ``find_program()``,
``find_library()``, ``find_file()``, and ``find_path()``, which use a list of search
paths from ``CMAKE_PREFIX_PATH``. Spack sets this variable to a list of prefixes of the
spec's transitive dependencies.
For troubleshooting cases where CMake fails to find a dependency, add the
``--debug-find`` flag to ``cmake_args``.
``CMAKE_BUILD_TYPE``
--------------------
Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
dictate which level of optimization to use. In order to ensure
uniformity across packages, the ``CMakePackage`` base class adds
a variant to control this:
.. code-block:: python
variant('build_type', default='RelWithDebInfo',
description='CMake build type',
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
package overrides the default variant with:
.. code-block:: python
variant('build_type', default='DebugRelease',
description='The build type to build',
values=('Debug', 'Release', 'DebugRelease'))
For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
``CMAKE_INSTALL_RPATH`` and ``CMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``
--------------------------------------------------------------------
CMake uses different RPATHs during the build and after installation, so that executables
can locate the libraries they're linked to during the build, and installed executables
do not have RPATHs to build directories. In Spack, we have to make sure that RPATHs are
set properly after installation.
Spack sets ``CMAKE_INSTALL_RPATH`` to a list of ``<prefix>/lib`` or ``<prefix>/lib64``
directories of the spec's link-type dependencies. Apart from that, it sets
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, which should add RPATHs for directories of
linked libraries not in the directories covered by ``CMAKE_INSTALL_RPATH``.
Usually it's enough to set only ``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, but the
reason to provide both options is that packages may dynamically open shared libraries,
which CMake cannot detect. In those cases, the RPATHs from ``CMAKE_INSTALL_RPATH`` are
used as search paths.
.. note::
Some packages provide stub libraries, which contain an interface for linking without
an implementation. When using such libraries, it's best to override the option
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=OFF`` in ``cmake_args``, so that stub libraries
are not used at runtime.
^^^^^^^^^^
Generators
@@ -275,6 +196,36 @@ generators, but it should be simple to add support for alternative
generators. For more information on CMake generators, see:
https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
^^^^^^^^^^^^^^^^
CMAKE_BUILD_TYPE
^^^^^^^^^^^^^^^^
Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
dictate which level of optimization to use. In order to ensure
uniformity across packages, the ``CMakePackage`` base class adds
a variant to control this:
.. code-block:: python
variant('build_type', default='RelWithDebInfo',
description='CMake build type',
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
package overrides the default variant with:
.. code-block:: python
variant('build_type', default='DebugRelease',
description='The build type to build',
values=('Debug', 'Release', 'DebugRelease'))
For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
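For one-off builds it is usually easier to set the variant on the command line than to edit the package; a sketch with a hypothetical CMake-based package ``foo``:

.. code-block:: console

   $ spack install foo build_type=Debug
   $ spack install foo build_type=Release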
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CMakeLists.txt in a sub-directory
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -649,7 +649,7 @@ follow `the next section <intel-install-libs_>`_ instead.
* If you specified a custom variant (for example ``+vtune``) you may want to add this as your
preferred variant in the packages configuration for the ``intel-parallel-studio`` package
as described in :ref:`package-preferences`. Otherwise you will have to specify
as described in :ref:`concretization-preferences`. Otherwise you will have to specify
the variant every time ``intel-parallel-studio`` is being used as ``mkl``, ``fftw`` or ``mpi``
implementation to avoid pulling in a different variant.
@@ -811,13 +811,13 @@ by one of the following means:
$ spack install libxc@3.0.0%intel
* Alternatively, request Intel compilers implicitly by package preferences.
* Alternatively, request Intel compilers implicitly by concretization preferences.
Configure the order of compilers in the appropriate ``packages.yaml`` file,
under either an ``all:`` or client-package-specific entry, in a
``compiler:`` list. Consult the Spack documentation for
`Configuring Package Preferences <https://spack-tutorial.readthedocs.io/en/latest/tutorial_configuration.html#configuring-package-preferences>`_
and
:ref:`Package Preferences <package-preferences>`.
:ref:`Concretization Preferences <concretization-preferences>`.
Example: ``etc/spack/packages.yaml`` might simply contain:
@@ -867,7 +867,7 @@ virtual package, in order of decreasing preference. To learn more about the
``providers:`` settings, see the Spack tutorial for
`Configuring Package Preferences <https://spack-tutorial.readthedocs.io/en/latest/tutorial_configuration.html#configuring-package-preferences>`_
and the section
:ref:`Package Preferences <package-preferences>`.
:ref:`Concretization Preferences <concretization-preferences>`.
Example: The following fairly minimal example for ``packages.yaml`` shows how
to exclusively use the standalone ``intel-mkl`` package for all the linear

View File

@@ -645,7 +645,8 @@ are not yet in Spack, and Spack contains many Python packages that are
not yet in Anaconda. The main advantage of Spack over Anaconda is its
ability to choose a specific compiler and BLAS/LAPACK or MPI library.
Spack also has better platform support for supercomputers, and can build
optimized binaries for your specific microarchitecture.
optimized binaries for your specific microarchitecture. On the other hand,
Anaconda offers Windows support.
^^^^^^^^^^^^^^^^^^^^^^
External documentation

View File

@@ -1,46 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _racketpackage:
-------------
RacketPackage
-------------
Much like Python, Racket packages and modules have their own special build system.
To learn more about the specifics of Racket package system, please refer to the
`Racket Docs <https://docs.racket-lang.org/pkg/cmdline.html>`_.
^^^^^^
Phases
^^^^^^
The ``RacketPackage`` base class provides an ``install`` phase that
can be overridden, corresponding to the use of:
.. code-block:: console
$ raco pkg install
^^^^^^^
Caveats
^^^^^^^
In principle, ``raco`` supports a second, ``setup`` phase; however, we have not
implemented this separately, as in normal circumstances, ``install`` also handles
running ``setup`` automatically.
Unlike Python, Racket currently only supports two installation scopes for packages, user
or system, and keeps a registry of installed packages at each scope in its configuration files.
This means we can't simply compose a "``RACKET_PATH``" environment variable listing all of the
places packages are installed, and update this at will.
Unfortunately this means that all currently installed packages which extend Racket via ``raco pkg install``
are accessible whenever Racket is accessible.
Additionally, because Spack does not implement uninstall hooks, uninstalling a Spack ``rkt-`` package
will have no effect on the ``raco`` installed packages visible to your Racket installation.
Instead, you must manually run ``raco pkg remove`` to keep the two package managers in a mutually
consistent state.
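A sketch of keeping the two package managers consistent after removing a Spack ``rkt-`` package (names are illustrative):

.. code-block:: console

   $ spack uninstall rkt-example
   $ raco pkg remove example   # manually drop the raco-registered package as well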

View File

@@ -180,7 +180,6 @@ def setup(sphinx):
('py:class', '_frozen_importlib_external.SourceFileLoader'),
# Spack classes that are private and we don't want to expose
('py:class', 'spack.provider_index._IndexBase'),
('py:class', 'spack.repo._PrependFileLoader'),
]
# The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -5,9 +5,9 @@
.. _config-yaml:
============================
Spack Settings (config.yaml)
============================
==============
Basic Settings
==============
Spack's basic configuration options are set in ``config.yaml``. You can
see the default settings by looking at
@@ -72,6 +72,21 @@ used to configure module names.
packages have been installed will prevent Spack from being
able to find the old installation directories.
--------------------
``module_roots``
--------------------
Controls where Spack installs generated module files. You can customize
the location for each type of module. e.g.:
.. code-block:: yaml
module_roots:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
See :ref:`modules` for details.
--------------------
``build_stage``
--------------------

View File

@@ -13,16 +13,12 @@ Spack has many configuration files. Here is a quick list of them, in
case you want to skip directly to specific docs:
* :ref:`compilers.yaml <compiler-config>`
* :ref:`concretizer.yaml <concretizer-options>`
* :ref:`config.yaml <config-yaml>`
* :ref:`mirrors.yaml <mirrors>`
* :ref:`modules.yaml <modules>`
* :ref:`packages.yaml <build-settings>`
* :ref:`repos.yaml <repositories>`
You can also add any of these as inline configuration in ``spack.yaml``
in an :ref:`environment <environment-configuration>`.
-----------
YAML Format
-----------
@@ -37,6 +33,8 @@ Here is an example ``config.yaml`` file:
config:
install_tree: $spack/opt/spack
module_roots:
lmod: $spack/share/spack/lmod
build_stage:
- $tempdir/$user/spack-stage
- ~/.spack/stage
@@ -251,6 +249,8 @@ your configurations look like this:
config:
install_tree: $spack/opt/spack
module_roots:
lmod: $spack/share/spack/lmod
build_stage:
- $tempdir/$user/spack-stage
- ~/.spack/stage
@@ -274,6 +274,8 @@ command:
$ spack config get config
config:
install_tree: /some/other/directory
module_roots:
lmod: $spack/share/spack/lmod
build_stage:
- $tempdir/$user/spack-stage
- ~/.spack/stage
@@ -339,11 +341,13 @@ higher-precedence scope is *prepended* to the defaults. ``spack config
get config`` shows the result:
.. code-block:: console
:emphasize-lines: 5-8
:emphasize-lines: 7-10
$ spack config get config
config:
install_tree: /some/other/directory
module_roots:
lmod: $spack/share/spack/lmod
build_stage:
- /lustre-scratch/$user/spack
- ~/mystage
@@ -367,11 +371,13 @@ user config looked like this:
The merged configuration would look like this:
.. code-block:: console
:emphasize-lines: 5-6
:emphasize-lines: 7-8
$ spack config get config
config:
install_tree: /some/other/directory
module_roots:
lmod: $spack/share/spack/lmod
build_stage:
- /lustre-scratch/$user/spack
- ~/mystage
@@ -492,6 +498,9 @@ account all scopes. For example, to see the fully merged
template_dirs:
- $spack/templates
directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
module_roots:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
build_stage:
- $tempdir/$user/spack-stage
- ~/.spack/stage
@@ -539,6 +548,9 @@ down the problem:
/home/myuser/spack/etc/spack/defaults/config.yaml:23 template_dirs:
/home/myuser/spack/etc/spack/defaults/config.yaml:24 - $spack/templates
/home/myuser/spack/etc/spack/defaults/config.yaml:28 directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
/home/myuser/spack/etc/spack/defaults/config.yaml:32 module_roots:
/home/myuser/spack/etc/spack/defaults/config.yaml:33 tcl: $spack/share/spack/modules
/home/myuser/spack/etc/spack/defaults/config.yaml:34 lmod: $spack/share/spack/lmod
/home/myuser/spack/etc/spack/defaults/config.yaml:49 build_stage:
/home/myuser/spack/etc/spack/defaults/config.yaml:50 - $tempdir/$user/spack-stage
/home/myuser/spack/etc/spack/defaults/config.yaml:51 - ~/.spack/stage

View File

@@ -1057,39 +1057,39 @@ Release branches
^^^^^^^^^^^^^^^^
There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
diagram of how Spack release branches work::
o branch: develop (latest version, v0.19.0.dev0)
o branch: develop (latest version)
|
o
| o branch: releases/v0.18, tag: v0.18.1
o |
| o tag: v0.18.0
o |
| o
o merge v0.14.1 into develop
|\
| o branch: releases/v0.14, tag: v0.14.1
o | merge v0.14.0 into develop
|\|
| o tag: v0.14.0
|/
o
|
o
| o branch: releases/v0.17, tag: v0.17.2
o |
| o tag: v0.17.1
o |
| o tag: v0.17.0
o merge v0.13.2 into develop
|\
| o branch: releases/v0.13, tag: v0.13.2
o | merge v0.13.1 into develop
|\|
| o tag: v0.13.1
o | merge v0.13.0 into develop
|\|
| o tag: v0.13.0
o |
| o
|/
o
The ``develop`` branch has the latest contributions, and nearly all pull
requests target ``develop``. The ``develop`` branch will report that its
version is that of the next **major** release with a ``.dev0`` suffix.
requests target ``develop``.
Each Spack release series also has a corresponding branch, e.g.
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
@@ -1100,20 +1100,12 @@ packages. They should generally only contain fixes to the Spack core.
However, sometimes priorities are such that new functionality needs to
be added to a minor release.
Both major and minor releases are tagged. As a convenience, we also tag
the latest release as ``releases/latest``, so that users can easily check
it out to get the latest stable version. See :ref:`updating-latest-release`
for more details.
.. note::
Older spack releases were merged **back** into develop so that we could
do fancy things with tags, but since tarballs and many git checkouts do
not have tags, this proved overly complex and confusing.
We have since converted to using `PEP 440 <https://peps.python.org/pep-0440/>`_
compliant versions. `See here <https://github.com/spack/spack/pull/25267>`_ for
details.
Both major and minor releases are tagged. After each release, we merge
the release branch back into ``develop`` so that the version bump and any
other release-specific changes are visible in the mainline. As a
convenience, we also tag the latest release as ``releases/latest``,
so that users can easily check it out to get the latest
stable version. See :ref:`merging-releases` for more details.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Scheduling work for releases
@@ -1171,11 +1163,10 @@ completed, the steps to make the major release are:
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
branch if you are preparing the ``X.Y.0`` release.
#. Remove the ``dev0`` development release segment from the version tuple in
``lib/spack/spack/__init__.py``.
#. Bump the version in ``lib/spack/spack/__init__.py``.
The version number itself should already be correct and should not be
modified.
See `this example from 0.13.0
<https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_
#. Update ``CHANGELOG.md`` with major highlights in bullet form.
@@ -1197,16 +1188,9 @@ completed, the steps to make the major release are:
is outdated submit pull requests to ``develop`` as normal
and keep rebasing the release branch on ``develop``.
#. Bump the major version in the ``develop`` branch.
Create a pull request targeting the ``develop`` branch, bumping the major
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
For instance when you have just released ``v0.15.0``, set the version
to ``(0, 16, 0, 'dev0')`` on ``develop``.
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`updating-latest-release`.
#. Follow the steps in :ref:`merging-releases`.
#. Follow the steps in :ref:`announcing-releases`.
@@ -1282,6 +1266,9 @@ completed, the steps to make the point release are:
#. Bump the version in ``lib/spack/spack/__init__.py``.
See `this example from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
#. Update ``CHANGELOG.md`` with a list of the changes.
This is typically a summary of the commits you cherry-picked onto the
@@ -1303,7 +1290,7 @@ completed, the steps to make the point release are:
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`updating-latest-release`.
#. Follow the steps in :ref:`merging-releases`.
#. Follow the steps in :ref:`announcing-releases`.
@@ -1364,11 +1351,11 @@ Publishing a release on GitHub
selectable in the versions menu.
.. _updating-latest-release:
.. _merging-releases:
^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest`
^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest` and `develop`
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
@@ -1392,6 +1379,40 @@ To tag ``releases/latest``, do this:
The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
``releases/latest`` tag with the new one.
We also merge each release that we tag as ``releases/latest`` into ``develop``.
Make sure to do this with a merge commit:
.. code-block:: console
$ git checkout develop
$ git merge --no-ff -s ours vX.Y.Z # vX.Y.Z is the new release's tag
$ git push
We merge back to ``develop`` because it:
* updates the version and ``CHANGELOG.md`` on ``develop``; and
* ensures that your release tag is reachable from the head of
``develop``.
We *must* use a real merge commit (via the ``--no-ff`` option) to
ensure that the release tag is reachable from the tip of ``develop``.
This is necessary for ``spack -V`` to work properly -- it uses ``git
describe --tags`` to find the last reachable tag in the repository and
reports how far we are from it. For example:
.. code-block:: console
$ spack -V
0.14.2-1486-b80d5e74e5
This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
ahead of the ``0.14.2`` release.
We put this step last in the process because it's best to do it only once
the release is complete and tagged. If you do it before you've tagged the
release and later decide you want to tag some later commit, you'll need
to merge again.
.. _announcing-releases:

View File

@@ -5,9 +5,9 @@
.. _environments:
=========================
Environments (spack.yaml)
=========================
============
Environments
============
An environment is used to group together a set of specs for the
purpose of building, rebuilding and deploying in a coherent fashion.
@@ -384,11 +384,18 @@ Sourcing that file in Bash will make the environment available to the
user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.
----------
spack.yaml
----------
Spack environments can be customized at finer granularity by editing
the ``spack.yaml`` manifest file directly.
.. _environment-configuration:
------------------------
^^^^^^^^^^^^^^^^^^^^^^^^
Configuring Environments
------------------------
^^^^^^^^^^^^^^^^^^^^^^^^
A variety of Spack behaviors are changed through Spack configuration
files, covered in more detail in the :ref:`configuration`
@@ -410,9 +417,9 @@ environment can be specified by ``env:NAME`` (to affect environment
``foo``, set ``--scope env:foo``). These commands will automatically
manipulate configuration inline in the ``spack.yaml`` file.
^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""
Inline configurations
^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""
Inline Environment-scope configuration is done using the same yaml
format as standard Spack configuration scopes, covered in the
@@ -433,9 +440,9 @@ a ``packages.yaml`` file) could contain:
This configuration sets the default compiler for all packages to
``intel``.
^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""
Included configurations
^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""
Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
@@ -455,9 +462,9 @@ to make small changes to an individual Environment. Included configs
listed earlier will have higher precedence, as the included configs are
applied in reverse order.
-------------------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Manually Editing the Specs List
-------------------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The list of abstract/root specs in the Environment is maintained in
the ``spack.yaml`` manifest under the heading ``specs``.
@@ -475,9 +482,9 @@ Appending to this list in the yaml is identical to using the ``spack
add`` command from the command line. However, there is more power
available from the yaml file.
^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""
Spec concretization
^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""
Specs can be concretized separately or together, as already
explained in :ref:`environments_concretization`. The behavior active
@@ -503,9 +510,9 @@ which can currently take either one of the two allowed values ``together`` or ``
the environment remains consistent. When instead the specs are concretized
separately only the new specs will be re-concretized after any addition.
^^^^^^^^^^^^^
"""""""""""""
Spec Matrices
^^^^^^^^^^^^^
"""""""""""""
Entries in the ``specs`` list can be individual abstract specs or a
spec matrix.
@@ -565,9 +572,9 @@ This allows one to create toolchains out of combinations of
constraints and apply them somewhat indiscriminately to packages,
without regard for the applicability of the constraint.
^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""
Spec List References
^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""
The last type of possible entry in the specs list is a reference.
@@ -667,9 +674,9 @@ The valid variables for a ``when`` clause are:
#. ``hostname``. The hostname of the system (if ``hostname`` is an
executable in the user's PATH).
^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""
SpecLists as Constraints
^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""
Dependencies and compilers in Spack can be both packages in an
environment and constraints on other packages. References to SpecLists
@@ -701,41 +708,41 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages
and constraints.
----------------
Filesystem Views
----------------
^^^^^^^^^^^^^^^^^^^^^^^^^
Environment-managed Views
^^^^^^^^^^^^^^^^^^^^^^^^^
Spack Environments can define filesystem views, which provide a direct access point
for software similar to the directory hierarchy that might exist under ``/usr/local``.
Filesystem views are updated every time the environment is written out to the lock
file ``spack.lock``, so the concrete environment and the view are always compatible.
The files of the view's installed packages are brought into the view by symbolic or
hard links, referencing the original Spack installation, or by copy.
Spack Environments can define filesystem views of their software,
which are maintained as packages and can be installed and uninstalled from
the Environment. Filesystem views provide an access point for packages
from the filesystem for users who want to access those packages
directly. For more information on filesystem views, see the section
:ref:`filesystem-views`.
Spack Environment managed views are updated every time the environment
is written out to the lock file ``spack.lock``, so the concrete
environment and the view are always compatible.
.. _configuring_environment_views:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration in ``spack.yaml``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""""""""""""""""""""""""""""
Configuring environment views
"""""""""""""""""""""""""""""
The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a **view descriptor**, headed
by a name. Any number of views may be defined under the ``view`` heading.
The view descriptor contains the root of the view, and
``view``. Each entry under that heading is a view descriptor, headed
by a name. The view descriptor contains the root of the view, and
optionally the projections for the view, ``select`` and
``exclude`` lists for the view and link information via ``link`` and
``link_type``.
For example, in the following manifest
``link_type``. For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the PGI compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install
directories.
All the dependencies of each root spec in the environment will be linked
in the view due to the command ``link: all`` and the files in the view will
be symlinks to the spack install directories.
.. code-block:: yaml
@@ -751,26 +758,16 @@ directories.
link: all
link_type: symlink
The default for the ``select`` and
For more information on using view projections, see the section on
:ref:`adding_projections_to_views`. The default for the ``select`` and
``exclude`` values is to select everything and exclude nothing. The
default projection is the default view projection (``{}``). The ``link``
attribute allows the following values:
#. ``link: all`` include root specs with their transitive run and link type
dependencies (default);
#. ``link: run`` include root specs with their transitive run type dependencies;
#. ``link: roots`` include root specs without their dependencies.
The ``link_type`` defaults to ``symlink`` but can also take the value
of ``hardlink`` or ``copy``.
.. tip::
The option ``link: run`` can be used to create small environment views for
Python packages. Python will be able to import packages *inside* of the view even
when the environment is not activated, and linked libraries will be located
*outside* of the view thanks to rpaths.
defaults to ``all`` but can also be ``roots`` when only the root specs
in the environment are desired in the view. The ``link_type`` defaults
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.
Any number of views may be defined under the ``view`` heading in a
Spack Environment.
There are two shorthands for environments with a single view. If the
environment at ``/path/to/env`` has a single view, with a root at
@@ -836,47 +833,9 @@ regenerate`` will regenerate the views for the environment. This will
apply any updates in the environment configuration that have not yet
been applied.
.. _view_projections:
""""""""""""""""
View Projections
""""""""""""""""
The default projection into a view is to link every package into the
root of the view. The projections attribute is a mapping of partial specs to
spec format strings, defined by the :meth:`~spack.spec.Spec.format`
function, as shown in the example below:
.. code-block:: yaml
projections:
zlib: {name}-{version}
^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
all: {name}-{version}/{compiler.name}-{compiler.version}
The entries in the projections configuration file must all be either
specs or the keyword ``all``. For each spec, the projection used will
be the first non-``all`` entry that the spec satisfies, or ``all`` if
there is an entry for ``all`` and no other entry is satisfied by the
spec. Where the keyword ``all`` appears in the file does not
matter.
Given the example above, the spec ``zlib@1.2.8``
will be linked into ``/my/view/zlib-1.2.8/``, the spec
``hdf5@1.8.10+mpi %gcc@4.9.3 ^mvapich2@2.2`` will be linked into
``/my/view/hdf5-1.8.10/mvapich2-2.2-gcc-4.9.3``, and the spec
``hdf5@1.8.10~mpi %gcc@4.9.3`` will be linked into
``/my/view/hdf5-1.8.10/gcc-4.9.3``.
If the keyword ``all`` does not appear in the projections
configuration file, any spec that does not satisfy any entry in the
file will be linked into the root of the view as in a single-prefix
view. Any entries that appear below the keyword ``all`` in the
projections configuration file will not be used, as all specs will use
the projection under ``all`` before reaching those entries.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""
Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""""""""""
The ``spack env activate`` command will put the default view for the
environment into the user's path, in addition to activating the

View File

@@ -149,28 +149,27 @@ Spack fall back to bootstrapping from sources:
.. code-block:: console
$ spack bootstrap untrust github-actions-v0.2
==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping
$ spack bootstrap untrust github-actions
==> "github-actions" is now untrusted and will not be used for bootstrapping
You can verify that the new settings are effective with:
.. code-block:: console
$ spack bootstrap list
Name: github-actions-v0.2 UNTRUSTED
Name: github-actions UNTRUSTED
Type: buildcache
Info:
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
homepage: https://github.com/spack/spack-bootstrap-mirrors
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
Description:
Buildcache generated from a public workflow using Github Actions.
The sha256 checksum of binaries is checked before installation.
[ ... ]
Name: spack-install TRUSTED
@@ -1517,238 +1516,3 @@ To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a linux container on a Cray system as a base
linux distro.
.. _windows_support:
----------------
Spack On Windows
----------------
Windows support for Spack is currently under development. While this work is still in an early stage,
it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
you through the steps needed to install Spack and start running it on a fresh Windows machine.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 1: Install prerequisites
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To use Spack on Windows, you will need the following packages:
Required:
* Microsoft Visual Studio
* Python
* Git
Optional:
* Intel Fortran (needed for some packages)
.. note::
Currently MSVC is the only compiler tested for C/C++ projects. Intel OneAPI provides Fortran support.
"""""""""""""""""""""""
Microsoft Visual Studio
"""""""""""""""""""""""
Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
We require several specific components to be included in the Visual Studio installation.
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
depending on installation type (Professional, Build Tools, etc.) The other required component is
"C++ CMake tools for Windows," which can be selected from among the optional packages.
This provides CMake and Ninja for use during Spack configuration.
If you already have Visual Studio installed, you can make sure these components are installed by
rerunning the installer. Next to your installation, select "Modify" and look at the
"Installation details" pane on the right.
"""""""""""""
Intel Fortran
"""""""""""""
For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
The suite is free to download from Intel's website, located at
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
compiler's (ifort's) frontend and runtime libraries by using LLVM.
""""""
Python
""""""
As Spack is a Python-based package, an installation of Python will be needed to run it.
Python 3 can be downloaded and installed from the Windows Store, and will be automatically added
to your ``PATH`` in this case.
.. note::
Spack currently supports Python versions later than 3.2 inclusive.
"""
Git
"""
A bash console and GUI can be downloaded from https://git-scm.com/downloads.
If you are unfamiliar with Git, there are a myriad of resources online to help
guide you through checking out repositories and switching development branches.
When given the option of adjusting your ``PATH``, choose the ``Git from the
command line and also from 3rd-party software`` option. This will automatically
update your ``PATH`` variable to include the ``git`` command.
Spack support on Windows is currently dependent on installing the Git for Windows project
as the project providing Git support on Windows. This is additionally the recommended method
for installing Git on Windows, a link to which can be found above. Spack requires the
utilities vendored by this project.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 2: Install and setup Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We are now ready to get the Spack environment set up on our machine. We
begin by using Git to clone the Spack repo, hosted at https://github.com/spack/spack.git
into a desired directory, for our purposes today, called ``spack_install``.
In order to install Spack with Windows support, run the following one liner
in a Windows CMD prompt.
.. code-block:: console
git clone https://github.com/spack/spack.git
.. note::
If you chose to install Spack into a directory on Windows that is set up to require Administrative
Privleges, Spack will require elevated privleges to run.
Administrative Privleges can be denoted either by default such as
``C:\Program Files``, or aministrator applied administrative restrictions
on a directory that spack installs files to such as ``C:\Users``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 3: Run and configure Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
directory. This will provide a Windows command prompt with an environment properly set up with Spack
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
(which may happen if you installed Python from the website and not the Windows Store) add the location
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
by using the ``Edit the system environment variables`` utility in Windows Control Panel.
.. note::
Alternatively, Powershell can be used in place of CMD
To configure Spack, first run the following command inside the Spack console:
.. code-block:: console
spack compiler find
This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
output.
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
This file is located at ``etc\spack\defaults\windows\config.yaml``. You can read more on how to
do this and write your own configuration files in the :ref:`Configuration Files<configuration>` section of our
documentation. If you do this, pay particular attention to the ``build_stage`` block of the file
as this specifies the directory that will temporarily hold the source code for the packages to
be installed. This path name must be sufficiently short for compliance with cmd, otherwise you
will see build errors during installation (particularly with CMake) tied to long path names.
To allow Spack use of external tools and dependencies already on your system, the
external pieces of software must be described in the ``packages.yaml`` file.
There are two methods to populate this file:
The first and easiest choice is to use Spack to find installation on your system. In
the Spack terminal, run the following commands:
.. code-block:: console
spack external find cmake
spack external find ninja
The ``spack external find <name>`` will find executables on your system
with the same name given. The command will store the items found in
``packages.yaml`` in the ``.staging\`` directory.
Assuming that the command found CMake and Ninja executables in the previous
step, continue to Step 4. If no executables were found, we may need to manually direct spack towards the CMake
and Ninja installations we set up with Visual Studio. Therefore, your ``packages.yaml`` file will look something
like this, with possibly slight variants in the paths to CMake and Ninja:
.. code-block:: yaml
packages:
cmake:
externals:
- spec: cmake@3.19
prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake'
buildable: False
ninja:
externals:
- spec: ninja@1.8.2
prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\Ninja'
buildable: False
You can also use an separate installation of CMake if you have one and prefer
to use it. If you don't have a path to Ninja analogous to the above, then you can
obtain it by running the Visual Studio Installer and following the instructions
at the start of this section. Also note that .yaml files use spaces for indentation
and not tabs, so ensure that this is the case when editing one directly.
.. note:: Cygwin
The use of Cygwin is not officially supported by Spack and is not tested.
However Spack will not throw an error, so use if choosing to use Spack
with Cygwin, know that no functionality is garunteed.
^^^^^^^^^^^^^^^^^
Step 4: Use Spack
^^^^^^^^^^^^^^^^^
Once the configuration is complete, it is time to give the installation a test. Install a basic package though the
Spack console via:
.. code-block:: console
spack install cpuinfo
If in the previous step, you did not have CMake or Ninja installed, running the command above should boostrap both packages
"""""""""""""""""""""""""""
Windows Compatible Packages
"""""""""""""""""""""""""""
Many Spack packages are not currently compatible with Windows, due to Unix
dependencies or incompatible build tools like autoconf. Here are several
packages known to work on Windows:
* abseil-cpp
* clingo
* cpuinfo
* cmake
* glm
* nasm
* netlib-lapack (requires Intel Fortran)
* ninja
* openssl
* perl
* python
* ruby
* wrf
* zlib
.. note::
This is by no means a comprehensive list
^^^^^^^^^^^^^^
For developers
^^^^^^^^^^^^^^
The intent is to provide a Windows installer that will automatically set up
Python, Git, and Spack, instead of requiring the user to do so manually.
Instructions for creating the installer are at
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
Alternatively a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI

View File

@@ -54,8 +54,9 @@ or refer to the full manual below.
features
getting_started
basic_usage
workflows
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
replace_conda_homebrew
known_issues
.. toctree::
:maxdepth: 2

View File

@@ -0,0 +1,77 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
============
Known Issues
============
This is a list of known bugs in Spack. It provides ways of getting around these
problems if you encounter them.
---------------------------------------------------
Variants are not properly forwarded to dependencies
---------------------------------------------------
**Status:** Expected to be fixed by Spack's new concretizer
Sometimes, a variant of a package can also affect how its dependencies are
built. For example, in order to build MPI support for a package, it may
require that its dependencies are also built with MPI support. In the
``package.py``, this looks like:
.. code-block:: python
depends_on('hdf5~mpi', when='~mpi')
depends_on('hdf5+mpi', when='+mpi')
Spack handles this situation properly for *immediate* dependencies, and
builds ``hdf5`` with the same variant you used for the package that
depends on it. However, for *indirect* dependencies (dependencies of
dependencies), Spack does not backtrack up the DAG far enough to handle
this. Users commonly run into this situation when trying to build R with
X11 support:
.. code-block:: console
$ spack install r+X
...
==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
Package cairo requires variant ~X, but spec asked for +X
A workaround is to explicitly activate the variants of dependencies as well:
.. code-block:: console
$ spack install r+X ^cairo+X ^pango+X
See https://github.com/spack/spack/issues/267 and
https://github.com/spack/spack/issues/2546 for further details.
-----------------------------------------------
depends_on cannot handle recursive dependencies
-----------------------------------------------
**Status:** Not yet a work in progress
Although ``depends_on`` can handle any aspect of Spack's spec syntax,
it currently cannot handle recursive dependencies. If the ``^`` sigil
appears in a ``depends_on`` statement, the concretizer will hang.
For example, something like:
.. code-block:: python
depends_on('mfem+cuda ^hypre+cuda', when='+cuda')
should be rewritten as:
.. code-block:: python
depends_on('mfem+cuda', when='+cuda')
depends_on('hypre+cuda', when='+cuda')
See https://github.com/spack/spack/issues/17660 and
https://github.com/spack/spack/issues/11160 for more details.

View File

@@ -5,9 +5,9 @@
.. _mirrors:
======================
Mirrors (mirrors.yaml)
======================
=======
Mirrors
=======
Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they

View File

@@ -5,9 +5,9 @@
.. _modules:
======================
Modules (modules.yaml)
======================
=======
Modules
=======
The use of module systems to manage user environment in a controlled way
is a common practice at HPC centers that is often embraced also by
@@ -181,7 +181,10 @@ to the environment variables listed below the folder name.
Spack modules can be configured for multiple module sets. The default
module set is named ``default``. All Spack commands which operate on
modules default to apply the ``default`` module set, but can be
applied to any module set in the configuration.
applied to any module set in the configuration. Settings applied at
the root of the configuration (e.g. ``modules:enable`` rather than
``modules:default:enable``) are applied to the default module set for
backwards compatibility.
"""""""""""""""""""""""""
Changing the modules root
@@ -375,7 +378,7 @@ most likely via the ``+blas`` variant specification.
The most heavyweight solution to module naming is to change the entire
naming convention for module files. This uses the projections format
covered in :ref:`view_projections`.
covered in :ref:`adding_projections_to_views`.
.. code-block:: yaml
@@ -537,7 +540,8 @@ configuration:
#. The configuration is for an :ref:`environment <environments>` and
will never be applied outside the environment,
#. The environment in question is configured to use a view,
#. The environment in question is configured to use a :ref:`view
<filesystem-views>`,
#. The :ref:`environment view is configured
<configuring_environment_views>` with a projection that ensures
every package is linked to a unique directory,

View File

@@ -705,8 +705,7 @@ as follows:
#. The following special strings are considered larger than any other
numeric or non-numeric version component, and satisfy the following
order between themselves:
``develop > main > master > head > trunk > stable``.
order between themselves: ``develop > main > master > head > trunk``.
#. Numbers are ordered numerically, are less than special strings, and
larger than other non-numeric components.
@@ -1423,37 +1422,6 @@ other similar operations:
).with_default('auto').with_non_feature_values('auto'),
)
"""""""""""""""""""""""""""
Conditional Possible Values
"""""""""""""""""""""""""""
There are cases where a variant may take multiple values, and the list of allowed values
expand over time. Think for instance at the C++ standard with which we might compile
Boost, which can take one of multiple possible values with the latest standards
only available from a certain version on.
To model a similar situation we can use *conditional possible values* in the variant declaration:
.. code-block:: python
variant(
'cxxstd', default='98',
values=(
'98', '11', '14',
# C++17 is not supported by Boost < 1.63.0.
conditional('17', when='@1.63.0:'),
# C++20/2a is not support by Boost < 1.73.0
conditional('2a', '2b', when='@1.73.0:')
),
multi=False,
description='Use the specified C++ standard when building.',
)
The snippet above allows ``98``, ``11`` and ``14`` as unconditional possible values for the
``cxxstd`` variant, while ``17`` requires a version greater or equal to ``1.63.0``
and both ``2a`` and ``2b`` require a version greater or equal to ``1.73.0``.
^^^^^^^^^^^^^^^^^^^^
Conditional Variants
^^^^^^^^^^^^^^^^^^^^
@@ -2501,24 +2469,6 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
activate``. When it is activated, all the files in its prefix will be
symbolically linked into the prefix of the python package.
A package can only extend one other package at a time. To support packages
that may extend one of a list of other packages, Spack supports multiple
``extends`` directives as long as at most one of them is selected as
a dependency during concretization. For example, a lua package could extend
either lua or luajit, but not both:
.. code-block:: python
class LuaLpeg(Package):
...
variant('use_lua', default=True)
extends('lua', when='+use_lua')
extends('lua-luajit', when='~use_lua')
...
Now, a user can install, and activate, the ``lua-lpeg`` package for either
lua or luajit.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding additional constraints
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -2574,7 +2524,7 @@ from being linked in at activation time.
Views
-----
The ``spack view`` command can be
As covered in :ref:`filesystem-views`, the ``spack view`` command can be
used to symlink a number of packages into a merged prefix. The methods of
``PackageViewMixin`` can be overridden to customize how packages are added
to views. Generally this can be used to create copies of specific files rather
@@ -2908,7 +2858,7 @@ be concretized on their system. For example, one user may prefer packages
built with OpenMPI and the Intel compiler. Another user may prefer
packages be built with MVAPICH and GCC.
See the :ref:`package-preferences` section for more details.
See the :ref:`concretization-preferences` section for more details.
.. _group_when_spec:
@@ -5323,7 +5273,7 @@ would be quite complicated to do using regex only. Employing the
.. code-block:: python
class Gcc(Package):
executables = [r'g\+\+']
executables = ['g++']
def filter_detected_exes(cls, prefix, exes_in_prefix):
return [x for x in exes_in_prefix if 'clang' not in x]

View File

@@ -1,206 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
=====================================
Using Spack to Replace Homebrew/Conda
=====================================
Spack is an incredibly powerful package manager, designed for supercomputers
where users have diverse installation needs. But Spack can also be used to
handle simple single-user installations on your laptop. Most macOS users are
already familiar with package managers like Homebrew and Conda, where all
installed packages are symlinked to a single central location like ``/usr/local``.
In this section, we will show you how to emulate the behavior of Homebrew/Conda
using :ref:`environments`!
-----
Setup
-----
First, let's create a new environment. We'll assume that Spack is already set up
correctly, and that you've already sourced the setup script for your shell.
To create a new environment, simply run:
.. code-block:: console
$ spack env create myenv
Here, *myenv* can be anything you want to name your environment. Next, we can add
a list of packages we would like to install into our environment. Let's say we
want a newer version of Bash than the one that comes with macOS, and we want a
few Python libraries. We can run:
.. code-block:: console
$ spack -e myenv add bash@5 python py-numpy py-scipy py-matplotlib
Each package can be listed on a separate line, or combined into a single line like we did above.
Notice that we're explicitly asking for Bash 5 here. You can use any spec
you would normally use on the command line with other Spack commands.
Next, we want to manually configure a couple of things:
.. code-block:: console
$ spack -e myenv config edit
.. code-block:: yaml
# This is a Spack Environment file.
#
# It describes a set of packages to be installed, along with
# configuration settings.
spack:
# add package specs to the `specs` list
specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
view: true
You can see the packages we added earlier in the ``specs:`` section. If you
ever want to add more packages, you can either use ``spack add`` or manually
edit this file.
We also need to change the ``concretization:`` option. By default, Spack
concretizes each spec *separately*, allowing multiple versions of the same
package to coexist. Since we want a single consistent environment, we want to
concretize all of the specs *together*.
Here is what your ``spack.yaml`` looks like with this new setting:
.. code-block:: yaml
# This is a Spack Environment file.
#
# It describes a set of packages to be installed, along with
# configuration settings.
spack:
# add package specs to the `specs` list
specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
view: true
concretization: together
^^^^^^^^^^^^^^^^
Symlink location
^^^^^^^^^^^^^^^^
Spack symlinks all installations to ``/Users/me/spack/var/spack/environments/myenv/.spack-env/view``,
which is the default when ``view: true``.
You can actually change this to any directory you want. For example, Homebrew
uses ``/usr/local``, while Conda uses ``/Users/me/anaconda``. In order to access
files in these locations, you need to update ``PATH`` and other environment variables
to point to them. Activating the Spack environment does this automatically, but
you can also manually set them in your ``.bashrc``.
.. warning::
There are several reasons why you shouldn't use ``/usr/local``:
1. If you are on macOS 10.11+ (El Capitan and newer), Apple makes it hard
for you. You may notice permissions issues on ``/usr/local`` due to their
`System Integrity Protection <https://support.apple.com/en-us/HT204899>`_.
By default, users don't have permissions to install anything in ``/usr/local``,
and you can't even change this using ``sudo chown`` or ``sudo chmod``.
2. Other package managers like Homebrew will try to install things to the
same directory. If you plan on using Homebrew in conjunction with Spack,
don't symlink things to ``/usr/local``.
3. If you are on a shared workstation, or don't have sudo privileges, you
can't do this.
If you still want to do this anyway, there are several ways around SIP.
You could disable SIP by booting into recovery mode and running
``csrutil disable``, but this is not recommended, as it can open up your OS
to security vulnerabilities. Another technique is to run ``spack concretize``
and ``spack install`` using ``sudo``. This is also not recommended.
The safest way I've found is to create your installation directories using
sudo, then change ownership back to the user like so:
.. code-block:: bash
for directory in .spack bin contrib include lib man share
do
sudo mkdir -p /usr/local/$directory
sudo chown $(id -un):$(id -gn) /usr/local/$directory
done
Depending on the packages you install in your environment, the exact list of
directories you need to create may vary. You may also find some packages
like Java libraries that install a single file to the installation prefix
instead of in a subdirectory. In this case, the action is the same, just replace
``mkdir -p`` with ``touch`` in the for-loop above.
But again, it's safer just to use the default symlink location.
------------
Installation
------------
To actually concretize the environment, run:
.. code-block:: console
$ spack -e myenv concretize
This will tell you which if any packages are already installed, and alert you
to any conflicting specs.
To actually install these packages and symlink them to your ``view:``
directory, simply run:
.. code-block:: console
$ spack -e myenv install
$ spack env activate myenv
Now, when you type ``which python3``, it should find the one you just installed.
In order to change the default shell to our newer Bash installation, we first
need to add it to this list of acceptable shells. Run:
.. code-block:: console
$ sudo vim /etc/shells
and add the absolute path to your bash executable. Then run:
.. code-block:: console
$ chsh -s /path/to/bash
Now, when you log out and log back in, ``echo $SHELL`` should point to the
newer version of Bash.
---------------------------
Updating Installed Packages
---------------------------
Let's say you upgraded to a new version of macOS, or a new version of Python
was released, and you want to rebuild your entire software stack. To do this,
simply run the following commands:
.. code-block:: console
$ spack env activate myenv
$ spack concretize --force
$ spack install
The ``--force`` flag tells Spack to overwrite its previous concretization
decisions, allowing you to choose a new version of Python. If any of the new
packages like Bash are already installed, ``spack install`` won't re-install
them, it will keep the symlinks in place.
--------------
Uninstallation
--------------
If you decide that Spack isn't right for you, uninstallation is simple.
Just run:
.. code-block:: console
$ spack env activate myenv
$ spack uninstall --all
This will uninstall all packages in your environment and remove the symlinks.

View File

@@ -5,9 +5,9 @@
.. _repositories:
=================================
Package Repositories (repos.yaml)
=================================
=============================
Package Repositories
=============================
Spack comes with thousands of built-in package recipes in
``var/spack/repos/builtin/``. This is a **package repository** -- a

View File

@@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 2.7/3.5-3.10, , Interpreter for Spack
Python, 2.7/3.5-3.9, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
1 Name Supported Versions Notes Requirement Reason
2 Python 2.7/3.5-3.10 2.7/3.5-3.9 Interpreter for Spack
3 C/C++ Compilers Building software
4 make Build software
5 patch Build software

1193
lib/spack/docs/workflows.rst Normal file

File diff suppressed because it is too large Load Diff

8
lib/spack/env/cc vendored
View File

@@ -241,28 +241,28 @@ case "$command" in
mode=cpp
debug_flags="-g"
;;
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|emcc)
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
command="$SPACK_CC"
language="C"
comp="CC"
lang_flags=C
debug_flags="-g"
;;
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|em++)
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC)
command="$SPACK_CXX"
language="C++"
comp="CXX"
lang_flags=CXX
debug_flags="-g"
;;
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang)
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
lang_flags=F
debug_flags="-g"
;;
f77|xlf|xlf_r|pgf77|amdflang)
f77|xlf|xlf_r|pgf77)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cpp

View File

@@ -1 +0,0 @@
../fc

View File

@@ -5,36 +5,26 @@
import collections
import errno
import glob
import grp
import hashlib
import itertools
import numbers
import os
import pwd
import re
import shutil
import stat
import sys
import tempfile
from contextlib import contextmanager
from sys import platform as _platform
import six
from llnl.util import tty
from llnl.util.compat import Sequence
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import symlink
from spack.util.executable import Executable
from spack.util.path import path_to_os_path, system_path_filter
is_windows = _platform == 'win32'
if not is_windows:
import grp
import pwd
else:
import win32security
__all__ = [
'FileFilter',
@@ -54,7 +44,6 @@
'fix_darwin_install_name',
'force_remove',
'force_symlink',
'getuid',
'chgrp',
'chmod_x',
'copy',
@@ -71,7 +60,6 @@
'remove_directory_contents',
'remove_if_dead_link',
'remove_linked_tree',
'rename',
'set_executable',
'set_install_permissions',
'touch',
@@ -83,26 +71,6 @@
]
def getuid():
if is_windows:
import ctypes
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
return 1
return 0
else:
return os.getuid()
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
if is_windows:
if os.path.exists(dst):
os.remove(dst)
os.rename(src, dst)
@system_path_filter
def path_contains_subdirectory(path, root):
norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
@@ -127,7 +95,6 @@ def paths_containing_libs(paths, library_names):
required_lib_fnames = possible_library_filenames(library_names)
rpaths_to_include = []
paths = path_to_os_path(*paths)
for path in paths:
fnames = set(os.listdir(path))
if fnames & required_lib_fnames:
@@ -136,7 +103,6 @@ def paths_containing_libs(paths, library_names):
return rpaths_to_include
@system_path_filter
def same_path(path1, path2):
norm1 = os.path.abspath(path1).rstrip(os.path.sep)
norm2 = os.path.abspath(path2).rstrip(os.path.sep)
@@ -187,7 +153,7 @@ def groupid_to_group(x):
if string:
regex = re.escape(regex)
filenames = path_to_os_path(*filenames)
for filename in filenames:
msg = 'FILTER FILE: {0} [replacing "{1}"]'
@@ -297,39 +263,13 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
repl = r's@\1@\2@g'
repl = repl.replace('@', new_delim)
filenames = path_to_os_path(*filenames)
for f in filenames:
filter_file(whole_lines, repl, f)
filter_file(single_quoted, "'%s'" % repl, f)
filter_file(double_quoted, '"%s"' % repl, f)
@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
if not os.path.exists(path):
mkdirp(path, mode=stat.S_IRWXU)
p_stat = os.stat(path)
if p_stat.st_mode & stat.S_IRWXU != stat.S_IRWXU:
tty.error("Expected {0} to support mode {1}, but it is {2}"
.format(path, stat.S_IRWXU, p_stat.st_mode))
raise OSError(errno.EACCES,
err_msg.format(path, path) if err_msg else "")
else:
p_stat = os.stat(path)
if _platform != "win32":
owner_uid = p_stat.st_uid
else:
sid = win32security.GetFileSecurity(
path, win32security.OWNER_SECURITY_INFORMATION) \
.GetSecurityDescriptorOwner()
owner_uid = win32security.LookupAccountSid(None, sid)[0]
return owner_uid
@system_path_filter
def set_install_permissions(path):
"""Set appropriate permissions on the installed file."""
# If this points to a file maintained in a Spack prefix, it is assumed that
@@ -352,22 +292,14 @@ def group_ids(uid=None):
Returns:
(list of int): gids of groups the user is a member of
"""
if is_windows:
tty.warn("Function is not supported on Windows")
return []
if uid is None:
uid = getuid()
uid = os.getuid()
user = pwd.getpwuid(uid).pw_name
return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
@system_path_filter(arg_slice=slice(1))
def chgrp(path, group):
"""Implement the bash chgrp function on a single path"""
if is_windows:
raise OSError("Function 'chgrp' is not supported on Windows")
if isinstance(group, six.string_types):
gid = grp.getgrnam(group).gr_gid
else:
@@ -375,7 +307,6 @@ def chgrp(path, group):
os.chown(path, -1, gid)
@system_path_filter(arg_slice=slice(1))
def chmod_x(entry, perms):
"""Implements chmod, treating all executable bits as set using the chmod
utility's `+X` option.
@@ -389,7 +320,6 @@ def chmod_x(entry, perms):
os.chmod(entry, perms)
@system_path_filter
def copy_mode(src, dest):
"""Set the mode of dest to that of src unless it is a link.
"""
@@ -406,7 +336,6 @@ def copy_mode(src, dest):
os.chmod(dest, dest_mode)
@system_path_filter
def unset_executable_mode(path):
mode = os.stat(path).st_mode
mode &= ~stat.S_IXUSR
@@ -415,7 +344,6 @@ def unset_executable_mode(path):
os.chmod(path, mode)
@system_path_filter
def copy(src, dest, _permissions=False):
"""Copy the file(s) *src* to the file or directory *dest*.
@@ -460,7 +388,6 @@ def copy(src, dest, _permissions=False):
copy_mode(src, dst)
@system_path_filter
def install(src, dest):
"""Install the file(s) *src* to the file or directory *dest*.
@@ -479,7 +406,6 @@ def install(src, dest):
copy(src, dest, _permissions=True)
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
@@ -494,7 +420,6 @@ def resolve_link_target_relative_to_the_link(link):
return os.path.join(link_dir, target)
@system_path_filter
def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
"""Recursively copy an entire directory tree rooted at *src*.
@@ -563,7 +488,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
.format(target, new_target))
target = new_target
symlink(target, d)
os.symlink(target, d)
elif os.path.isdir(link_target):
mkdirp(d)
else:
@@ -579,7 +504,6 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
copy_mode(s, d)
@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
"""Recursively install an entire directory tree rooted at *src*.
@@ -599,13 +523,11 @@ def install_tree(src, dest, symlinks=True, ignore=None):
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@system_path_filter
def is_exe(path):
"""True if path is an executable file."""
return os.path.isfile(path) and os.access(path, os.X_OK)
@system_path_filter
def get_filetype(path_name):
"""
Return the output of file path_name as a string to identify file type.
@@ -617,30 +539,6 @@ def get_filetype(path_name):
return output.strip()
@system_path_filter
def is_nonsymlink_exe_with_shebang(path):
"""
Returns whether the path is an executable script with a shebang.
Return False when the path is a *symlink* to an executable script.
"""
try:
st = os.lstat(path)
# Should not be a symlink
if stat.S_ISLNK(st.st_mode):
return False
# Should be executable
if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
return False
# Should start with a shebang
with open(path, 'rb') as f:
return f.read(2) == b'#!'
except (IOError, OSError):
return False
@system_path_filter(arg_slice=slice(1))
def chgrp_if_not_world_writable(path, group):
"""chgrp path to group if path is not world writable"""
mode = os.stat(path).st_mode
@@ -670,7 +568,7 @@ def mkdirp(*paths, **kwargs):
mode = kwargs.get('mode', None)
group = kwargs.get('group', None)
default_perms = kwargs.get('default_perms', 'args')
paths = path_to_os_path(*paths)
for path in paths:
if not os.path.exists(path):
try:
@@ -731,7 +629,6 @@ def mkdirp(*paths, **kwargs):
raise OSError(errno.EEXIST, "File already exists", path)
@system_path_filter
def force_remove(*paths):
"""Remove files without printing errors. Like ``rm -f``, does NOT
remove directories."""
@@ -743,7 +640,6 @@ def force_remove(*paths):
@contextmanager
@system_path_filter
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
mkdirp(dirname)
@@ -763,7 +659,6 @@ def __init__(self, inner_exception, outer_exception):
@contextmanager
@system_path_filter
def replace_directory_transaction(directory_name, tmp_root=None):
"""Moves a directory to a temporary space. If the operations executed
within the context manager don't raise an exception, the directory is
@@ -819,7 +714,6 @@ def replace_directory_transaction(directory_name, tmp_root=None):
tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
@system_path_filter
def hash_directory(directory, ignore=[]):
"""Hashes recursively the content of a directory.
@@ -848,7 +742,6 @@ def hash_directory(directory, ignore=[]):
@contextmanager
@system_path_filter
def write_tmp_and_move(filename):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
@@ -860,7 +753,6 @@ def write_tmp_and_move(filename):
@contextmanager
@system_path_filter
def open_if_filename(str_or_file, mode='r'):
"""Takes either a path or a file object, and opens it if it is a path.
@@ -873,13 +765,9 @@ def open_if_filename(str_or_file, mode='r'):
yield str_or_file
@system_path_filter
def touch(path):
"""Creates an empty file at the specified path."""
if is_windows:
perms = (os.O_WRONLY | os.O_CREAT)
else:
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
fd = None
try:
fd = os.open(path, perms)
@@ -889,7 +777,6 @@ def touch(path):
os.close(fd)
@system_path_filter
def touchp(path):
"""Like ``touch``, but creates any parent directories needed for the file.
"""
@@ -897,16 +784,14 @@ def touchp(path):
touch(path)
@system_path_filter
def force_symlink(src, dest):
try:
symlink(src, dest)
os.symlink(src, dest)
except OSError:
os.remove(dest)
symlink(src, dest)
os.symlink(src, dest)
@system_path_filter
def join_path(prefix, *args):
path = str(prefix)
for elt in args:
@@ -914,7 +799,6 @@ def join_path(prefix, *args):
return path
@system_path_filter
def ancestor(dir, n=1):
"""Get the nth ancestor of a directory."""
parent = os.path.abspath(dir)
@@ -923,7 +807,6 @@ def ancestor(dir, n=1):
return parent
@system_path_filter
def get_single_file(directory):
fnames = os.listdir(directory)
if len(fnames) != 1:
@@ -943,7 +826,6 @@ def temp_cwd():
@contextmanager
@system_path_filter
def temp_rename(orig_path, temp_path):
same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
if not same_path:
@@ -955,13 +837,11 @@ def temp_rename(orig_path, temp_path):
shutil.move(temp_path, orig_path)
@system_path_filter
def can_access(file_name):
"""True if we have read/write access to the file."""
return os.access(file_name, os.R_OK | os.W_OK)
@system_path_filter
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
"""Traverse two filesystem trees simultaneously.
@@ -1044,80 +924,6 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
yield (source_path, dest_path)
def lexists_islink_isdir(path):
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
number of stat calls."""
# First try to lstat, so we know if it's a link or not.
try:
lst = os.lstat(path)
except (IOError, OSError):
return False, False, False
is_link = stat.S_ISLNK(lst.st_mode)
# Check whether file is a dir.
if not is_link:
is_dir = stat.S_ISDIR(lst.st_mode)
return True, is_link, is_dir
# Check whether symlink points to a dir.
try:
st = os.stat(path)
is_dir = stat.S_ISDIR(st.st_mode)
except (IOError, OSError):
# Dangling symlink (i.e. it lexists but not exists)
is_dir = False
return True, is_link, is_dir
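# Illustrative use of the helper above (assumes /tmp/dangling is free):
# a dangling symlink lexists and islink, but is not a dir.
os.symlink('/nonexistent/target', '/tmp/dangling')
assert lexists_islink_isdir('/tmp/dangling') == (True, True, False)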
def visit_directory_tree(root, visitor, rel_path='', depth=0):
"""
Recurses the directory root depth-first through a visitor pattern
The visitor interface is as follows:
- visit_file(root, rel_path, depth)
- before_visit_dir(root, rel_path, depth) -> bool
if True, descends into this directory
- before_visit_symlinked_dir(root, rel_path, depth) -> bool
if True, descends into this directory
- after_visit_dir(root, rel_path, depth) -> void
only called when before_visit_dir returns True
- after_visit_symlinked_dir(root, rel_path, depth) -> void
only called when before_visit_symlinked_dir returns True
"""
dir = os.path.join(root, rel_path)
if sys.version_info >= (3, 5, 0):
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name) # novermin
else:
dir_entries = os.listdir(dir)
dir_entries.sort()
for f in dir_entries:
if sys.version_info >= (3, 5, 0):
rel_child = os.path.join(rel_path, f.name)
islink, isdir = f.is_symlink(), f.is_dir()
else:
rel_child = os.path.join(rel_path, f)
lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
if not lexists:
continue
if not isdir:
# Handle files
visitor.visit_file(root, rel_child, depth)
elif not islink and visitor.before_visit_dir(root, rel_child, depth):
# Handle ordinary directories
visit_directory_tree(root, visitor, rel_child, depth + 1)
visitor.after_visit_dir(root, rel_child, depth)
elif islink and visitor.before_visit_symlinked_dir(root, rel_child, depth):
# Handle symlinked directories
visit_directory_tree(root, visitor, rel_child, depth + 1)
visitor.after_visit_symlinked_dir(root, rel_child, depth)
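# A minimal visitor satisfying the interface documented above
# (illustrative sketch): collects every regular file and never
# descends into symlinked directories.
class FileCollector(object):
    def __init__(self):
        self.files = []

    def visit_file(self, root, rel_path, depth):
        self.files.append(rel_path)

    def before_visit_dir(self, root, rel_path, depth):
        return True   # descend into ordinary directories

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        return False  # do not follow symlinked directories

    def after_visit_dir(self, root, rel_path, depth):
        pass

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        pass

# collector = FileCollector()
# visit_directory_tree('/some/prefix', collector)  # hypothetical root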
@system_path_filter
def set_executable(path):
mode = os.stat(path).st_mode
if mode & stat.S_IRUSR:
@@ -1129,7 +935,6 @@ def set_executable(path):
os.chmod(path, mode)
@system_path_filter
def last_modification_time_recursive(path):
path = os.path.abspath(path)
times = [os.stat(path).st_mtime]
@@ -1139,7 +944,6 @@ def last_modification_time_recursive(path):
return max(times)
@system_path_filter
def remove_empty_directories(root):
"""Ascend up from the leaves accessible from `root` and remove empty
directories.
@@ -1156,7 +960,6 @@ def remove_empty_directories(root):
pass
@system_path_filter
def remove_dead_links(root):
"""Recursively removes any dead link that is present in root.
@@ -1169,7 +972,6 @@ def remove_dead_links(root):
remove_if_dead_link(path)
@system_path_filter
def remove_if_dead_link(path):
"""Removes the argument if it is a dead link.
@@ -1180,7 +982,6 @@ def remove_if_dead_link(path):
os.unlink(path)
@system_path_filter
def remove_linked_tree(path):
"""Removes a directory and its contents.
@@ -1190,31 +991,15 @@ def remove_linked_tree(path):
Parameters:
path (str): Directory to be removed
"""
# On windows, cleaning a Git stage can be an issue
# as git leaves readonly files that Python handles
# poorly on Windows. Remove readonly status and try again
def onerror(func, path, exc_info):
os.chmod(path, stat.S_IWUSR)
try:
func(path)
except Exception as e:
tty.warn(e)
pass
kwargs = {'ignore_errors': True}
if is_windows:
kwargs = {'onerror': onerror}
if os.path.exists(path):
if os.path.islink(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
shutil.rmtree(os.path.realpath(path), True)
os.unlink(path)
else:
shutil.rmtree(path, **kwargs)
shutil.rmtree(path, True)
@contextmanager
@system_path_filter
def safe_remove(*files_or_dirs):
"""Context manager to remove the files passed as input, but restore
them in case any exception is raised in the context block.
@@ -1261,7 +1046,6 @@ def safe_remove(*files_or_dirs):
raise
@system_path_filter
def fix_darwin_install_name(path):
"""Fix install name of dynamic libraries on Darwin to have full path.
@@ -1348,7 +1132,6 @@ def find(root, files, recursive=True):
return _find_non_recursive(root, files)
@system_path_filter
def _find_recursive(root, search_files):
# The variable here is **on purpose** a defaultdict. The idea is that
@@ -1359,6 +1142,7 @@ def _find_recursive(root, search_files):
# Make the path absolute to have os.walk also return an absolute path
root = os.path.abspath(root)
for path, _, list_files in os.walk(root):
for search_file in search_files:
matches = glob.glob(os.path.join(path, search_file))
@@ -1372,7 +1156,6 @@ def _find_recursive(root, search_files):
return answer
@system_path_filter
def _find_non_recursive(root, search_files):
# The variable here is **on purpose** a defaultdict as os.list_dir
# can return files in any order (does not preserve stability)
@@ -1504,7 +1287,7 @@ def directories(self, value):
if isinstance(value, six.string_types):
value = [value]
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
self._directories = [os.path.normpath(x) for x in value]
def _default_directories(self):
"""Default computation of directories based on the list of
@@ -1662,7 +1445,6 @@ def find_headers(headers, root, recursive=False):
return HeaderList(find(root, headers, recursive))
@system_path_filter
def find_all_headers(root):
"""Convenience function that returns the list of all headers found
in the directory passed as argument.
@@ -1856,18 +1638,12 @@ def find_libraries(libraries, root, shared=True, recursive=False):
raise TypeError(message)
# Construct the right suffix for the library
if shared:
# Used on both Linux and macOS
suffixes = ['so']
if sys.platform == 'darwin':
# Only used on macOS
suffixes.append('dylib')
if shared is True:
suffix = 'dylib' if sys.platform == 'darwin' else 'so'
else:
suffixes = ['a']
suffix = 'a'
# List of libraries we are searching with suffixes
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries
for suffix in suffixes]
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries]
if not recursive:
# If not recursive, look for the libraries directly in root
@@ -1890,7 +1666,6 @@ def find_libraries(libraries, root, shared=True, recursive=False):
return LibraryList(found_libs)
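# Illustrative usage (root path hypothetical): names must carry the
# 'lib' prefix; the result is a LibraryList with link-line helpers.
libs = find_libraries(['libfoo', 'libbar'], root='/opt/foo',
                      shared=True, recursive=True)
# e.g. libs.directories, libs.names, libs.ld_flags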
@system_path_filter
@memoized
def can_access_dir(path):
"""Returns True if the argument is an accessible directory.
@@ -1904,7 +1679,6 @@ def can_access_dir(path):
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)
@system_path_filter
@memoized
def can_write_to_dir(path):
"""Return True if the argument is a directory in which we can write.
@@ -1918,7 +1692,6 @@ def can_write_to_dir(path):
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)
@system_path_filter
@memoized
def files_in(*search_paths):
"""Returns all the files in paths passed as arguments.
@@ -1940,12 +1713,6 @@ def files_in(*search_paths):
return files
def is_readable_file(file_path):
"""Return True if the path passed as argument is readable"""
return os.path.isfile(file_path) and os.access(file_path, os.R_OK)
@system_path_filter
def search_paths_for_executables(*path_hints):
"""Given a list of path hints returns a list of paths where
to search for an executable.
@@ -1973,39 +1740,6 @@ def search_paths_for_executables(*path_hints):
return executable_paths
@system_path_filter
def search_paths_for_libraries(*path_hints):
"""Given a list of path hints returns a list of paths where
to search for a shared library.
Args:
*path_hints (list of paths): list of paths taken into
consideration for a search
Returns:
A list containing the real path of every existing directory
in `path_hints` and its `lib` and `lib64` subdirectory if it exists.
"""
library_paths = []
for path in path_hints:
if not os.path.isdir(path):
continue
path = os.path.abspath(path)
library_paths.append(path)
lib_dir = os.path.join(path, 'lib')
if os.path.isdir(lib_dir):
library_paths.append(lib_dir)
lib64_dir = os.path.join(path, 'lib64')
if os.path.isdir(lib64_dir):
library_paths.append(lib64_dir)
return library_paths
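# Worked example for the helper above, assuming a hypothetical prefix
# /tmp/mylib (not yet existing) that contains only a lib64/ subdirectory:
os.makedirs('/tmp/mylib/lib64')
assert search_paths_for_libraries('/tmp/mylib') == \
    ['/tmp/mylib', '/tmp/mylib/lib64']  # no lib/, so it is skipped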
@system_path_filter
def partition_path(path, entry=None):
"""
Split the prefixes of the path at the first occurrence of entry and
@@ -2022,11 +1756,7 @@ def partition_path(path, entry=None):
# Derive the index of entry within paths, which will correspond to
# the location of the entry in within the path.
try:
sep = os.sep
entries = path.split(sep)
if entries[0].endswith(":"):
# Handle drive letters e.g. C:/ on Windows
entries[0] = entries[0] + sep
entries = path.split(os.sep)
i = entries.index(entry)
if '' in entries:
i -= 1
@@ -2037,7 +1767,6 @@ def partition_path(path, entry=None):
return paths, '', []
@system_path_filter
def prefixes(path):
"""
Returns a list containing the path and its ancestors, top-to-bottom.
@@ -2051,9 +1780,6 @@ def prefixes(path):
For example, path ``./hi/jkl/mn`` results in a list with the following
paths, in order: ``./hi``, ``./hi/jkl``, and ``./hi/jkl/mn``.
On Windows, paths will be normalized to use ``/`` and ``/`` will always
be used as the separator instead of ``os.sep``.
Parameters:
path (str): the string used to derive ancestor paths
@@ -2062,17 +1788,14 @@ def prefixes(path):
"""
if not path:
return []
sep = os.sep
parts = path.strip(sep).split(sep)
if path.startswith(sep):
parts.insert(0, sep)
elif parts[0].endswith(":"):
# Handle drive letters e.g. C:/ on Windows
parts[0] = parts[0] + sep
parts = path.strip(os.sep).split(os.sep)
if path.startswith(os.sep):
parts.insert(0, os.sep)
paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]
try:
paths.remove(sep)
paths.remove(os.sep)
except ValueError:
pass
@@ -2084,7 +1807,6 @@ def prefixes(path):
return paths
@system_path_filter
def md5sum(file):
"""Compute the MD5 sum of a file.
@@ -2100,7 +1822,6 @@ def md5sum(file):
return md5.digest()
@system_path_filter
def remove_directory_contents(dir):
"""Remove all contents of a directory."""
if os.path.exists(dir):
@@ -2112,7 +1833,6 @@ def remove_directory_contents(dir):
@contextmanager
@system_path_filter
def keep_modification_time(*filenames):
"""
Context manager to keep the modification timestamps of the input files.

View File

@@ -5,7 +5,6 @@
from __future__ import division
import contextlib
import functools
import inspect
import os
@@ -13,10 +12,9 @@
import sys
from datetime import datetime, timedelta
import six
from six import string_types
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
from llnl.util.compat import Hashable, MutableMapping, zip_longest
# Ignore emacs backups when listing modules
ignore_modules = [r'^\.#', '~$']
@@ -166,19 +164,6 @@ def union_dicts(*dicts):
return result
# Used as a sentinel that disambiguates tuples passed in *args from coincidentally
# matching tuples formed from kwargs item pairs.
_kwargs_separator = (object(),)
def stable_args(*args, **kwargs):
"""A key factory that performs a stable sort of the parameters."""
key = args
if kwargs:
key += _kwargs_separator + tuple(sorted(kwargs.items()))
return key
def memoized(func):
"""Decorator that caches the results of a function, storing them in
an attribute of that function.
@@ -186,23 +171,15 @@ def memoized(func):
func.cache = {}
@functools.wraps(func)
def _memoized_function(*args, **kwargs):
key = stable_args(*args, **kwargs)
def _memoized_function(*args):
if not isinstance(args, Hashable):
# Not hashable, so just call the function.
return func(*args)
try:
return func.cache[key]
except KeyError:
ret = func(*args, **kwargs)
func.cache[key] = ret
return ret
except TypeError as e:
# TypeError is raised when indexing into a dict if the key is unhashable.
raise six.raise_from(
UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'"
.format(key, func.__name__),
),
e)
if args not in func.cache:
func.cache[args] = func(*args)
return func.cache[args]
return _memoized_function
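# Illustrative sketch: stable_args sorts kwargs, so calls differing only
# in keyword order share one cache entry, and unhashable values raise
# UnhashableArguments instead of a bare TypeError.
@memoized
def add(a, b=0, c=0):
    return a + b + c

add(1, b=2, c=3)   # computed and stored in add.cache
add(1, c=3, b=2)   # same stable key -> served from the cache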
@@ -589,31 +566,20 @@ def match(string):
return match
def dedupe(sequence, key=None):
"""Yields a stable de-duplication of a hashable sequence by key
def dedupe(sequence):
"""Yields a stable de-duplication of a hashable sequence
Args:
sequence: hashable sequence to be de-duplicated
key: callable applied on values before uniqueness test; identity
by default.
Returns:
stable de-duplication of the sequence
Examples:
Dedupe a list of integers:
[x for x in dedupe([1, 2, 1, 3, 2])] == [1, 2, 3]
[x for x in llnl.util.lang.dedupe([1,-2,1,3,2], key=abs)] == [1, -2, 3]
"""
seen = set()
for x in sequence:
x_key = x if key is None else key(x)
if x_key not in seen:
if x not in seen:
yield x
seen.add(x_key)
seen.add(x)
def pretty_date(time, now=None):
@@ -889,6 +855,11 @@ def load_module_from_file(module_name, module_path):
except KeyError:
pass
raise
elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
import importlib.machinery
loader = importlib.machinery.SourceFileLoader( # novm
module_name, module_path)
module = loader.load_module()
elif sys.version_info[0] == 2:
import imp
module = imp.load_source(module_name, module_path)
@@ -950,62 +921,3 @@ def elide_list(line_list, max_num=10):
return line_list[:max_num - 1] + ['...'] + line_list[-1:]
else:
return line_list
@contextlib.contextmanager
def nullcontext(*args, **kwargs):
"""Empty context manager.
TODO: replace with contextlib.nullcontext() if we ever require python 3.7.
"""
yield
class UnhashableArguments(TypeError):
"""Raise when an @memoized function receives unhashable arg or kwarg values."""
def enum(**kwargs):
"""Return an enum-like class.
Args:
**kwargs: explicit dictionary of enums
"""
return type('Enum', (object,), kwargs)
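# Illustrative usage of the enum-like factory above:
Color = enum(RED=1, GREEN=2, BLUE=3)
assert Color.GREEN == 2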
class TypedMutableSequence(MutableSequence):
"""Base class that behaves like a list, just with a different type.
Client code can inherit from this base class:
class Foo(TypedMutableSequence):
pass
and later perform checks based on types:
if isinstance(l, Foo):
# do something
"""
def __init__(self, iterable):
self.data = list(iterable)
def __getitem__(self, item):
return self.data[item]
def __setitem__(self, key, value):
self.data[key] = value
def __delitem__(self, key):
del self.data[key]
def __len__(self):
return len(self.data)
def insert(self, index, item):
self.data.insert(index, item)
def __repr__(self):
return repr(self.data)
def __str__(self):
return str(self.data)

View File

@@ -10,11 +10,9 @@
import filecmp
import os
import shutil
from collections import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, touch, traverse_tree
from llnl.util.symlink import islink, symlink
__all__ = ['LinkTree']
@@ -22,7 +20,7 @@
def remove_link(src, dest):
if not islink(dest):
if not os.path.islink(dest):
raise ValueError("%s is not a link tree!" % dest)
# remove if dest is a hardlink/symlink to src; this will only
# be false if two packages are merged into a prefix and have a
@@ -31,246 +29,6 @@ def remove_link(src, dest):
os.remove(dest)
class MergeConflict:
"""
The invariant here is that src_a and src_b are both mapped
to dst:
project(src_a) == project(src_b) == dst
"""
def __init__(self, dst, src_a=None, src_b=None):
self.dst = dst
self.src_a = src_a
self.src_b = src_b
class SourceMergeVisitor(object):
"""
Visitor that produces actions:
- An ordered list of directories to create in dst
- A list of files to link in dst
- A list of merge conflicts in dst/
"""
def __init__(self, ignore=None):
self.ignore = ignore if ignore is not None else lambda f: False
# When mapping <src root> to <dst root>/<projection>, we need
# to prepend the <projection> bit to the relative path in the
# destination dir.
self.projection = ''
# When a file blocks another file, the conflict can sometimes
# be resolved / ignored (e.g. <prefix>/LICENSE or
# or <site-packages>/<namespace>/__init__.py conflicts can be
# ignored).
self.file_conflicts = []
# When we have to create a dir where a file is, or a file
# where a dir is, we have fatal errors, listed here.
self.fatal_conflicts = []
# What directories we have to make; this is an ordered set,
# so that we have a fast lookup and can run mkdir in order.
self.directories = OrderedDict()
# Files to link. Maps dst_rel to (src_rel, src_root)
self.files = OrderedDict()
def before_visit_dir(self, root, rel_path, depth):
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
"""
proj_rel_path = os.path.join(self.projection, rel_path)
if self.ignore(rel_path):
# Don't recurse when dir is ignored.
return False
elif proj_rel_path in self.files:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.fatal_conflicts.append(MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path)))
return False
elif proj_rel_path in self.directories:
# No new directory, carry on.
return True
else:
# Register new directory.
self.directories[proj_rel_path] = (root, rel_path)
return True
def after_visit_dir(self, root, rel_path, depth):
pass
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Replace symlinked dirs with actual directories when possible in low depths,
otherwise handle it as a file (i.e. we link to the symlink).
Transforming symlinks into dirs makes it more likely we can merge directories,
e.g. when <prefix>/lib -> <prefix>/subdir/lib.
We only do this when the symlink is pointing into a subdirectory from the
symlink's directory, to avoid potential infinite recursion; and only at a
constant level of nesting, to avoid potential exponential blowups in file
duplication.
"""
if self.ignore(rel_path):
return False
# Only follow symlinked dirs in <prefix>/**/**/*
if depth > 1:
handle_as_dir = False
else:
# Only follow symlinked dirs when pointing deeper
src = os.path.join(root, rel_path)
real_parent = os.path.realpath(os.path.dirname(src))
real_child = os.path.realpath(src)
handle_as_dir = real_child.startswith(real_parent)
if handle_as_dir:
return self.before_visit_dir(root, rel_path, depth)
self.visit_file(root, rel_path, depth)
return False
def after_visit_symlinked_dir(self, root, rel_path, depth):
pass
def visit_file(self, root, rel_path, depth):
proj_rel_path = os.path.join(self.projection, rel_path)
if self.ignore(rel_path):
pass
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
src_a_root, src_a_relpath = self.directories[proj_rel_path]
self.fatal_conflicts.append(MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path)))
elif proj_rel_path in self.files:
# In some cases we can resolve file-file conflicts
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.file_conflicts.append(MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path)))
else:
# Otherwise register this file to be linked.
self.files[proj_rel_path] = (root, rel_path)
def set_projection(self, projection):
self.projection = os.path.normpath(projection)
# Todo, is this how to check in general for empty projection?
if self.projection == '.':
self.projection = ''
return
# If there is a projection, we'll also create the directories
# it consists of, and check whether that's causing conflicts.
path = ''
for part in self.projection.split(os.sep):
path = os.path.join(path, part)
if path not in self.files:
self.directories[path] = ('<projection>', path)
else:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[path]
self.fatal_conflicts.append(MergeConflict(
dst=path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join('<projection>', path)))
class DestinationMergeVisitor(object):
"""DestinatinoMergeVisitor takes a SourceMergeVisitor
and:
a. registers additional conflicts when merging
to the destination prefix
b. removes redundant mkdir operations when
directories already exist in the destination
prefix.
This also makes sure that symlinked directories
in the target prefix will never be merged with
directories in the sources directories.
"""
def __init__(self, source_merge_visitor):
self.src = source_merge_visitor
def before_visit_dir(self, root, rel_path, depth):
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(MergeConflict(
rel_path,
os.path.join(src_a_root, src_a_relpath),
os.path.join(root, rel_path)))
return False
# If destination dir was also a src dir, remove the mkdir
# action, and traverse deeper.
if rel_path in self.src.directories:
del self.src.directories[rel_path]
return True
# If the destination dir does not appear in the src dir,
# don't descend into it.
return False
def after_visit_dir(self, root, rel_path, depth):
pass
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Symlinked directories in the destination prefix should
be seen as files; we should not accidentally merge
source dir with a symlinked dest dir.
"""
# Always conflict
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(MergeConflict(
rel_path,
os.path.join(src_a_root, src_a_relpath),
os.path.join(root, rel_path)))
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(MergeConflict(
rel_path,
os.path.join(src_a_root, src_a_relpath),
os.path.join(root, rel_path)))
# Never descend into symlinked target dirs.
return False
def after_visit_symlinked_dir(self, root, rel_path, depth):
pass
def visit_file(self, root, rel_path, depth):
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(MergeConflict(
rel_path,
os.path.join(src_a_root, src_a_relpath),
os.path.join(root, rel_path)))
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(MergeConflict(
rel_path,
os.path.join(src_a_root, src_a_relpath),
os.path.join(root, rel_path)))
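# Sketch of how the two visitors compose with visit_directory_tree from
# llnl.util.filesystem (prefixes and view path are hypothetical):
visitor = SourceMergeVisitor()
for prefix in ('/opt/spack/pkg-a', '/opt/spack/pkg-b'):
    visit_directory_tree(prefix, visitor)

# Second pass over the existing view: drops redundant mkdirs and records
# conflicts with files or symlinked dirs already present there.
visit_directory_tree('/opt/spack/view', DestinationMergeVisitor(visitor))

if visitor.fatal_conflicts:
    raise MergeConflictSummary(visitor.fatal_conflicts)
# visitor.directories and visitor.files now hold the mkdir/link plan.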
class LinkTree(object):
"""Class to create trees of symbolic links from a source directory.
@@ -355,7 +113,7 @@ def unmerge_directories(self, dest_root, ignore):
os.remove(marker)
def merge(self, dest_root, ignore_conflicts=False, ignore=None,
link=symlink, relative=False):
link=os.symlink, relative=False):
"""Link all files in src into dest, creating directories
if necessary.
@@ -367,7 +125,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None,
ignore (callable): callable that returns True if a file is to be
ignored in the merge (by default ignore nothing)
link (callable): function to create links with (defaults to llnl.util.symlink)
link (callable): function to create links with (defaults to os.symlink)
relative (bool): create all symlinks relative to the target
(default False)
@@ -379,7 +137,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None,
conflict = self.find_conflict(
dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts)
if conflict:
raise SingleMergeConflictError(conflict)
raise MergeConflictError(conflict)
self.merge_directories(dest_root, ignore)
existing = []
@@ -411,24 +169,7 @@ def unmerge(self, dest_root, ignore=None, remove_file=remove_link):
class MergeConflictError(Exception):
pass
class SingleMergeConflictError(MergeConflictError):
def __init__(self, path):
super(MergeConflictError, self).__init__(
"Package merge blocked by file: %s" % path)
class MergeConflictSummary(MergeConflictError):
def __init__(self, conflicts):
"""
A human-readable summary of file system view merge conflicts (showing only the
first 3 issues.)
"""
msg = "{0} fatal error(s) when merging prefixes:".format(len(conflicts))
# show the first 3 merge conflicts.
for conflict in conflicts[:3]:
msg += "\n `{0}` and `{1}` both project to `{2}`".format(
conflict.src_a, conflict.src_b, conflict.dst)
super(MergeConflictSummary, self).__init__(msg)

View File

@@ -4,9 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import fcntl
import os
import socket
import sys
import time
from datetime import datetime
from typing import Dict, Tuple # novm
@@ -15,10 +15,6 @@
import spack.util.string
if sys.platform != 'win32':
import fcntl
__all__ = [
'Lock',
'LockDowngradeError',
@@ -33,6 +29,8 @@
'CantCreateLockError'
]
#: Mapping of supported locks to description
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
#: A useful replacement for functions that should return True when not provided
#: for example.
@@ -168,30 +166,6 @@ def _attempts_str(wait_time, nattempts):
return ' after {0:0.2f}s and {1}'.format(wait_time, attempts)
class LockType(object):
READ = 0
WRITE = 1
@staticmethod
def to_str(tid):
ret = "READ"
if tid == LockType.WRITE:
ret = "WRITE"
return ret
@staticmethod
def to_module(tid):
lock = fcntl.LOCK_SH
if tid == LockType.WRITE:
lock = fcntl.LOCK_EX
return lock
@staticmethod
def is_valid(op):
return op == LockType.READ \
or op == LockType.WRITE
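# Illustrative check of the READ/WRITE mapping above:
assert LockType.to_module(LockType.READ) == fcntl.LOCK_SH
assert LockType.to_module(LockType.WRITE) == fcntl.LOCK_EX
assert LockType.to_str(LockType.WRITE) == 'WRITE'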
class Lock(object):
"""This is an implementation of a filesystem lock using Python's lockf.
@@ -302,10 +276,9 @@ def _lock(self, op, timeout=None):
successfully acquired, the total wait time and the number of attempts
is returned.
"""
assert LockType.is_valid(op)
op_str = LockType.to_str(op)
assert op in lock_type
self._log_acquiring('{0} LOCK'.format(op_str))
self._log_acquiring('{0} LOCK'.format(lock_type[op].upper()))
timeout = timeout or self.default_timeout
# Create file and parent directories if they don't exist.
@@ -313,13 +286,13 @@ def _lock(self, op, timeout=None):
self._ensure_parent_directory()
self._file = file_tracker.get_fh(self.path)
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == 'r':
if op == fcntl.LOCK_EX and self._file.mode == 'r':
# Attempt to upgrade to write lock w/a read-only file.
# If the file were writable, we'd have opened it 'r+'
raise LockROFileError(self.path)
self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
.format(op_str.lower(), self._start, self._length,
.format(lock_type[op], self._start, self._length,
timeout))
poll_intervals = iter(Lock._poll_interval_generator())
@@ -340,16 +313,17 @@ def _lock(self, op, timeout=None):
return total_wait_time, num_attempts
raise LockTimeoutError("Timed out waiting for a {0} lock."
.format(op_str.lower()))
.format(lock_type[op]))
def _poll_lock(self, op):
"""Attempt to acquire the lock in a non-blocking manner. Return whether
the locking attempt succeeds
"""
module_op = LockType.to_module(op)
assert op in lock_type
try:
# Try to get the lock (will raise if not available.)
fcntl.lockf(self._file, module_op | fcntl.LOCK_NB,
fcntl.lockf(self._file, op | fcntl.LOCK_NB,
self._length, self._start, os.SEEK_SET)
# help for debugging distributed locking
@@ -357,11 +331,11 @@ def _poll_lock(self, op):
# All locks read the owner PID and host
self._read_log_debug_data()
self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
.format(LockType.to_str(op), self.path,
.format(lock_type[op], self.path,
self._start, self._length, self.pid))
# Exclusive locks write their PID/host
if module_op == fcntl.LOCK_EX:
if op == fcntl.LOCK_EX:
self._write_log_debug_data()
return True
@@ -446,7 +420,7 @@ def acquire_read(self, timeout=None):
if self._reads == 0 and self._writes == 0:
# can raise LockError.
wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
wait_time, nattempts = self._lock(fcntl.LOCK_SH, timeout=timeout)
self._reads += 1
# Log if acquired, which includes counts when verbose
self._log_acquired('READ LOCK', wait_time, nattempts)
@@ -471,7 +445,7 @@ def acquire_write(self, timeout=None):
if self._writes == 0:
# can raise LockError.
wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
self._writes += 1
# Log if acquired, which includes counts when verbose
self._log_acquired('WRITE LOCK', wait_time, nattempts)
@@ -515,7 +489,7 @@ def downgrade_write_to_read(self, timeout=None):
if self._writes == 1 and self._reads == 0:
self._log_downgrading()
# can raise LockError.
wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
wait_time, nattempts = self._lock(fcntl.LOCK_SH, timeout=timeout)
self._reads = 1
self._writes = 0
self._log_downgraded(wait_time, nattempts)
@@ -534,7 +508,7 @@ def upgrade_read_to_write(self, timeout=None):
if self._reads == 1 and self._writes == 0:
self._log_upgrading()
# can raise LockError.
wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
self._reads = 0
self._writes = 1
self._log_upgraded(wait_time, nattempts)
@@ -618,12 +592,6 @@ def release_write(self, release_fn=None):
else:
return False
def cleanup(self):
if self._reads == 0 and self._writes == 0:
os.unlink(self.path)
else:
raise LockError("Attempting to cleanup active lock.")
def _get_counts_desc(self):
return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
if tty.is_verbose() else ''
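# Illustrative usage sketch (POSIX only; lock file path is hypothetical):
# read locks are shared and can later be upgraded to a write lock.
lock = Lock('/tmp/demo.lock')
lock.acquire_read(timeout=10)
try:
    pass  # read the protected state
finally:
    lock.release_read()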

View File

@@ -1,112 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import shutil
import tempfile
from os.path import exists, join
from sys import platform as _platform
from llnl.util import lang
is_windows = _platform == 'win32'
if is_windows:
from win32file import CreateHardLink
def symlink(real_path, link_path):
"""
Create a symbolic link.
On Windows, use junctions if os.symlink fails.
"""
if not is_windows or _win32_can_symlink():
os.symlink(real_path, link_path)
else:
try:
# Try to use junctions
_win32_junction(real_path, link_path)
except OSError:
# If all else fails, fall back to copying files
shutil.copyfile(real_path, link_path)
def islink(path):
return os.path.islink(path) or _win32_is_junction(path)
# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
# junctions require absolute paths
if not os.path.isabs(link):
link = os.path.abspath(link)
# os.symlink will fail if link exists, emulate the behavior here
if exists(link):
raise OSError(errno.EEXIST, 'File exists: %s -> %s' % (link, path))
if not os.path.isabs(path):
parent = os.path.join(link, os.pardir)
path = os.path.join(parent, path)
path = os.path.abspath(path)
CreateHardLink(link, path)
@lang.memoized
def _win32_can_symlink():
tempdir = tempfile.mkdtemp()
dpath = join(tempdir, 'dpath')
fpath = join(tempdir, 'fpath.txt')
dlink = join(tempdir, 'dlink')
flink = join(tempdir, 'flink.txt')
import llnl.util.filesystem as fs
fs.touchp(fpath)
try:
os.symlink(dpath, dlink)
can_symlink_directories = os.path.islink(dlink)
except OSError:
can_symlink_directories = False
try:
os.symlink(fpath, flink)
can_symlink_files = os.path.islink(flink)
except OSError:
can_symlink_files = False
# Cleanup the test directory
shutil.rmtree(tempdir)
return can_symlink_directories and can_symlink_files
def _win32_is_junction(path):
"""
Determines if a path is a win32 junction
"""
if os.path.islink(path):
return False
if is_windows:
import ctypes.wintypes
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
GetFileAttributes.restype = ctypes.wintypes.DWORD
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
res = GetFileAttributes(path)
return res != INVALID_FILE_ATTRIBUTES and \
bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
return False

View File

@@ -6,22 +6,19 @@
from __future__ import unicode_literals
import contextlib
import fcntl
import os
import struct
import sys
import termios
import textwrap
import traceback
from datetime import datetime
from sys import platform as _platform
import six
from six import StringIO
from six.moves import input
if _platform != "win32":
import fcntl
import termios
from llnl.util.tty.color import cescape, clen, cprint, cwrite
# Globals
@@ -146,7 +143,7 @@ def process_stacktrace(countback):
file_list = []
for frame in st:
# Check that the file is a spack file
if frame[0].find(os.path.sep + "spack") >= 0:
if frame[0].find("/spack") >= 0:
file_list.append(frame[0])
# We use commonprefix to find what the spack 'root' directory is.
root_dir = os.path.commonprefix(file_list)
@@ -373,29 +370,22 @@ def hline(label=None, **kwargs):
def terminal_size():
"""Gets the dimensions of the console: (rows, cols)."""
if _platform != "win32":
def ioctl_gwinsz(fd):
try:
rc = struct.unpack('hh', fcntl.ioctl(
fd, termios.TIOCGWINSZ, '1234'))
except BaseException:
return
return rc
rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not rc:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
rc = ioctl_gwinsz(fd)
os.close(fd)
except BaseException:
pass
if not rc:
rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(rc[0]), int(rc[1])
else:
if sys.version_info[0] < 3:
raise RuntimeError("Terminal size not obtainable on Windows with a\
Python version older than 3")
def ioctl_gwinsz(fd):
try:
rc = struct.unpack('hh', fcntl.ioctl(
fd, termios.TIOCGWINSZ, '1234'))
except BaseException:
return
return rc
rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not rc:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
rc = ioctl_gwinsz(fd)
os.close(fd)
except BaseException:
pass
if not rc:
rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(rc[0]), int(rc[1])
return int(rc[0]), int(rc[1])

View File

@@ -8,19 +8,15 @@
from __future__ import unicode_literals
import atexit
import ctypes
import errno
import io
import multiprocessing
import os
import re
import select
import signal
import sys
import threading
import traceback
from contextlib import contextmanager
from threading import Thread
from types import ModuleType # novm
from typing import Optional # novm
@@ -403,7 +399,7 @@ def replace_environment(env):
os.environ[name] = val
def log_output(*args, **kwargs):
class log_output(object):
"""Context manager that logs its output to a file.
In the simplest case, the usage looks like this::
@@ -418,7 +414,6 @@ def log_output(*args, **kwargs):
with log_output('logfile.txt', echo=True):
# do things ... output will be logged and printed out
The following is available on Unix only. No-op on Windows.
And, if you just want to echo *some* stuff from the parent, use
``force_echo``::
@@ -428,20 +423,6 @@ def log_output(*args, **kwargs):
with logger.force_echo():
# things here will be echoed *and* logged
See individual log classes for more information.
This method is actually a factory serving a per-platform
(unix vs windows) log_output class
"""
if sys.platform == 'win32':
return winlog(*args, **kwargs)
else:
return nixlog(*args, **kwargs)
class nixlog(object):
"""
Under the hood, we spawn a daemon and set up a pipe between this
process and the daemon. The daemon writes our output to both the
file and to stdout (if echoing). The parent process can communicate
@@ -583,7 +564,7 @@ def __enter__(self):
sys.stdout.flush()
sys.stderr.flush()
# Now do the actual output redirection.
# Now do the actual output rediction.
self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
if self.use_fds:
# We try first to use OS-level file descriptors, as this
@@ -690,175 +671,6 @@ def force_echo(self):
sys.stdout.flush()
class StreamWrapper:
""" Wrapper class to handle redirection of io streams """
def __init__(self, sys_attr):
self.sys_attr = sys_attr
self.saved_stream = None
if sys.platform.startswith('win32'):
if sys.version_info < (3, 5):
libc = ctypes.CDLL(ctypes.util.find_library('c'))
else:
if hasattr(sys, 'gettotalrefcount'): # debug build
libc = ctypes.CDLL('ucrtbased')
else:
libc = ctypes.CDLL('api-ms-win-crt-stdio-l1-1-0')
kernel32 = ctypes.WinDLL('kernel32')
# https://docs.microsoft.com/en-us/windows/console/getstdhandle
if self.sys_attr == 'stdout':
STD_HANDLE = -11
elif self.sys_attr == 'stderr':
STD_HANDLE = -12
else:
raise KeyError(self.sys_attr)
c_stdout = kernel32.GetStdHandle(STD_HANDLE)
self.libc = libc
self.c_stream = c_stdout
else:
self.libc = ctypes.CDLL(None)
self.c_stream = ctypes.c_void_p.in_dll(self.libc, self.sys_attr)
self.sys_stream = getattr(sys, self.sys_attr)
self.orig_stream_fd = self.sys_stream.fileno()
# Save a copy of the original stdout fd in saved_stream
self.saved_stream = os.dup(self.orig_stream_fd)
def redirect_stream(self, to_fd):
"""Redirect stdout to the given file descriptor."""
# Flush the C-level buffer stream
if sys.platform.startswith('win32'):
self.libc.fflush(None)
else:
self.libc.fflush(self.c_stream)
# Flush and close sys_stream - also closes the file descriptor (fd)
sys_stream = getattr(sys, self.sys_attr)
sys_stream.flush()
sys_stream.close()
# Make orig_stream_fd point to the same file as to_fd
os.dup2(to_fd, self.orig_stream_fd)
# Set sys_stream to a new stream that points to the redirected fd
new_buffer = open(self.orig_stream_fd, 'wb')
new_stream = io.TextIOWrapper(new_buffer)
setattr(sys, self.sys_attr, new_stream)
self.sys_stream = getattr(sys, self.sys_attr)
def flush(self):
if sys.platform.startswith('win32'):
self.libc.fflush(None)
else:
self.libc.fflush(self.c_stream)
self.sys_stream.flush()
def close(self):
"""Redirect back to the original system stream, and close stream"""
try:
if self.saved_stream is not None:
self.redirect_stream(self.saved_stream)
finally:
if self.saved_stream is not None:
os.close(self.saved_stream)
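# Standalone sketch of the dup2() technique StreamWrapper relies on:
# save the real stdout fd, point fd 1 at a file, then restore it
# (log path is hypothetical).
import os
import sys

saved = os.dup(sys.stdout.fileno())           # handle on the real stdout
log_fd = os.open('/tmp/demo.log', os.O_WRONLY | os.O_CREAT)
sys.stdout.flush()
os.dup2(log_fd, sys.stdout.fileno())          # fd 1 now writes to the file
print("captured")                             # lands in /tmp/demo.log
sys.stdout.flush()
os.dup2(saved, sys.stdout.fileno())           # restore the original stream
os.close(saved)
os.close(log_fd)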
class winlog(object):
"""
Similar to nixlog, with underlying
functionality ported to support Windows.
Does not support the use of 'v' toggling as nixlog does.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
env=None, filter_fn=None):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
self.stdout = StreamWrapper('stdout')
self.stderr = StreamWrapper('stderr')
self._active = False
self._ioflag = False
self.old_stdout = sys.stdout
self.old_stderr = sys.stderr
def __enter__(self):
if self._active:
raise RuntimeError("Can't re-enter the same log_output!")
if self.logfile is None:
raise RuntimeError(
"file argument must be set by __init__ ")
# Open both write and reading on logfile
if type(self.logfile) == StringIO:
self._ioflag = True
# cannot have two streams on tempfile, so we must make our own
sys.stdout = self.logfile
sys.stderr = self.logfile
else:
self.writer = open(self.logfile, mode='wb+')
self.reader = open(self.logfile, mode='rb+')
# Dup stdout so we can still write to it after redirection
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
# Redirect stdout and stderr to write to logfile
self.stderr.redirect_stream(self.writer.fileno())
self.stdout.redirect_stream(self.writer.fileno())
self._kill = threading.Event()
def background_reader(reader, echo_writer, _kill):
# for each line printed to logfile, read it
# if echo: write line to user
while True:
is_killed = _kill.wait(.1)
self.stderr.flush()
self.stdout.flush()
line = reader.readline()
while line:
if self.echo:
self.echo_writer.write('{0}'.format(line.decode()))
self.echo_writer.flush()
line = reader.readline()
if is_killed:
break
self._active = True
with replace_environment(self.env):
self._thread = Thread(target=background_reader,
args=(self.reader, self.echo_writer, self._kill))
self._thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self._ioflag:
sys.stdout = self.old_stdout
sys.stderr = self.old_stderr
self._ioflag = False
else:
self.writer.close()
self.reader.close()
self.echo_writer.flush()
self.stdout.flush()
self.stderr.flush()
self._kill.set()
self._thread.join()
self.stdout.close()
self.stderr.close()
self._active = False
@contextmanager
def force_echo(self):
"""Context manager to force local echo, even if echo is off."""
if not self._active:
raise RuntimeError(
"Can't call force_echo() outside log_output region!")
try:
yield self
finally:
pass
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
log_file_wrapper, control_pipe, filter_fn):
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.

View File

@@ -11,7 +11,6 @@
things like timeouts in ``ProcessController.wait()``, which are set to
get tests done quickly, not to avoid high CPU usage.
Note: The functionality in this module is unsupported on Windows
"""
from __future__ import print_function
@@ -20,6 +19,7 @@
import re
import signal
import sys
import termios
import time
import traceback
@@ -27,13 +27,6 @@
from spack.util.executable import which
termios = None
try:
import termios as term_mod
termios = term_mod
except ImportError:
pass
class ProcessController(object):
"""Wrapper around some fundamental process control operations.

View File

@@ -3,11 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 18, 0, 'dev0')
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 17, 1)
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = '.'.join(str(s) for s in spack_version_info)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
__all__ = ['spack_version_info', 'spack_version']
__version__ = spack_version

View File

@@ -41,14 +41,11 @@ def _search_duplicate_compilers(error_cls):
from six.moves.urllib.request import urlopen
import llnl.util.lang
from llnl.util.compat import Sequence
try:
from collections.abc import Sequence # novm
except ImportError:
from collections import Sequence
import spack.config
import spack.patch
import spack.repo
import spack.spec
import spack.variant
#: Map an audit tag to a list of callables implementing checks
CALLBACKS = {}
@@ -183,6 +180,7 @@ def run_check(tag, **kwargs):
@config_compiler
def _search_duplicate_compilers(error_cls):
"""Report compilers with the same spec and two different definitions"""
import spack.config
errors = []
compilers = list(sorted(
@@ -219,6 +217,8 @@ def _search_duplicate_compilers(error_cls):
@config_packages
def _search_duplicate_specs_in_externals(error_cls):
"""Search for duplicate specs declared as externals"""
import spack.config
errors, externals = [], collections.defaultdict(list)
packages_yaml = spack.config.get('packages')
@@ -265,7 +265,6 @@ def _search_duplicate_specs_in_externals(error_cls):
kwargs=('pkgs',)
)
#: Sanity checks on linting
# This can take some time, so it's run separately from packages
package_https_directives = AuditClass(
@@ -276,40 +275,15 @@ def _search_duplicate_specs_in_externals(error_cls):
)
@package_directives
def _check_patch_urls(pkgs, error_cls):
"""Ensure that patches fetched from GitHub have stable sha256 hashes."""
github_patch_url_re = (
r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
)
errors = []
for pkg_name in pkgs:
pkg = spack.repo.get(pkg_name)
for condition, patches in pkg.patches.items():
for patch in patches:
if not isinstance(patch, spack.patch.UrlPatch):
continue
if not re.match(github_patch_url_re, patch.url):
continue
full_index_arg = "?full_index=1"
if not patch.url.endswith(full_index_arg):
errors.append(error_cls(
"patch URL in package {0} must end with {1}".format(
pkg.name, full_index_arg,
),
[patch.url],
))
return errors
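# Illustrative example of a URL the audit above accepts (hypothetical):
# without ?full_index=1, GitHub may abbreviate object ids in the generated
# patch, so its sha256 can change over time.
import re
url = "https://github.com/org/repo/pull/1234.patch?full_index=1"
assert re.match(github_patch_url_re, url)
assert url.endswith("?full_index=1")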
@package_https_directives
def _linting_package_file(pkgs, error_cls):
"""Check for correctness of links
"""
import llnl.util.lang
import spack.repo
import spack.spec
errors = []
for pkg_name in pkgs:
pkg = spack.repo.get(pkg_name)
@@ -334,6 +308,11 @@ def _linting_package_file(pkgs, error_cls):
@package_directives
def _unknown_variants_in_directives(pkgs, error_cls):
"""Report unknown or wrong variants in directives for this package"""
import llnl.util.lang
import spack.repo
import spack.spec
errors = []
for pkg_name in pkgs:
pkg = spack.repo.get(pkg_name)
@@ -388,6 +367,9 @@ def _unknown_variants_in_directives(pkgs, error_cls):
@package_directives
def _unknown_variants_in_dependencies(pkgs, error_cls):
"""Report unknown dependencies and wrong variants for dependencies"""
import spack.repo
import spack.spec
errors = []
for pkg_name in pkgs:
pkg = spack.repo.get(pkg_name)
@@ -435,6 +417,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
"""Report if version constraints used in directives are not satisfiable"""
import spack.repo
errors = []
for pkg_name in pkgs:
pkg = spack.repo.get(pkg_name)
@@ -471,6 +455,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
import spack.variant
variant_exceptions = (
spack.variant.InconsistentValidationError,
spack.variant.MultipleValuesInExclusiveVariantError,

View File

@@ -2065,13 +2065,14 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
def download_single_spec(
concrete_spec, destination, mirror_url=None
concrete_spec, destination, require_cdashid=False, mirror_url=None
):
"""Download the buildcache files for a single concrete spec.
Args:
concrete_spec: concrete spec to be downloaded
destination (str): path where to put the downloaded buildcache
require_cdashid (bool): if False the `.cdashid` file is optional
mirror_url (str): url of the mirror from which to download
"""
tarfile_name = tarball_name(concrete_spec, '.spack')
@@ -2089,6 +2090,10 @@ def download_single_spec(
tarball_name(concrete_spec, '.spec.yaml')],
'path': destination,
'required': True,
}, {
'url': [tarball_name(concrete_spec, '.cdashid')],
'path': destination,
'required': require_cdashid,
},
]
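# Illustrative call of the signature above (mirror URL hypothetical;
# the spec must be concrete):
import spack.spec
concrete_spec = spack.spec.Spec('zlib').concretized()
download_single_spec(concrete_spec, destination='/tmp/buildcache',
                     require_cdashid=False,
                     mirror_url='https://mirror.example.com/buildcache')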

View File

@@ -727,11 +727,9 @@ def _root_spec(spec_str):
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
"""
# Add a proper compiler hint to the root spec. We use GCC for
# everything but MacOS and Windows.
# everything but MacOS.
if str(spack.platforms.host()) == 'darwin':
spec_str += ' %apple-clang'
elif str(spack.platforms.host()) == 'windows':
spec_str += ' %msvc'
else:
spec_str += ' %gcc'
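# Illustrative results of the hint logic above (spec strings only):
#   on Linux:  _root_spec('gnupg@2.3:') -> 'gnupg@2.3: %gcc'
#   on macOS:  _root_spec('gnupg@2.3:') -> 'gnupg@2.3: %apple-clang'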

View File

@@ -46,7 +46,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd
@@ -374,8 +373,7 @@ def set_wrapper_variables(pkg, env):
# directory. Add that to the path too.
env_paths = []
compiler_specific = os.path.join(
spack.paths.build_env_path,
os.path.dirname(pkg.compiler.link_paths['cc']))
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths['cc']))
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
ci = os.path.join(item, 'case-insensitive')
@@ -512,14 +510,7 @@ def _set_variables_for_single_module(pkg, module):
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
# FIXME: !!!!!
m.make = MakeExecutable('make', jobs)
m.emmake = MakeExecutable('emmake', jobs)
m.emmake.add_default_arg('make')
m.emmake.add_default_arg('AR=emar')
m.emmake.add_default_arg('RANLIB=emranlib')
m.emmake.add_default_arg('NM=emnm')
m.gmake = MakeExecutable('gmake', jobs)
m.scons = MakeExecutable('scons', jobs)
m.ninja = MakeExecutable('ninja', jobs)
@@ -530,20 +521,12 @@ def _set_variables_for_single_module(pkg, module):
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
m.emconfigure = Executable('emconfigure')
m.emconfigure.add_default_arg('./configure')
m.meson = Executable('meson')
m.cmake = Executable('cmake')
m.emcmake = Executable('emcmake')
m.emcmake.add_default_arg('cmake')
m.ctest = MakeExecutable('ctest', jobs)
if sys.platform == 'win32':
m.nmake = Executable('nmake')
# Standard CMake arguments
# Standard build system arguments
m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPackage._std_args(pkg)
@@ -562,7 +545,7 @@ def _set_variables_for_single_module(pkg, module):
m.makedirs = os.makedirs
m.remove = os.remove
m.removedirs = os.removedirs
m.symlink = symlink
m.symlink = os.symlink
m.mkdirp = mkdirp
m.install = install
@@ -685,11 +668,11 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
shared_lib_link = os.path.basename(shared_lib)
if version or compat_version:
symlink(shared_lib_link, shared_lib_base)
os.symlink(shared_lib_link, shared_lib_base)
if compat_version and compat_version != version:
symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
compat_version))
os.symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
compat_version))
return compiler(*compiler_args, output=compiler_output)
@@ -1153,8 +1136,7 @@ def child_fun():
try:
# Forward sys.stdin when appropriate, to allow toggling verbosity
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin,
'fileno'):
if sys.stdin.isatty() and hasattr(sys.stdin, 'fileno'):
input_fd = os.dup(sys.stdin.fileno())
input_multiprocess_fd = MultiProcessFd(input_fd)
@@ -1162,7 +1144,6 @@ def child_fun():
target=_setup_pkg_and_run,
args=(serialized_pkg, function, kwargs, child_pipe,
input_multiprocess_fd))
p.start()
except InstallError as e:

View File

@@ -14,7 +14,7 @@
from llnl.util.filesystem import force_remove, working_dir
from spack.build_environment import InstallError
from spack.directives import conflicts, depends_on
from spack.directives import depends_on
from spack.operating_systems.mac_os import macos_version
from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable
@@ -76,7 +76,7 @@ def patch_config_files(self):
or self.spec.satisfies('target=riscv64:'))
#: Whether or not to update ``libtool``
#: (currently only for Arm/Clang/Fujitsu/NVHPC compilers)
#: (currently only for Arm/Clang/Fujitsu compilers)
patch_libtool = True
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build`
@@ -104,7 +104,6 @@ def patch_config_files(self):
depends_on('gnuconfig', type='build', when='target=ppc64le:')
depends_on('gnuconfig', type='build', when='target=aarch64:')
depends_on('gnuconfig', type='build', when='target=riscv64:')
conflicts('platform=windows')
@property
def _removed_la_files_log(self):
@@ -252,7 +251,7 @@ def _set_autotools_environment_variables(self):
def _do_patch_libtool(self):
"""If configure generates a "libtool" script that does not correctly
detect the compiler (and patch_libtool is set), patch in the correct
flags for the Arm, Clang/Flang, Fujitsu and NVHPC compilers."""
flags for the Arm, Clang/Flang, and Fujitsu compilers."""
# Exit early if we are required not to patch libtool
if not self.patch_libtool:
@@ -263,12 +262,9 @@ def _do_patch_libtool(self):
self._patch_libtool(libtool_path)
def _patch_libtool(self, libtool_path):
if (
self.spec.satisfies('%arm') or
self.spec.satisfies('%clang') or
self.spec.satisfies('%fj') or
self.spec.satisfies('%nvhpc')
):
if self.spec.satisfies('%arm')\
or self.spec.satisfies('%clang')\
or self.spec.satisfies('%fj'):
fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
fs.filter_file('pic_flag=""\n',
'pic_flag="{0}"\n'
@@ -419,10 +415,7 @@ def configure(self, spec, prefix):
options += self.configure_args()
with working_dir(self.build_directory, create=True):
if self.spec.satisfies('%emscripten'):
inspect.getmodule(self).emconfigure(*options)
else:
inspect.getmodule(self).configure(*options)
inspect.getmodule(self).configure(*options)
def setup_build_environment(self, env):
if (self.spec.platform == 'darwin'

View File

@@ -8,8 +8,7 @@
import os
import platform
import re
import sys
from typing import List
from typing import List # novm
import six
@@ -19,7 +18,6 @@
import spack.build_environment
from spack.directives import conflicts, depends_on, variant
from spack.package import InstallError, PackageBase, run_after
from spack.util.path import convert_to_posix_path
# Regex to extract the primary generator from the CMake generator
# string.
@@ -93,14 +91,7 @@ class CMakePackage(PackageBase):
#:
#: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
#: for more information.
# generator = "Unix Makefiles"
generator = "Ninja"
depends_on('ninja', type='build')
if sys.platform == 'win32':
generator = "Ninja"
depends_on('ninja', type='build')
generator = 'Unix Makefiles'
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
variant('build_type', default='RelWithDebInfo',
@@ -147,11 +138,10 @@ def std_cmake_args(self):
@staticmethod
def _std_args(pkg):
"""Computes the standard cmake arguments for a generic package"""
try:
generator = pkg.generator
except AttributeError:
generator = CMakePackage.generator
generator = 'Unix Makefiles'
# Make sure a valid generator was chosen
valid_primary_generators = ['Unix Makefiles', 'Ninja']
@@ -176,7 +166,7 @@ def _std_args(pkg):
define = CMakePackage.define
args = [
'-G', generator,
define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
define('CMAKE_INSTALL_PREFIX', pkg.prefix),
define('CMAKE_BUILD_TYPE', build_type),
]
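# Illustrative shape of the resulting argument list (install prefix
# hypothetical; define() is assumed to render -D<variable>:<type>=<value>):
#   ['-G', 'Unix Makefiles',
#    '-DCMAKE_INSTALL_PREFIX:STRING=/opt/spack/zlib-1.2.12',
#    '-DCMAKE_BUILD_TYPE:STRING=RelWithDebInfo']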
@@ -195,7 +185,7 @@ def _std_args(pkg):
# Set up CMake rpath
args.extend([
define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', True),
define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
define('CMAKE_INSTALL_RPATH',
spack.build_environment.get_rpaths(pkg)),
define('CMAKE_PREFIX_PATH',
@@ -376,10 +366,7 @@ def cmake(self, spec, prefix):
options += self.cmake_args()
options.append(os.path.abspath(self.root_cmakelists_dir))
with working_dir(self.build_directory, create=True):
if self.spec.satisfies('%emscripten'):
inspect.getmodule(self).emcmake(*options)
else:
inspect.getmodule(self).cmake(*options)
inspect.getmodule(self).cmake(*options)
def build(self, spec, prefix):
"""Make the build targets"""

View File

@@ -106,9 +106,7 @@ def cuda_flags(arch_list):
# This implies that the last one in the list has to be updated at
# each release of a new cuda minor version.
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
conflicts('%clang@14:', when='+cuda ^cuda@:11.6')
@@ -188,3 +186,7 @@ def cuda_flags(arch_list):
# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')
# Make sure cuda_arch can not be used without +cuda
for value in cuda_arch_values:
conflicts('~cuda', when='cuda_arch=' + value)
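For orientation, a minimal sketch of how a recipe opts into this mixin and consumes ``cuda_arch``; the package name, URL, checksum, and CMake flag below are illustrative placeholders, not taken from this diff:

```python
from spack import *  # provides CMakePackage, CudaPackage, version, ...


class ExampleGpu(CMakePackage, CudaPackage):
    """Hypothetical recipe inheriting the +cuda variant, the cuda_arch
    values, and the conflicts declared above."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"

    version('1.0', sha256='...')  # placeholder checksum

    def cmake_args(self):
        args = []
        if '+cuda' in self.spec:
            # The conflicts loop above guarantees cuda_arch only ever
            # appears together with +cuda.
            archs = self.spec.variants['cuda_arch'].value
            args.append('-DCMAKE_CUDA_ARCHITECTURES=' + ';'.join(archs))
        return args
```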


@@ -24,7 +24,6 @@
install,
)
import spack.error
from spack.build_environment import dso_suffix
from spack.package import InstallError, PackageBase, run_after
from spack.util.environment import EnvironmentModifications
@@ -686,15 +685,15 @@ def openmp_libs(self):
# packages.yaml), specifically to provide the 'iomp5' libs.
elif '%gcc' in self.spec:
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
gcc = Executable(self.compiler.cc)
omp_lib_path = gcc(
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
omp_libs = LibraryList(omp_lib_path.strip())
elif '%clang' in self.spec:
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
'--print-file-name', 'libomp.%s' % dso_suffix, output=str)
clang = Executable(self.compiler.cc)
omp_lib_path = clang(
'--print-file-name', 'libomp.%s' % dso_suffix, output=str)
omp_libs = LibraryList(omp_lib_path.strip())
if len(omp_libs) < 1:
@@ -735,9 +734,8 @@ def tbb_libs(self):
# TODO: clang(?)
gcc = self._gcc_executable # must be gcc, not self.compiler.cc
with self.compiler.compiler_environment():
cxx_lib_path = gcc(
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
cxx_lib_path = gcc(
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
debug_print(libs)
@@ -747,9 +745,8 @@ def tbb_libs(self):
def _tbb_abi(self):
'''Select the ABI needed for linking TBB'''
gcc = self._gcc_executable
with self.compiler.compiler_environment():
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
gcc('--version', output=str), re.I | re.M)
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
gcc('--version', output=str), re.I | re.M)
abi = ''
if sys.platform == 'darwin':
pass
@@ -1336,42 +1333,5 @@ def uninstall_ism(self):
debug_print(os.getcwd())
return
@property
def base_lib_dir(self):
"""Provide the library directory located in the base of Intel installation.
"""
d = self.normalize_path('')
d = os.path.join(d, 'lib')
debug_print(d)
return d
@run_after('install')
def modify_LLVMgold_rpath(self):
"""Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
These are needed explicitly at dependent link time when
`ld -plugin LLVMgold.so` is called by the compiler.
"""
if self._has_compilers:
LLVMgold_libs = find_libraries('LLVMgold', self.base_lib_dir,
shared=True, recursive=True)
# Ignore ia32 entries, as they are mostly ignored throughout the rest
# of the file.
# The first entry in rpath preserves the original, the second entry
# is the location of libimf.so. If this relative location is changed
# in compiler releases, then we need to search for libimf.so instead
# of this static path.
for lib in LLVMgold_libs:
if not self.spec.satisfies('^patchelf'):
raise spack.error.SpackError(
'Attempting to patch RPATH in LLVMgold.so.'
+ '`patchelf` dependency should be set in package.py'
)
patchelf = Executable('patchelf')
rpath = ':'.join([patchelf('--print-rpath', lib, output=str).strip(),
'$ORIGIN/../compiler/lib/intel64_lin'])
patchelf('--set-rpath', rpath, lib)
# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)


@@ -10,7 +10,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
from spack.directives import conflicts
from spack.package import PackageBase, run_after
@@ -56,7 +55,6 @@ class MakefilePackage(PackageBase):
#: phase
install_targets = ['install']
conflicts('platform=windows')
#: Callback names for build-time test
build_time_test_callbacks = ['check']
@@ -82,20 +80,14 @@ def build(self, spec, prefix):
as targets.
"""
with working_dir(self.build_directory):
if self.spec.satisfies('%emscripten'):
inspect.getmodule(self).emmake(*self.build_targets)
else:
inspect.getmodule(self).make(*self.build_targets)
inspect.getmodule(self).make(*self.build_targets)
def install(self, spec, prefix):
"""Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
as targets.
"""
with working_dir(self.build_directory):
if self.spec.satisfies('%emscripten'):
inspect.getmodule(self).emmake(*self.install_targets)
else:
inspect.getmodule(self).make(*self.install_targets)
inspect.getmodule(self).make(*self.install_targets)
run_after('build')(PackageBase._run_default_build_time_test_callbacks)


@@ -11,7 +11,7 @@
from llnl.util.filesystem import (
filter_file,
find,
is_nonsymlink_exe_with_shebang,
get_filetype,
path_contains_subdirectory,
same_path,
working_dir,
@@ -216,7 +216,7 @@ def view_file_conflicts(self, view, merge_map):
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
def add_files_to_view(self, view, merge_map):
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
@@ -230,7 +230,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
view.link(src, dst)
elif not os.path.islink(src):
shutil.copy2(src, dst)
is_script = is_nonsymlink_exe_with_shebang(src)
is_script = 'script' in get_filetype(src)
if is_script and not python_is_external:
filter_file(
python_prefix, os.path.abspath(


@@ -1,70 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends
from spack.package import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
class RacketPackage(PackageBase):
"""Specialized class for packages that are built using Racket's
`raco pkg install` and `raco setup` commands.
This class provides the following phases that can be overridden:
* install
* setup
"""
#: Package name, version, and extension on PyPI
maintainers = ['elfprince13']
# Default phases
phases = ['install']
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = 'RacketPackage'
extends('racket')
pkgs = False
subdirectory = None
name = None
parallel = True
@property
def homepage(self):
if self.pkgs:
return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)
@property
def build_directory(self):
ret = os.getcwd()
if self.subdirectory:
ret = os.path.join(ret, self.subdirectory)
return ret
def install(self, spec, prefix):
"""Install everything from build directory."""
raco = Executable("raco")
with working_dir(self.build_directory):
allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
args = ['pkg', 'install', '-t', 'dir', '-n', self.name, '--deps', 'fail',
'--ignore-implies', '--copy', '-i', '-j',
str(determine_number_of_jobs(allow_parallel)),
'--', os.getcwd()]
try:
raco(*args)
except ProcessError:
args.insert(-2, "--skip-installed")
raco(*args)
tty.warn(("Racket package {0} was already installed, uninstalling via "
"Spack may make someone unhappy!").format(self.name))


@@ -112,6 +112,10 @@ class ROCmPackage(PackageBase):
# need amd gpu type for rocm builds
conflicts('amdgpu_target=none', when='+rocm')
# Make sure amdgpu_targets cannot be used without +rocm
for value in amdgpu_targets:
conflicts('~rocm', when='amdgpu_target=' + value)
# https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
# It seems that hip-clang does not (yet?) accept this flag, in which case
# we will still need to set the HCC_AMDGPU_TARGET environment flag in the


@@ -8,7 +8,6 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink
import spack.config
import spack.error
@@ -86,7 +85,7 @@ def symlink(self, mirror_ref):
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
symlink(relative_dst, cosmetic_path)
os.symlink(relative_dst, cosmetic_path)
#: Spack's local cache for downloaded source archives


@@ -5,6 +5,7 @@
import base64
import copy
import datetime
import json
import os
import re
@@ -23,6 +24,7 @@
import spack
import spack.binary_distribution as bindist
import spack.cmd
import spack.compilers as compilers
import spack.config as cfg
import spack.environment as ev
@@ -512,82 +514,6 @@ def format_job_needs(phase_name, strip_compilers, dep_jobs,
return needs_list
def get_change_revisions():
"""If this is a git repo get the revisions to use when checking
for changed packages and spack core modules."""
git_dir = os.path.join(spack.paths.prefix, '.git')
if os.path.exists(git_dir) and os.path.isdir(git_dir):
# TODO: This will only find changed packages from the last
# TODO: commit. While this may work for single merge commits
# TODO: when merging the topic branch into the base, it will
# TODO: require more thought outside of that narrow case.
return 'HEAD^', 'HEAD'
return None, None
def get_stack_changed(env_path, rev1='HEAD^', rev2='HEAD'):
"""Given an environment manifest path and two revisions to compare, return
whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
git = exe.which("git")
if git:
with fs.working_dir(spack.paths.prefix):
git_log = git("diff", "--name-only", rev1, rev2,
output=str, error=os.devnull,
fail_on_error=False).strip()
lines = [] if not git_log else re.split(r'\s+', git_log)
for path in lines:
if '.gitlab-ci.yml' in path or path in env_path:
tty.debug('env represented by {0} changed'.format(
env_path))
tty.debug('touched file: {0}'.format(path))
return True
return False
def compute_affected_packages(rev1='HEAD^', rev2='HEAD'):
"""Determine which packages were added, removed or changed
between rev1 and rev2, and return the names as a set"""
return spack.repo.get_all_package_diffs('ARC', rev1=rev1, rev2=rev2)
def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True):
"""Given a list of package names, and assuming an active and
concretized environment, return a set of concrete specs from
the environment corresponding to any of the affected pkgs (or
optionally to any of their dependencies/dependents).
Arguments:
env (spack.environment.Environment): Active concrete environment
affected_pkgs (List[str]): Affected package names
dependencies (bool): Include dependencies of affected packages
dependents (bool): Include dependents of affected packages
Returns:
A set of concrete specs from the active environment including
those associated with affected packages, and possibly their
dependencies and dependents as well.
"""
affected_specs = set()
all_concrete_specs = env.all_specs()
tty.debug('All concrete environment specs:')
for s in all_concrete_specs:
tty.debug(' {0}/{1}'.format(s.name, s.dag_hash()[:7]))
for pkg in affected_pkgs:
env_matches = [s for s in all_concrete_specs if s.name == pkg]
for match in env_matches:
affected_specs.add(match)
if dependencies:
affected_specs.update(match.traverse(direction='children', root=False))
if dependents:
affected_specs.update(match.traverse(direction='parents', root=False))
return affected_specs
def generate_gitlab_ci_yaml(env, print_summary, output_file,
prune_dag=False, check_index_only=False,
run_optimizer=False, use_dependencies=False,
@@ -620,26 +546,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
tty.verbose("Using CDash auth token from environment")
cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')
prune_untouched_packages = os.environ.get('SPACK_PRUNE_UNTOUCHED', None)
if prune_untouched_packages:
# Requested to prune untouched packages, but assume we won't do that
# unless we're actually in a git repo.
prune_untouched_packages = False
rev1, rev2 = get_change_revisions()
tty.debug('Got following revisions: rev1={0}, rev2={1}'.format(rev1, rev2))
if rev1 and rev2:
# If the stack file itself did not change, proceed with pruning
if not get_stack_changed(env.manifest_path, rev1, rev2):
prune_untouched_packages = True
affected_pkgs = compute_affected_packages(rev1, rev2)
tty.debug('affected pkgs:')
for p in affected_pkgs:
tty.debug(' {0}'.format(p))
affected_specs = get_spec_filter_list(env, affected_pkgs)
tty.debug('all affected specs:')
for s in affected_specs:
tty.debug(' {0}'.format(s.name))
generate_job_name = os.environ.get('CI_JOB_NAME', None)
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
@@ -836,13 +742,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
release_spec_dag_hash = release_spec.dag_hash()
release_spec_build_hash = release_spec.build_hash()
if prune_untouched_packages:
if release_spec not in affected_specs:
tty.debug('Pruning {0}, untouched by change.'.format(
release_spec.name))
spec_record['needs_rebuild'] = False
continue
runner_attribs = find_matching_config(
release_spec, gitlab_ci)
@@ -1004,8 +903,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
tty.debug(debug_msg)
if prune_dag and not rebuild_spec:
tty.debug('Pruning {0}, does not need rebuild.'.format(
release_spec.name))
tty.debug('Pruning spec that does not need to be rebuilt.')
continue
if (broken_spec_urls is not None and
@@ -1025,7 +923,16 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
cdash_build_name = get_cdash_build_name(
release_spec, build_group)
all_job_names.append(cdash_build_name)
related_builds = [] # Used for relating CDash builds
if spec_label in dependencies:
related_builds = (
[spec_labels[d]['spec'].name
for d in dependencies[spec_label]])
job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
sorted(related_builds))
variables.update(job_vars)
@@ -1040,6 +947,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
local_mirror_dir, 'build_cache')
artifact_paths.extend([os.path.join(bc_root, p) for p in [
bindist.tarball_name(release_spec, '.spec.json'),
bindist.tarball_name(release_spec, '.cdashid'),
bindist.tarball_directory_name(release_spec),
]])
@@ -1206,10 +1114,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
if pr_mirror_url:
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:
output_object['variables']['SPACK_CI_STACK_NAME'] = spack_stack_name
sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value
@@ -1329,9 +1233,11 @@ def configure_compilers(compiler_action, scope=None):
return None
def get_concrete_specs(env, root_spec, job_name, compiler_action):
def get_concrete_specs(env, root_spec, job_name, related_builds,
compiler_action):
spec_map = {
'root': None,
'deps': {},
}
if compiler_action == 'FIND_ANY':
@@ -1355,9 +1261,161 @@ def get_concrete_specs(env, root_spec, job_name, compiler_action):
spec_map['root'] = concrete_root
spec_map[job_name] = concrete_root[job_name]
if related_builds:
for dep_job_name in related_builds.split(';'):
spec_map['deps'][dep_job_name] = concrete_root[dep_job_name]
return spec_map
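With the ``related_builds`` parameter restored above, the returned map gains one ``deps`` entry per related build. A hedged illustration of its shape (names hypothetical; strings stand in for concrete Spec objects):

```python
# Illustrative only: in the real map the values are concrete Specs.
spec_map = {
    'root': '<concrete root Spec>',
    'deps': {'zlib': '<concrete Spec for the zlib dependency>'},
    'my-job': '<concrete Spec for this job, keyed by job name>',
}
print(sorted(spec_map))
```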
def register_cdash_build(build_name, base_url, project, site, track):
url = base_url + '/api/v1/addBuild.php'
time_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
build_id = None
build_stamp = '{0}-{1}'.format(time_stamp, track)
payload = {
"project": project,
"site": site,
"name": build_name,
"stamp": build_stamp,
}
tty.debug('Registering cdash build to {0}, payload:'.format(url))
tty.debug(payload)
enc_data = json.dumps(payload).encode('utf-8')
headers = {
'Content-Type': 'application/json',
}
opener = build_opener(HTTPHandler)
request = Request(url, data=enc_data, headers=headers)
try:
response = opener.open(request)
response_code = response.getcode()
if response_code != 200 and response_code != 201:
msg = 'Adding build failed (response code = {0})'.format(response_code)
tty.warn(msg)
return (None, None)
response_text = response.read()
response_json = json.loads(response_text)
build_id = response_json['buildid']
except Exception as e:
print("Registering build in CDash failed: {0}".format(e))
return (build_id, build_stamp)
def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
cdashids_mirror_urls):
if not job_build_id:
return
dep_map = spec_map['deps']
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
cdash_api_url = '{0}/api/v1/relateBuilds.php'.format(cdash_base_url)
for dep_pkg_name in dep_map:
tty.debug('Fetching cdashid file for {0}'.format(dep_pkg_name))
dep_spec = dep_map[dep_pkg_name]
dep_build_id = None
for url in cdashids_mirror_urls:
try:
if url:
dep_build_id = read_cdashid_from_mirror(dep_spec, url)
break
except web_util.SpackWebError:
tty.debug('Did not find cdashid for {0} on {1}'.format(
dep_pkg_name, url))
else:
tty.warn('Did not find cdashid for {0} anywhere'.format(
dep_pkg_name))
return
payload = {
"project": cdash_project,
"buildid": job_build_id,
"relatedid": dep_build_id,
"relationship": "depends on"
}
enc_data = json.dumps(payload).encode('utf-8')
opener = build_opener(HTTPHandler)
request = Request(cdash_api_url, data=enc_data, headers=headers)
try:
response = opener.open(request)
response_code = response.getcode()
if response_code != 200 and response_code != 201:
msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
job_build_id, dep_build_id, response_code)
tty.warn(msg)
return
response_text = response.read()
tty.debug('Relate builds response: {0}'.format(response_text))
except Exception as e:
print("Relating builds in CDash failed: {0}".format(e))
def write_cdashid_to_mirror(cdashid, spec, mirror_url):
if not spec.concrete:
tty.die('Can only write cdashid for concrete spec to mirror')
with TemporaryDirectory() as tmpdir:
local_cdash_path = os.path.join(tmpdir, 'job.cdashid')
with open(local_cdash_path, 'w') as fd:
fd.write(cdashid)
buildcache_name = bindist.tarball_name(spec, '')
cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
remote_url = os.path.join(
mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
tty.debug('pushing cdashid to url')
tty.debug(' local file path: {0}'.format(local_cdash_path))
tty.debug(' remote url: {0}'.format(remote_url))
try:
web_util.push_to_url(local_cdash_path, remote_url)
except Exception as inst:
# No matter what went wrong here, don't allow the pipeline to fail
# just because there was an issue storing the cdashid on the mirror
msg = 'Failed to write cdashid {0} to mirror {1}'.format(
cdashid, mirror_url)
tty.warn(inst)
tty.warn(msg)
def read_cdashid_from_mirror(spec, mirror_url):
if not spec.concrete:
tty.die('Can only read cdashid for concrete spec from mirror')
buildcache_name = bindist.tarball_name(spec, '')
cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
url = os.path.join(
mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
resp_url, resp_headers, response = web_util.read_from_url(url)
contents = response.fp.read()
return int(contents)
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
"""Unchecked version of the public API, for easier mocking"""
unsigned = not sign_binaries
@@ -1508,7 +1566,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
# Next attempt to clone your local spack repo into the repro dir
with fs.working_dir(repro_dir):
clone_out = git("clone", spack_git_path, "spack",
clone_out = git("clone", spack_git_path,
output=str, error=os.devnull,
fail_on_error=False)


@@ -308,7 +308,8 @@ def optimizer(yaml):
# try factoring out commonly repeated portions
common_job = {
'variables': {
'SPACK_COMPILER_ACTION': 'NONE'
'SPACK_COMPILER_ACTION': 'NONE',
'SPACK_RELATED_BUILDS_CDASH': ''
},
'after_script': ['rm -rf "./spack"'],


@@ -154,6 +154,7 @@ def parse_specs(args, **kwargs):
concretize = kwargs.get('concretize', False)
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)
reuse = kwargs.get('reuse', False)
try:
sargs = args
@@ -162,7 +163,7 @@ def parse_specs(args, **kwargs):
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests) # implies normalize
spec.concretize(tests=tests, reuse=reuse) # implies normalize
elif normalize:
spec.normalize(tests=tests)


@@ -26,10 +26,6 @@ def setup_parser(subparser):
def activate(parser, args):
tty.warn("spack activate is deprecated in favor of "
"environments and will be removed in v0.19.0")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
tty.die("activate requires one spec. %d given." % len(specs))


@@ -180,6 +180,9 @@ def setup_parser(subparser):
download.add_argument(
'-p', '--path', default=None,
help="Path to directory where tarball should be downloaded")
download.add_argument(
'-c', '--require-cdashid', action='store_true', default=False,
help="Require .cdashid file to be downloaded with buildcache entry")
download.set_defaults(func=download_fn)
# Get buildcache name
@@ -437,7 +440,9 @@ def download_fn(args):
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
least one of the required buildcache components."""
least one of the required buildcache components. Normally, just the
tarball and .spec.json files are required, but if the --require-cdashid
argument was provided, then a .cdashid file is also required."""
if not args.spec and not args.spec_file:
tty.msg('No specs provided, exiting.')
sys.exit(0)
@@ -447,7 +452,9 @@ def download_fn(args):
sys.exit(0)
spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(spec, args.path)
result = bindist.download_single_spec(
spec, args.path, require_cdashid=args.require_cdashid
)
if not result:
sys.exit(1)
@@ -553,6 +560,11 @@ def copy_fn(args):
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
cdashidfile_rel_path = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)
# Make sure directory structure exists before attempting to copy
os.makedirs(os.path.dirname(tarball_dest_path))
@@ -566,6 +578,11 @@ def copy_fn(args):
tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
# Copy the cdashid file (if exists) to the destination mirror
if os.path.exists(cdashid_src_path):
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
def sync_fn(args):
""" Syncs binaries (and associated metadata) from one mirror to another.
@@ -650,6 +667,8 @@ def sync_fn(args):
build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
os.path.join(
build_cache_dir, bindist.tarball_name(s, '.spec.json')),
os.path.join(
build_cache_dir, bindist.tarball_name(s, '.cdashid'))
])
tmpdir = tempfile.mkdtemp()


@@ -57,32 +57,32 @@ def checksum(parser, args):
pkg = spack.repo.get(args.package)
url_dict = {}
versions = args.versions
if (not versions) and args.preferred:
versions = [preferred_version(pkg)]
if versions:
remote_versions = None
for version in versions:
if args.versions:
# If the user asked for specific versions, use those
for version in args.versions:
version = ver(version)
if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or "
"version ranges. Use unambiguous versions.")
url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# If we get here, it's because no valid url was provided by the package;
# do the expensive fallback to try to recover.
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]
url_dict[version] = pkg.url_for_version(version)
elif args.preferred:
version = preferred_version(pkg)
url_dict = dict([(version, pkg.url_for_version(version))])
else:
# Otherwise, see what versions we can find online
url_dict = pkg.fetch_remote_versions()
if not url_dict:
tty.die("Could not find any versions for {0}".format(pkg.name))
if not url_dict:
tty.die("Could not find any versions for {0}".format(pkg.name))
# And ensure the specified version URLs take precedence, if available
try:
explicit_dict = {}
for v in pkg.versions:
if not v.isdevelop():
explicit_dict[v] = pkg.url_for_version(v)
url_dict.update(explicit_dict)
except spack.package.NoURLError:
pass
version_lines = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, keep_stage=args.keep_stage,


@@ -196,6 +196,7 @@ def ci_rebuild(args):
job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
@@ -235,6 +236,7 @@ def ci_rebuild(args):
tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
tty.debug('cdash_site = {0}'.format(cdash_site))
tty.debug('related_builds = {0}'.format(related_builds))
tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))
# Is this a pipeline run on a spack PR or a merge to develop? It might
@@ -277,7 +279,7 @@ def ci_rebuild(args):
# Whatever form of root_spec we got, use it to get a map giving us concrete
# specs for this job and all of its dependencies.
spec_map = spack_ci.get_concrete_specs(
env, root_spec, job_spec_pkg_name, compiler_action)
env, root_spec, job_spec_pkg_name, related_builds, compiler_action)
job_spec = spec_map[job_spec_pkg_name]
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
@@ -360,10 +362,8 @@ def ci_rebuild(args):
# Write information about spack into an artifact in the repro dir
spack_info = spack_ci.get_spack_info()
spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
with open(spack_info_file, 'wb') as fd:
fd.write(b'\n')
fd.write(spack_info.encode('utf8'))
fd.write(b'\n')
with open(spack_info_file, 'w') as fd:
fd.write('\n{0}\n'.format(spack_info))
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a full hash match already
@@ -373,6 +373,9 @@ def ci_rebuild(args):
pipeline_mirror_url,
cfg.default_modify_scope())
cdash_build_id = None
cdash_build_stamp = None
# Check configured mirrors for a built spec with a matching full hash
matches = bindist.get_mirrors_for_spec(
job_spec, full_hash_match=True, index_only=False)
@@ -397,6 +400,7 @@ def ci_rebuild(args):
bindist.download_single_spec(
job_spec,
build_cache_dir,
require_cdashid=False,
mirror_url=matching_mirror
)
@@ -423,8 +427,16 @@ def ci_rebuild(args):
if not verify_binaries:
install_args.append('--no-check-signature')
# If CDash reporting is enabled, we first register this build with
# the specified CDash instance, then relate the build to those of
# its dependencies.
if enable_cdash:
# Add additional arguments to `spack install` for CDash reporting.
tty.debug('CDash: Registering build')
(cdash_build_id,
cdash_build_stamp) = spack_ci.register_cdash_build(
cdash_build_name, cdash_base_url, cdash_project,
cdash_site, job_spec_buildgroup)
cdash_upload_url = '{0}/submit.php?project={1}'.format(
cdash_base_url, cdash_project_enc)
@@ -432,9 +444,15 @@ def ci_rebuild(args):
'--cdash-upload-url', cdash_upload_url,
'--cdash-build', cdash_build_name,
'--cdash-site', cdash_site,
'--cdash-track', job_spec_buildgroup,
'--cdash-buildstamp', cdash_build_stamp,
])
if cdash_build_id is not None:
tty.debug('CDash: Relating build with dependency builds')
spack_ci.relate_cdash_builds(
spec_map, cdash_base_url, cdash_build_id, cdash_project,
[pipeline_mirror_url, pr_mirror_url, remote_mirror_url])
# A compiler action of 'FIND_ANY' means we are building a bootstrap
# compiler or one of its deps.
# TODO: when compilers are dependencies, we should include --no-add
@@ -542,6 +560,12 @@ def ci_rebuild(args):
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
)
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
cdash_build_id, buildcache_mirror_url))
spack_ci.write_cdashid_to_mirror(
cdash_build_id, job_spec, buildcache_mirror_url)
# Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either
# artifacts buildcache is enabled or a temporary storage url
@@ -551,6 +575,12 @@ def ci_rebuild(args):
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
)
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
cdash_build_id, pipeline_mirror_url))
spack_ci.write_cdashid_to_mirror(
cdash_build_id, job_spec, pipeline_mirror_url)
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:


@@ -117,7 +117,7 @@ def format(self, cmd):
'virtual': '_providers',
'namespace': '_repos',
'hash': '_all_resource_hashes',
'pytest': '_unit_tests',
'pytest': '_tests',
}


@@ -35,9 +35,6 @@ def shell_init_instructions(cmd, equivalent):
color.colorize("@*c{For fish:}"),
" source %s/setup-env.fish" % spack.paths.share_path,
"",
color.colorize("@*c{For Windows batch:}"),
" source %s/spack_cmd.bat" % spack.paths.share_path,
"",
"Or, if you do not want to use shell support, run " + (
"one of these" if shell_specific else "this") + " instead:",
"",
@@ -48,7 +45,6 @@ def shell_init_instructions(cmd, equivalent):
equivalent.format(sh_arg="--sh ") + " # bash/zsh/sh",
equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
equivalent.format(sh_arg="--fish") + " # fish",
equivalent.format(sh_arg="--bat ") + " # batch"
]
else:
msg += [" " + equivalent]


@@ -322,68 +322,11 @@ def add_cdash_args(subparser, add_help):
)
class ConfigSetAction(argparse.Action):
"""Generic action for setting spack config options from CLI.
This works like a ``store_const`` action but you can set the
``dest`` to some Spack configuration path (like ``concretizer:reuse``)
and the ``const`` will be stored there using ``spack.config.set()``
"""
def __init__(self,
option_strings,
dest,
const,
default=None,
required=False,
help=None,
metavar=None):
# save the config option we're supposed to set
self.config_path = dest
# destination is translated to a legal python identifier by
# substituting '_' for ':'.
dest = dest.replace(":", "_")
super(ConfigSetAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
const=const,
default=default,
required=required,
help=help
)
def __call__(self, parser, namespace, values, option_string):
# Retrieve the name of the config option and set it to
# the const from the constructor or a value from the CLI.
# Note that this is only called if the argument is actually
# specified on the command line.
spack.config.set(self.config_path, self.const, scope="command_line")
def add_concretizer_args(subparser):
"""Add a subgroup of arguments for controlling concretization.
These will appear in a separate group called 'concretizer arguments'.
There's no need to handle them in your command logic -- they all use
``ConfigSetAction``, which automatically handles setting configuration
options.
If you *do* need to access a value passed on the command line, you can
get at, e.g., the ``concretizer:reuse`` via ``args.concretizer_reuse``.
Just substitute ``_`` for ``:``.
"""
subgroup = subparser.add_argument_group("concretizer arguments")
subgroup.add_argument(
'-U', '--fresh', action=ConfigSetAction, dest="concretizer:reuse",
const=False, default=None,
help='do not reuse installed deps; build newest configuration'
)
subgroup.add_argument(
'--reuse', action=ConfigSetAction, dest="concretizer:reuse",
const=True, default=None,
help='reuse installed dependencies/buildcaches when possible'
@arg
def reuse():
return Args(
'--reuse', action='store_true', default=False,
help='reuse installed dependencies'
)
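For context on the ``ConfigSetAction`` mechanism this hunk swaps out: a hedged, self-contained sketch of a ``store_const``-style action that writes to a config store instead of the argparse namespace (the ``CONFIG`` dict is a stand-in for ``spack.config``):

```python
import argparse

CONFIG = {}  # stand-in for spack.config in this sketch


class ConfigSetAction(argparse.Action):
    """Store ``const`` under a config path such as 'concretizer:reuse'."""

    def __init__(self, option_strings, dest, const, **kwargs):
        self.config_path = dest
        # Translate the path to a legal namespace attribute name.
        super().__init__(option_strings, dest.replace(':', '_'),
                         nargs=0, const=const, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        # Runs only if the flag actually appears on the command line.
        CONFIG[self.config_path] = self.const


parser = argparse.ArgumentParser()
parser.add_argument('--reuse', action=ConfigSetAction,
                    dest='concretizer:reuse', const=True, default=None)
parser.parse_args(['--reuse'])
assert CONFIG['concretizer:reuse'] is True
```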


@@ -13,6 +13,7 @@
def setup_parser(subparser):
spack.cmd.common.arguments.add_common_arguments(subparser, ['reuse'])
subparser.add_argument(
'-f', '--force', action='store_true',
help="Re-concretize even if already concretized.")
@@ -23,8 +24,6 @@ def setup_parser(subparser):
dependencies are only added for the environment's root specs. When 'all' is
chosen, test dependencies are enabled for all packages in the environment.""")
spack.cmd.common.arguments.add_concretizer_args(subparser)
def concretize(parser, args):
env = spack.cmd.require_active_env(cmd_name='concretize')
@@ -37,6 +36,8 @@ def concretize(parser, args):
tests = False
with env.write_transaction():
concretized_specs = env.concretize(force=args.force, tests=tests)
concretized_specs = env.concretize(
force=args.force, tests=tests, reuse=args.reuse
)
ev.display_specs(concretized_specs)
env.write()


@@ -258,42 +258,6 @@ def install(self, spec, prefix):
bazel()"""
class RacketPackageTemplate(PackageTemplate):
"""Provides approriate overrides for Racket extensions"""
base_class_name = 'RacketPackage'
url_line = """\
# FIXME: set the proper location from which to fetch your package
git = "git@github.com:example/example.git"
"""
dependencies = """\
# FIXME: Add dependencies if required. Only add the racket dependency
# if you need specific versions. A generic racket dependency is
# added implicitly by the RacketPackage class.
# depends_on('racket@8.3:', type=('build', 'run'))"""
body_def = """\
# FIXME: specify the name of the package,
# as it should appear to ``raco pkg install``
name = '{0}'
# FIXME: set to true if published on pkgs.racket-lang.org
# pkgs = False
# FIXME: specify path to the root directory of the
# package, if not the base directory
# subdirectory = None
"""
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
if not name.startswith('rkt-'):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
name = 'rkt-{0}'.format(name)
self.body_def = self.body_def.format(name[4:])
super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)
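A hedged console sketch of the renaming behavior implemented by the template above ('scribble' is a hypothetical package name):

```console
$ spack create --template racket --name scribble
==> Changing package name from scribble to rkt-scribble
```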
class PythonPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for python extensions"""
base_class_name = 'PythonPackage'
@@ -572,7 +536,6 @@ def __init__(self, name, *args, **kwargs):
'bazel': BazelPackageTemplate,
'python': PythonPackageTemplate,
'r': RPackageTemplate,
'racket': RacketPackageTemplate,
'perlmake': PerlmakePackageTemplate,
'perlbuild': PerlbuildPackageTemplate,
'octave': OctavePackageTemplate,
@@ -795,15 +758,7 @@ def get_versions(args, name):
# Default guesser
guesser = BuildSystemGuesser()
valid_url = True
try:
spack.util.url.require_url_format(args.url)
if args.url.startswith('file://'):
valid_url = False # No point in spidering these
except ValueError:
valid_url = False
if args.url is not None and args.template != 'bundle' and valid_url:
if args.url is not None and args.template != 'bundle':
# Find available versions
try:
url_dict = spack.util.web.find_versions_of_archive(args.url)


@@ -8,9 +8,9 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.graph
import spack.store
from spack.filesystem_view import YamlFilesystemView
from spack.graph import topological_sort
description = "deactivate a package extension"
section = "extensions"
@@ -32,10 +32,6 @@ def setup_parser(subparser):
def deactivate(parser, args):
tty.warn("spack deactivate is deprecated in favor of "
"environments and will be removed in v0.19.0")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
tty.die("deactivate requires one spec. %d given." % len(specs))
@@ -72,8 +68,11 @@ def deactivate(parser, args):
tty.msg("Deactivating %s and all dependencies." %
pkg.spec.short_spec)
nodes_in_topological_order = spack.graph.topological_sort(spec)
for espec in reversed(nodes_in_topological_order):
topo_order = topological_sort(spec)
index = spec.index()
for name in topo_order:
espec = index[name]
epkg = espec.package
if epkg.extends(pkg.extendee_spec):
if epkg.is_activated(view) or args.force:


@@ -19,7 +19,6 @@
import os
import llnl.util.tty as tty
from llnl.util.symlink import symlink
import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -124,7 +123,7 @@ def deprecate(parser, args):
if not answer:
tty.die('Will not deprecate any packages.')
link_fn = os.link if args.link_type == 'hard' else symlink
link_fn = os.link if args.link_type == 'hard' else os.symlink
for dcate, dcator in zip(all_deprecate, all_deprecators):
dcate.package.do_deprecate(dcator, link_fn)


@@ -19,7 +19,7 @@
def setup_parser(subparser):
arguments.add_common_arguments(subparser, ['jobs'])
arguments.add_common_arguments(subparser, ['jobs', 'reuse'])
subparser.add_argument(
'-d', '--source-path', dest='source_path', default=None,
help="path to source directory. defaults to the current directory")
@@ -59,8 +59,6 @@ def setup_parser(subparser):
cd_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
spack.cmd.common.arguments.add_concretizer_args(subparser)
def dev_build(self, args):
if not args.spec:
@@ -88,7 +86,7 @@ def dev_build(self, args):
# Forces the build to run out of the source directory.
spec.constrain('dev_path=%s' % source_path)
spec.concretize()
spec.concretize(reuse=args.reuse)
package = spack.repo.get(spec)
if package.installed:


@@ -60,9 +60,6 @@ def env_activate_setup_parser(subparser):
shells.add_argument(
'--fish', action='store_const', dest='shell', const='fish',
help="print fish commands to activate the environment")
shells.add_argument(
'--bat', action='store_const', dest='shell', const='bat',
help="print bat commands to activate the environment")
view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument(
@@ -176,9 +173,6 @@ def env_deactivate_setup_parser(subparser):
shells.add_argument(
'--fish', action='store_const', dest='shell', const='fish',
help="print fish commands to activate the environment")
shells.add_argument(
'--bat', action='store_const', dest='shell', const='bat',
help="print bat commands to activate the environment")
def env_deactivate(args):


@@ -39,17 +39,8 @@ def setup_parser(subparser):
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope('packages'),
help="configuration scope to modify")
find_parser.add_argument(
'--all', action='store_true',
help="search for all packages that Spack knows about"
)
spack.cmd.common.arguments.add_common_arguments(find_parser, ['tags'])
find_parser.add_argument('packages', nargs=argparse.REMAINDER)
find_parser.epilog = (
'The search is by default on packages tagged with the "build-tools" or '
'"core-packages" tags. Use the --all option to search for every possible '
'package Spack knows how to find.'
)
sp.add_parser(
'list', help='list detectable packages, by repository and name'
@@ -57,14 +48,6 @@ def setup_parser(subparser):
def external_find(args):
# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
args.tags = ['core-packages', 'build-tools']
# If the user specified both --all and --tag, then --all has precedence
if args.all and args.tags:
args.tags = []
# Construct the list of possible packages to be detected
packages_to_check = []
@@ -81,18 +64,15 @@ def external_find(args):
# Since tags are cached it's much faster to construct what we need
# to search directly, rather than filtering after the fact
packages_to_check = [
spack.repo.get(pkg) for tag in args.tags for pkg in
spack.repo.path.packages_with_tags(tag)
spack.repo.get(pkg) for pkg in
spack.repo.path.packages_with_tags(*args.tags)
]
packages_to_check = list(set(packages_to_check))
# If the list of packages is empty, search for every possible package
if not args.tags and not packages_to_check:
packages_to_check = spack.repo.path.all_packages()
detected_packages = spack.detection.by_executable(packages_to_check)
detected_packages.update(spack.detection.by_library(packages_to_check))
new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
)
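The search modes referenced in this hunk, summarized as a hedged console sketch (flag spellings taken from the parser code above):

```console
$ spack external find            # default: packages tagged build-tools/core-packages
$ spack external find --all      # consider every package Spack can detect
$ spack external find cmake      # restrict the search to named packages
```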


@@ -202,12 +202,6 @@ def display_env(env, args, decorator):
def find(parser, args):
if args.bootstrap:
tty.warn(
"`spack find --bootstrap` is deprecated and will be removed in v0.19.",
"Use `spack --bootstrap find` instead."
)
if args.bootstrap:
bootstrap_store_path = spack.bootstrap.store_path()
with spack.bootstrap.ensure_bootstrap_configuration():


@@ -8,7 +8,6 @@
import spack.binary_distribution
import spack.cmd.common.arguments as arguments
import spack.mirror
import spack.paths
import spack.util.gpg
@@ -201,13 +200,8 @@ def gpg_verify(args):
def gpg_publish(args):
"""publish public keys to a build cache"""
mirror = None
if args.directory:
mirror = spack.mirror.Mirror(args.directory, args.directory)
elif args.mirror_name:
mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name)
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
# TODO(opadron): switch to using the mirror args once #17547 is merged
mirror = args.directory
spack.binary_distribution.push_keys(
mirror, keys=args.keys, regenerate_index=args.rebuild_index)


@@ -5,7 +5,6 @@
from __future__ import print_function
import inspect
import textwrap
from six.moves import zip_longest
@@ -18,7 +17,7 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
from spack.package import has_test_method, preferred_version
from spack.package import preferred_version
description = 'get detailed information on a particular package'
section = 'basic'
@@ -40,25 +39,6 @@ def pad(string):
def setup_parser(subparser):
subparser.add_argument(
'-a', '--all', action='store_true', default=False,
help="output all package information"
)
options = [
('--detectable', print_detectable.__doc__),
('--maintainers', print_maintainers.__doc__),
('--no-dependencies', 'do not ' + print_dependencies.__doc__),
('--no-variants', 'do not ' + print_variants.__doc__),
('--no-versions', 'do not ' + print_versions.__doc__),
('--phases', print_phases.__doc__),
('--tags', print_tags.__doc__),
('--tests', print_tests.__doc__),
('--virtuals', print_virtuals.__doc__),
]
for opt, help_comment in options:
subparser.add_argument(opt, action='store_true', help=help_comment)
arguments.add_common_arguments(subparser, ['package'])
@@ -165,21 +145,27 @@ def lines(self):
yield " " + self.fmt % t
def print_dependencies(pkg):
"""output build, link, and run package dependencies"""
def print_text_info(pkg):
"""Print out a plain text description of a package."""
for deptype in ('build', 'link', 'run'):
header = section_title(
'{0}: '
).format(pkg.build_system_class) + pkg.name
color.cprint(header)
color.cprint('')
color.cprint(section_title('Description:'))
if pkg.__doc__:
color.cprint(color.cescape(pkg.format_doc(indent=4)))
else:
color.cprint(" None")
color.cprint(section_title('Homepage: ') + pkg.homepage)
if len(pkg.maintainers) > 0:
mnt = " ".join(['@@' + m for m in pkg.maintainers])
color.cprint('')
color.cprint(section_title('%s Dependencies:' % deptype.capitalize()))
deps = sorted(pkg.dependencies_of_type(deptype))
if deps:
colify(deps, indent=4)
else:
color.cprint(' None')
def print_detectable(pkg):
"""output information on external detection"""
color.cprint(section_title('Maintainers: ') + mnt)
color.cprint('')
color.cprint(section_title('Externally Detectable: '))
@@ -201,31 +187,6 @@ def print_detectable(pkg):
else:
color.cprint(' False')
def print_maintainers(pkg):
"""output package maintainers"""
if len(pkg.maintainers) > 0:
mnt = " ".join(['@@' + m for m in pkg.maintainers])
color.cprint('')
color.cprint(section_title('Maintainers: ') + mnt)
def print_phases(pkg):
"""output installation phases"""
if hasattr(pkg, 'phases') and pkg.phases:
color.cprint('')
color.cprint(section_title('Installation Phases:'))
phase_str = ''
for phase in pkg.phases:
phase_str += " {0}".format(phase)
color.cprint(phase_str)
def print_tags(pkg):
"""output package tags"""
color.cprint('')
color.cprint(section_title("Tags: "))
if hasattr(pkg, 'tags'):
@@ -234,90 +195,6 @@ def print_tags(pkg):
else:
color.cprint(" None")
def print_tests(pkg):
"""output relevant build-time and stand-alone tests"""
# Some built-in base packages (e.g., Autotools) define callbacks (e.g.,
# check) that are inherited by descendant packages. These checks may not
# result in build-time testing if the package's build does not implement
# the expected functionality (e.g., a 'check' or 'test' target).
#
# So the presence of a callback in Spack does not necessarily correspond
# to the actual presence of build-time tests for a package.
for callbacks, phase in [(pkg.build_time_test_callbacks, 'Build'),
(pkg.install_time_test_callbacks, 'Install')]:
color.cprint('')
color.cprint(section_title('Available {0} Phase Test Methods:'
.format(phase)))
names = []
if callbacks:
for name in callbacks:
if getattr(pkg, name, False):
names.append(name)
if names:
colify(sorted(names), indent=4)
else:
color.cprint(' None')
# PackageBase defines an empty install/smoke test but we want to know
# if it has been overridden and, therefore, assumed to be implemented.
color.cprint('')
color.cprint(section_title('Stand-Alone/Smoke Test Methods:'))
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
pkg_base = spack.package.PackageBase
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])
# TODO Refactor START
# Use code from package.py's test_process IF this functionality is
# accepted.
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))
# hack for compilers that are not dependencies (yet)
# TODO: this all eventually goes away
c_names = ('gcc', 'intel', 'intel-parallel-studio', 'pgi')
if pkg.name in c_names:
v_names.extend(['c', 'cxx', 'fortran'])
if pkg.spec.satisfies('llvm+clang'):
v_names.extend(['c', 'cxx'])
# TODO Refactor END
v_specs = [spack.spec.Spec(v_name) for v_name in v_names]
for v_spec in v_specs:
try:
pkg = v_spec.package
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
names.append('{0}.test'.format(pkg.name.lower()))
except spack.repo.UnknownPackageError:
pass
if names:
colify(sorted(names), indent=4)
else:
color.cprint(' None')
def print_variants(pkg):
"""output variants"""
color.cprint('')
color.cprint(section_title('Variants:'))
formatter = VariantFormatter(pkg.variants)
for line in formatter.lines:
color.cprint(color.cescape(line))
def print_versions(pkg):
"""output versions"""
color.cprint('')
color.cprint(section_title('Preferred version: '))
@@ -361,9 +238,29 @@ def print_versions(pkg):
line = version(' {0}'.format(pad(v))) + color.cescape(url)
color.cprint(line)
color.cprint('')
color.cprint(section_title('Variants:'))
def print_virtuals(pkg):
"""output virtual packages"""
formatter = VariantFormatter(pkg.variants)
for line in formatter.lines:
color.cprint(color.cescape(line))
if hasattr(pkg, 'phases') and pkg.phases:
color.cprint('')
color.cprint(section_title('Installation Phases:'))
phase_str = ''
for phase in pkg.phases:
phase_str += " {0}".format(phase)
color.cprint(phase_str)
for deptype in ('build', 'link', 'run'):
color.cprint('')
color.cprint(section_title('%s Dependencies:' % deptype.capitalize()))
deps = sorted(pkg.dependencies_of_type(deptype))
if deps:
colify(deps, indent=4)
else:
color.cprint(' None')
color.cprint('')
color.cprint(section_title('Virtual Packages: '))
@@ -383,39 +280,9 @@ def print_virtuals(pkg):
else:
color.cprint(" None")
color.cprint('')
def info(parser, args):
pkg = spack.repo.get(args.package)
# Output core package information
header = section_title(
'{0}: '
).format(pkg.build_system_class) + pkg.name
color.cprint(header)
color.cprint('')
color.cprint(section_title('Description:'))
if pkg.__doc__:
color.cprint(color.cescape(pkg.format_doc(indent=4)))
else:
color.cprint(" None")
color.cprint(section_title('Homepage: ') + pkg.homepage)
# Now output optional information in expected order
sections = [
(args.all or args.maintainers, print_maintainers),
(args.all or args.detectable, print_detectable),
(args.all or args.tags, print_tags),
(args.all or not args.no_versions, print_versions),
(args.all or not args.no_variants, print_variants),
(args.all or args.phases, print_phases),
(args.all or not args.no_dependencies, print_dependencies),
(args.all or args.virtuals, print_virtuals),
(args.all or args.tests, print_tests),
]
for print_it, func in sections:
if print_it:
func(pkg)
color.cprint('')
print_text_info(pkg)


@@ -78,7 +78,7 @@ def setup_parser(subparser):
subparser.add_argument(
'-u', '--until', type=str, dest='until', default=None,
help="phase to stop after when installing (default None)")
arguments.add_common_arguments(subparser, ['jobs'])
arguments.add_common_arguments(subparser, ['jobs', 'reuse'])
subparser.add_argument(
'--overwrite', action='store_true',
help="reinstall an existing spec, even if it has dependents")
@@ -182,8 +182,6 @@ def setup_parser(subparser):
arguments.add_cdash_args(subparser, False)
arguments.add_common_arguments(subparser, ['yes_to_all', 'spec'])
spack.cmd.common.arguments.add_concretizer_args(subparser)
def default_log_file(spec):
"""Computes the default filename for the log file and creates
@@ -341,7 +339,7 @@ def get_tests(specs):
if not args.only_concrete:
with env.write_transaction():
concretized_specs = env.concretize(tests=tests)
concretized_specs = env.concretize(tests=tests, reuse=args.reuse)
ev.display_specs(concretized_specs)
# save view regeneration for later, so that we only do it
@@ -399,7 +397,9 @@ def get_tests(specs):
kwargs['tests'] = tests
try:
specs = spack.cmd.parse_specs(args.spec, concretize=True, tests=tests)
specs = spack.cmd.parse_specs(
args.spec, concretize=True, tests=tests, reuse=args.reuse
)
except SpackError as e:
tty.debug(e)
reporter.concretization_report(e.message)


@@ -1,116 +0,0 @@
cmake_minimum_required (VERSION 3.13)
project(spack_installer NONE)
set(PYTHON_VERSION "3.9.0" CACHE STRING "Version of Python to build.")
set(PY_DOWNLOAD_LINK "https://www.paraview.org/files/dependencies")
set(PY_FILENAME "Python-${PYTHON_VERSION}-win64.tar.xz")
set(PYTHON_DIR "Python-${PYTHON_VERSION}")
if (SPACK_VERSION)
set(SPACK_DL "https://github.com/spack/spack/releases/download/v${SPACK_VERSION}")
set(SPACK_FILENAME "spack-${SPACK_VERSION}.tar.gz")
set(SPACK_DIR "spack-${SPACK_VERSION}")
# SPACK DOWNLOAD AND EXTRACTION-----------------------------------
file(DOWNLOAD "${SPACK_DL}/${SPACK_FILENAME}"
"${CMAKE_CURRENT_BINARY_DIR}/${SPACK_FILENAME}"
STATUS download_status
)
list(GET download_status 0 res)
if(res)
list(GET download_status 1 err)
message(FATAL_ERROR "Failed to download ${SPACK_FILENAME} ${err}")
endif()
message(STATUS "Successfully downloaded ${SPACK_FILENAME}")
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz
"${CMAKE_CURRENT_BINARY_DIR}/${SPACK_FILENAME}"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
RESULT_VARIABLE res)
if(NOT res EQUAL 0)
message(FATAL_ERROR "Extraction of ${SPACK_FILENAME} failed.")
endif()
message(STATUS "Extracted ${SPACK_DIR}")
SET(SPACK_SOURCE "${CMAKE_CURRENT_BINARY_DIR}/${SPACK_DIR}")
elseif(SPACK_SOURCE)
get_filename_component(SPACK_DIR ${SPACK_SOURCE} NAME)
else()
message(FATAL_ERROR "Must specify SPACK_VERSION or SPACK_SOURCE")
endif()
# GIT DOWNLOAD----------------------------------------------------
set(GIT_FILENAME "Git-2.31.1-64-bit.exe")
file(DOWNLOAD "https://github.com/git-for-windows/git/releases/download/v2.31.1.windows.1/Git-2.31.1-64-bit.exe"
"${CMAKE_CURRENT_BINARY_DIR}/${GIT_FILENAME}"
STATUS download_status
EXPECTED_HASH "SHA256=c43611eb73ad1f17f5c8cc82ae51c3041a2e7279e0197ccf5f739e9129ce426e"
)
list(GET download_status 0 res)
if(res)
list(GET download_status 1 err)
message(FATAL_ERROR "Failed to download ${GIT_FILENAME} ${err}")
endif()
message(STATUS "Successfully downloaded ${GIT_FILENAME}")
# PYTHON DOWNLOAD AND EXTRACTION-----------------------------------
file(DOWNLOAD "${PY_DOWNLOAD_LINK}/${PY_FILENAME}"
"${CMAKE_CURRENT_BINARY_DIR}/${PY_FILENAME}"
STATUS download_status
EXPECTED_HASH "SHA256=f6aeebc6d1ff77418678ed5612b64ce61be6bc9ef3ab9c291ac557abb1783420"
)
list(GET download_status 0 res)
if(res)
list(GET download_status 1 err)
message(FATAL_ERROR "Failed to download ${PY_FILENAME} ${err}")
endif()
message(STATUS "Successfully downloaded ${PY_FILENAME}")
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz
"${CMAKE_CURRENT_BINARY_DIR}/${PY_FILENAME}"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
RESULT_VARIABLE res)
if(NOT res EQUAL 0)
message(FATAL_ERROR "Extraction of ${PY_FILENAME} failed.")
endif()
message(STATUS "Extracted ${PY_FILENAME}.")
# license must be a .txt or .rtf file
configure_file("${SPACK_LICENSE}" "${CMAKE_CURRENT_BINARY_DIR}/LICENSE.rtf" COPYONLY)
#INSTALLATION COMMANDS---------------------------------------------------
install(DIRECTORY "${SPACK_SOURCE}/"
DESTINATION "${SPACK_DIR}")
install(DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/Python-${PYTHON_VERSION}-win64/"
DESTINATION "${PYTHON_DIR}")
# CPACK Installer Instructions
set(CPACK_PACKAGE_NAME "Spack")
set(CPACK_PACKAGE_VENDOR "Lawrence Livermore National Laboratories")
set(CPACK_PACKAGE_VERSION "0.16.0")
set(CPACK_PACKAGE_DESCRIPTION "A flexible package manager designed to support multiple versions, configurations, platforms, and compilers.")
set(CPACK_PACKAGE_HOMEPAGE_URL "https://spack.io")
set(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}")
set(CPACK_PACKAGE_ICON "${SPACK_LOGO}")
set(CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/README.md")
set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_BINARY_DIR}/LICENSE.rtf")
#set(CPACK_RESOURCE_FILE_WELCOME "${CMAKE_CURRENT_SOURCE_DIR}/NOTICE")
# WIX options (the default)
set(CPACK_GENERATOR "WIX")
set(CPACK_WIX_PRODUCT_ICON "${SPACK_LOGO}")
set(CPACK_WIX_UI_BANNER "${CMAKE_CURRENT_SOURCE_DIR}/banner493x58.bmp")
set(CPACK_WIX_PATCH_FILE "${CMAKE_CURRENT_SOURCE_DIR}/patch.xml")
set(CPACK_WIX_UPGRADE_GUID "D2C703E4-721D-44EC-8016-BCB96BB64E0B")
set(CPACK_WIX_SKIP_PROGRAM_FOLDER TRUE)
set(SHORTCUT_GUID "099213BC-0D37-4F29-B758-60CA2A7E6DDA")
# Set full path to icon, shortcut in spack.wxs
set(SPACK_SHORTCUT "spack_cmd.bat")
configure_file("spack.wxs.in" "${CMAKE_CURRENT_BINARY_DIR}/spack.wxs")
configure_file("bundle.wxs.in" "${CMAKE_CURRENT_BINARY_DIR}/bundle.wxs")
set(CPACK_WIX_EXTRA_SOURCES "${CMAKE_CURRENT_BINARY_DIR}/spack.wxs")
include(CPack)
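This script is normally driven by ``spack make-installer`` (see the README below); a hedged sketch of the underlying configure-and-package steps, with the cache variable names taken from the script above and all paths purely illustrative:

```console
> cmake -S . -B build -DSPACK_VERSION=0.16.0 -DSPACK_LICENSE=C:\path\to\LICENSE.rtf -DSPACK_LOGO=C:\path\to\spack.ico
> cpack --config build\CPackConfig.cmake
```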


@@ -1,85 +0,0 @@
This README is a guide for creating a Spack installer for Windows using the
``make-installer`` command. The installer is an executable file that users
can run to install Spack like any other Windows binary.
Before proceeding, follow the setup instructions in Steps 1 and 2 of
[Getting Started on Windows](https://spack.readthedocs.io/en/latest/getting_started.html#windows_support).
# Step 1: Install prerequisites
The only additional prerequisite for making the installer is WiX. WiX is a
toolset used for .msi creation and can be downloaded and installed at
https://wixtoolset.org/releases/. The Visual Studio extensions are not
necessary.
# Step 2: Make the installer
To use Spack, run ``spack_cmd.bat``. This will provide a Windows command
prompt with an environment properly set up with Spack and its prerequisites.
Ensure that Python and CMake are on your PATH. If needed, you may add the
CMake executable provided by Visual Studio to your path, which will look
something like:
``C:\Program Files (x86)\Microsoft Visual Studio\<year>\<distribution>\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake``
**IMPORTANT**: If you use Tab to complete any part of this path, the console
will automatically wrap it in quotation marks because the path contains
spaces. These quotes are incorrect for our purposes, so remove them before
submitting the command; otherwise you will encounter configuration errors.
There are two ways to create the installer using Spack's ``make-installer``
command. The recommended method is to build the installer from a local
checkout of Spack source (release or development), using the
``-s`` flag to specify the directory of the local checkout. For
example, if the local checkout is in a directory called ``spack-develop``
and you want to generate an installer from the source there, you can use:

``spack make-installer -s spack-develop tmp``
Both the Spack source directory (e.g. ``spack-develop``) and the installer
destination directory (e.g. ``tmp``) may be absolute paths or paths relative
to the current working directory. The entire contents of the specified source
directory will be included in the installer (e.g. ``.git`` files or local
changes).
Alternatively, if you would like to create an installer from a release version
of Spack, say, 0.16.0, and store it in ``tmp``, you can use the following
command:
``spack make-installer -v 0.16.0 tmp``
**IMPORTANT**: Windows features are not currently supported in Spack's
official release branches, so an installer created using this method will
*not* run on Windows.
Regardless of the method used, a file called ``Spack.exe`` will be created
inside the destination directory. This executable bundles the Spack installer
(``Spack.msi``, also located in the destination directory) and the Git
installer.
# Step 3: Run the installer
After accepting the terms of service, select where on your computer you would
like Spack installed, and after a few minutes Spack, Python and git will be
installed and ready for use.
**IMPORTANT**: To avoid permissions issues, it is recommended to select an
install location other than ``C:\Program Files``.
**IMPORTANT**: There is a specific option that must be chosen when letting Git
install. When given the option of adjusting your ``PATH``, choose the
``Git from the command line and also from 3rd-party software`` option. This will
automatically update your ``PATH`` variable to include the ``git`` command.
Certain Spack commands expect ``git`` to be part of the ``PATH``; if this step
is not performed properly, those commands will not work.
If your Spack installation needs to be modified, repaired, or uninstalled,
you can do any of these things by rerunning ``Spack.exe``.
Running the installer creates a desktop shortcut that launches
``spack_cmd.bat`` in a console whose initial directory is wherever Spack was
installed on your computer. If Python is found on your PATH, it will be used
when running Spack; if not, the Python bundled with the installer will be
used.
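
Since several Spack commands shell out to ``git``, a quick way to verify that the installer's ``PATH`` adjustment took effect (assuming any Python 3 is available on the machine) is:

```python
import shutil

# Prints the full path to git.exe if git is on PATH, or None otherwise.
print(shutil.which("git"))
```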

Binary file not shown (image, 29 KiB).


@@ -1,23 +0,0 @@
<?xml version="1.0"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"
     xmlns:bal="http://schemas.microsoft.com/wix/BalExtension">
  <Bundle Version="1.0.0.0" UpgradeCode="63C4E213-0297-4CFE-BB7B-7A77EB68E966"
          IconSourceFile="@CPACK_WIX_PRODUCT_ICON@"
          Name="Spack Package Manager"
          Manufacturer="Lawrence Livermore National Laboratory">
    <BootstrapperApplicationRef Id="WixStandardBootstrapperApplication.RtfLicense">
      <bal:WixStandardBootstrapperApplication LicenseFile="@CPACK_RESOURCE_FILE_LICENSE@"/>
    </BootstrapperApplicationRef>
    <Chain>
      <MsiPackage
          SourceFile="Spack.msi"
          DisplayInternalUI="yes"/>
      <ExePackage
          SourceFile="Git-2.31.1-64-bit.exe"
          DetectCondition="ExeDetectedVariable"
          InstallCommand="@SPACK_GIT_VERBOSITY@ /SUPPRESSMSGBOXES"
          RepairCommand="/VERYSILENT"
          UninstallCommand="/VERYSILENT" />
    </Chain>
  </Bundle>
</Wix>


@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<CPackWiXPatch>
  <CPackWiXFragment Id="#PRODUCT">
    <Property Id="DISABLEADVTSHORTCUTS" Value="1"/>
  </CPackWiXFragment>
  <CPackWiXFragment Id="#PRODUCTFEATURE">
    <ComponentGroupRef Id="ProductComponents" />
    <ComponentRef Id="ProgramMenuDir"/>
  </CPackWiXFragment>
</CPackWiXPatch>


@@ -1,50 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
  <Fragment>
    <Icon Id="icon.ico" SourceFile="@CPACK_WIX_PRODUCT_ICON@"/>
    <Property Id="ARPPRODUCTICON" Value="icon.ico" />
  </Fragment>
  <Fragment>
    <DirectoryRef Id="TARGETDIR">
      <Directory Id="DesktopFolder" Name="Desktop" >
        <Component Id="SpackDesktopShortCut" Guid="@CPACK_WIX_UPGRADE_GUID@">
          <Shortcut Id="SpackDesktopShortCut"
                    Name="Spack Package Manager"
                    Description="Spack package manager"
                    Target="[INSTALL_ROOT]/@SPACK_DIR@/bin/@SPACK_SHORTCUT@"
                    Icon="icon1.ico">
            <Icon Id="icon1.ico" SourceFile="@CPACK_WIX_PRODUCT_ICON@" />
          </Shortcut>
          <RegistryValue Root="HKCU" Key="Software\LLNL\Spack"
                         Type="integer" Value="1" Name="SpackDesktopShortCut" KeyPath="yes" />
        </Component>
      </Directory>
      <Directory Id="ProgramMenuFolder" Name="Programs">
        <Directory Id="ApplicationProgramsFolder" Name="Spack">
          <Component Id="SpackStartShortCut" Guid="@SHORTCUT_GUID@">
            <Shortcut Id="SpackStartMenuShortCut"
                      Name="Spack Package Manager"
                      Description="Spack package manager"
                      Target="[INSTALL_ROOT]/@SPACK_DIR@/bin/@SPACK_SHORTCUT@"
                      Icon="icon2.ico">
              <Icon Id="icon2.ico" SourceFile="@CPACK_WIX_PRODUCT_ICON@" />
            </Shortcut>
            <RegistryValue Root="HKCU" Key="Software\LLNL\Spack"
                           Type="integer" Value="1" Name="SpackStartMenuShortCut" KeyPath="yes" />
          </Component>
          <Component Id="ProgramMenuDir" Guid="*">
            <RemoveFolder Id="ProgramMenuDir" On="uninstall"/>
            <RegistryValue Root="HKMU" Key="Software\LLNL\Spack"
                           Type="integer" Value="1" Name="installed" KeyPath="yes" />
          </Component>
        </Directory>
      </Directory>
    </DirectoryRef>
  </Fragment>
  <Fragment>
    <ComponentGroup Id="ProductComponents">
      <ComponentRef Id="SpackStartShortCut"/>
      <ComponentRef Id="SpackDesktopShortCut"/>
    </ComponentGroup>
  </Fragment>
</Wix>


@@ -33,9 +33,6 @@ def setup_parser(subparser):
    shells.add_argument(
        '--fish', action='store_const', dest='shell', const='fish',
        help="print fish commands to load the package")
    shells.add_argument(
        '--bat', action='store_const', dest='shell', const='bat',
        help="print bat commands to load the package")
    subparser.add_argument(
        '--first',


@@ -1,146 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import posixpath
import sys

import spack.paths
import spack.util.executable
from spack.spec import Spec
from spack.util.path import convert_to_posix_path

description = "generate Windows installer"
section = "admin"
level = "long"
def txt_to_rtf(file_path):
    # Braces are doubled because this template is filled in with str.format().
    rtf_header = r"""{{\rtf1\ansi\deff0\nouicompat
{{\fonttbl{{\f0\fnil\fcharset0 Courier New;}}}}
{{\colortbl ;\red0\green0\blue255;}}
{{\*\generator Riched20 10.0.19041}}\viewkind4\uc1
\f0\fs22\lang1033
{}
}}
"""

    def line_to_rtf(line):
        return line.replace("\n", "\\par")

    contents = ""
    with open(file_path, "r+") as f:
        for line in f.readlines():
            contents += line_to_rtf(line)
    return rtf_header.format(contents)
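
# A minimal usage sketch mirroring the call in make_installer() below
# (illustrative only): every newline in the input becomes an RTF \par break
# and the result is wrapped in rtf_header, so it can be written straight to
# a .rtf file:
#
#     rtf = txt_to_rtf("LICENSE-APACHE")
#     with open("LICENSE.rtf", "w") as out:
#         out.write(rtf)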
def setup_parser(subparser):
    spack_source_group = subparser.add_mutually_exclusive_group(required=True)
    spack_source_group.add_argument(
        '-v', '--spack-version', default="",
        help='download given spack version e.g. 0.16.0')
    spack_source_group.add_argument(
        '-s', '--spack-source', default="",
        help='full path to spack source')
    subparser.add_argument(
        '-g', '--git-installer-verbosity', default="",
        choices=set(['SILENT', 'VERYSILENT']),
        help="Level of verbosity provided by bundled Git Installer.\
        Default is fully verbose",
        required=False, action='store', dest="git_verbosity"
    )
    subparser.add_argument(
        'output_dir', help="output directory")
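
# For orientation, the CLI surface this parser produces, using the two
# invocations documented in the installer README (exactly one of -v / -s is
# required, plus the positional output directory):
#
#     spack make-installer -s spack-develop tmp
#     spack make-installer -v 0.16.0 tmp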
def make_installer(parser, args):
    """
    Use CMake to generate a WIX installer in a newly created build directory.
    """
    if sys.platform == 'win32':
        output_dir = args.output_dir
        cmake_spec = Spec('cmake')
        cmake_spec.concretize()
        cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
        cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
        spack_source = args.spack_source
        git_verbosity = ""
        if args.git_verbosity:
            git_verbosity = "/" + args.git_verbosity

        if spack_source:
            if not os.path.exists(spack_source):
                print("%s does not exist" % spack_source)
                return
            else:
                if not os.path.isabs(spack_source):
                    spack_source = posixpath.abspath(spack_source)
                spack_source = convert_to_posix_path(spack_source)

        spack_version = args.spack_version
        here = os.path.dirname(os.path.abspath(__file__))
        source_dir = os.path.join(here, "installer")
        posix_root = convert_to_posix_path(spack.paths.spack_root)

        # Convert the Apache license to RTF for the WiX license dialog.
        spack_license = posixpath.join(posix_root, "LICENSE-APACHE")
        rtf_spack_license = txt_to_rtf(spack_license)
        spack_license = posixpath.join(source_dir, "LICENSE.rtf")
        with open(spack_license, 'w') as rtf_license:
            written = rtf_license.write(rtf_spack_license)
            if written == 0:
                raise RuntimeError("Failed to generate properly formatted license file")
        spack_logo = posixpath.join(posix_root, "share/spack/logo/favicon.ico")

        # Configure the CMake project that drives the CPack WIX generator.
        try:
            spack.util.executable.Executable(cmake_path)(
                '-S', source_dir, '-B', output_dir,
                '-DSPACK_VERSION=%s' % spack_version,
                '-DSPACK_SOURCE=%s' % spack_source,
                '-DSPACK_LICENSE=%s' % spack_license,
                '-DSPACK_LOGO=%s' % spack_logo,
                '-DSPACK_GIT_VERBOSITY=%s' % git_verbosity
            )
        except spack.util.executable.ProcessError:
            print("Failed to generate installer")
            return 1  # ProcessError carries no exit code; signal failure generically

        # Build Spack.msi from the configured project.
        try:
            spack.util.executable.Executable(cpack_path)(
                "--config",
                "%s/CPackConfig.cmake" % output_dir,
                "-B",
                "%s/" % output_dir)
        except spack.util.executable.ProcessError:
            print("Failed to generate installer")
            return 1

        # Compile the bundle description that chains the MSI and Git installers.
        try:
            spack.util.executable.Executable(os.environ.get('WIX') + '/bin/candle.exe')(
                '-ext',
                'WixBalExtension',
                '%s/bundle.wxs' % output_dir,
                '-out',
                '%s/bundle.wixobj' % output_dir
            )
        except spack.util.executable.ProcessError:
            print("Failed to generate installer chain")
            return 1

        # Link the compiled bundle into the final self-contained Spack.exe.
        try:
            spack.util.executable.Executable(os.environ.get('WIX') + "/bin/light.exe")(
                "-sw1134",
                "-ext",
                "WixBalExtension",
                "%s/bundle.wixobj" % output_dir,
                '-out',
                '%s/Spack.exe' % output_dir
            )
        except spack.util.executable.ProcessError:
            print("Failed to generate installer chain")
            return 1
        print("Successfully generated Spack.exe in %s" % (output_dir))
    else:
        print('The make-installer command is currently only supported on Windows.')
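
Taken together, the guarded calls above form a cmake, cpack, candle, light pipeline. As a rough sketch of driving the same four stages by hand outside of Spack (the ``build`` directory, the trimmed CMake definitions, and reliance on the ``WIX`` environment variable are assumptions for illustration, not values the command itself hard-codes):

```python
import os
import subprocess

build = "build"  # assumed output directory
wix_bin = os.path.join(os.environ["WIX"], "bin")  # requires WiX to be installed

# 1. Configure the installer project (the CMakeLists.txt shown earlier).
subprocess.check_call(["cmake", "-S", "installer", "-B", build,
                       "-DSPACK_VERSION=0.16.0"])
# 2. Build Spack.msi with CPack's WIX generator.
subprocess.check_call(["cpack", "--config",
                       os.path.join(build, "CPackConfig.cmake"), "-B", build])
# 3. Compile the bundle description (bundle.wxs) ...
subprocess.check_call([os.path.join(wix_bin, "candle.exe"),
                       "-ext", "WixBalExtension",
                       os.path.join(build, "bundle.wxs"),
                       "-out", os.path.join(build, "bundle.wixobj")])
# 4. ... and link it into the self-contained Spack.exe.
subprocess.check_call([os.path.join(wix_bin, "light.exe"), "-sw1134",
                       "-ext", "WixBalExtension",
                       os.path.join(build, "bundle.wixobj"),
                       "-out", os.path.join(build, "Spack.exe")])
```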

Some files were not shown because too many files have changed in this diff.