Compare commits: my_branch ... bug/stop_a

1 commit: deb3f0f43a

.github/ISSUE_TEMPLATE/build_error.yml (vendored, 18 changed lines)
@@ -16,29 +16,19 @@ body:
attributes:
label: Steps to reproduce the issue
description: |
Fill in the console output from the exact spec you are trying to build.
value: |
Fill in the exact spec you are trying to build and the relevant part of the error message
placeholder: |
```console
$ spack spec -I <spec>
$ spack install <spec>
...
```
- type: textarea
id: error
attributes:
label: Error message
description: |
Please post the error message from spack inside the `<details>` tag below:
value: |
<details><summary>Error message</summary><pre>
...
</pre></details>
validations:
required: true
- type: textarea
id: information
attributes:
label: Information on your system
description: Please include the output of `spack debug report`.
description: Please include the output of `spack debug report`
validations:
required: true
- type: markdown

.github/workflows/bootstrap.yml (vendored, 123 changed lines)
@@ -31,25 +31,19 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack bootstrap untrust github-actions
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -67,25 +61,19 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack bootstrap untrust github-actions
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -102,20 +90,14 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -123,6 +105,7 @@ jobs:
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
opensuse-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "opensuse/leap:latest"
|
||||
@@ -135,19 +118,16 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup repo
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack bootstrap untrust github-actions
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -158,13 +138,12 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack bootstrap untrust github-actions
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -173,14 +152,13 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Bootstrap clingo
|
||||
@@ -194,14 +172,13 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Setup repo
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
@@ -225,20 +202,14 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -260,26 +231,20 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack bootstrap untrust github-actions
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
@@ -291,8 +256,7 @@ jobs:
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@v2
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -308,20 +272,11 @@ jobs:
|
||||
brew install gawk tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/checkout@v2
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack bootstrap untrust github-actions
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||

# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
# introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
# See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
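Condensed from the run steps above, the non-root setup on these patched-git distros amounts to the following sequence (the /__w/spack/spack path is the container checkout location used by these jobs):

```bash
# Mark the checkout as safe before any git commands run as another user (see [1]),
# then create the unprivileged test user and hand the working tree over to it.
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
```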

.github/workflows/build-containers.yml (vendored, 58 changed lines)
@@ -13,8 +13,6 @@ on:
|
||||
paths:
|
||||
- '.github/workflows/build-containers.yml'
|
||||
- 'share/spack/docker/*'
|
||||
- 'share/templates/container/*'
|
||||
- 'lib/spack/spack/container/*'
|
||||
# Let's also build & tag Spack containers on releases.
|
||||
release:
|
||||
types: [published]
|
||||
@@ -31,21 +29,15 @@ jobs:
|
||||
# A matrix of Dockerfile paths, associated tags, and which architectures
|
||||
# they support.
|
||||
matrix:
|
||||
# Meaning of the various items in the matrix list
|
||||
# 0: Container name (e.g. ubuntu-bionic)
|
||||
# 1: Platforms to build for
|
||||
# 2: Base image (e.g. ubuntu:18.04)
|
||||
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
|
||||
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
|
||||
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
|
||||
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
|
||||
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
|
||||
dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
|
||||
[centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
|
||||
[leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
|
||||
[ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
|
||||
[ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -60,34 +52,22 @@ jobs:
|
||||
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
|
||||
echo "versioned=${versioned}" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate the Dockerfile
|
||||
env:
|
||||
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
|
||||
- name: Check ${{ matrix.dockerfile[1] }} Exists
|
||||
run: |
|
||||
.github/workflows/generate_spack_yaml_containerize.sh
|
||||
. share/spack/setup-env.sh
|
||||
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
|
||||
printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
|
||||
if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
|
||||
printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
|
||||
printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
|
||||
if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
|
||||
printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
- name: Upload Dockerfile
|
||||
uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
|
||||
with:
|
||||
name: dockerfiles
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
|
||||
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
|
||||
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -95,19 +75,17 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[1] }}
|
||||
uses: docker/build-push-action@a66e35b9cbcf4ad0ea91ffcaf7bbad63ad9e0229 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
file: share/spack/docker/${{matrix.dockerfile[1]}}
|
||||
platforms: ${{ matrix.dockerfile[2] }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
tags: |
|
||||
spack/${{ env.container }}
|
||||
spack/${{ env.versioned }}
|
||||
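For orientation, each row of the Dockerfile matrix earlier in this workflow feeds the Build & Deploy step above. Under the ubuntu-bionic row of the (name, Dockerfile, platforms) layout, the published build is roughly equivalent to the invocation below; this is a hedged sketch of what docker/build-push-action does under the hood, and the image tag shown is illustrative rather than taken from the workflow.

```bash
# Approximate equivalent of the Build & Deploy step for one matrix row
# (ubuntu-bionic, ubuntu-1804.dockerfile); the tag name is illustrative.
docker buildx build \
  --platform linux/amd64,linux/arm64,linux/ppc64le \
  --file share/spack/docker/ubuntu-1804.dockerfile \
  --tag spack/ubuntu-bionic:latest \
  --push .
```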
|
||||
.github/workflows/execute_installer.ps1 (vendored, 7 lines removed)

@@ -1,7 +0,0 @@
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
$handle = $proc.Handle # cache proc.Handle
$proc.WaitForExit();

if ($proc.ExitCode -ne 0) {
Write-Warning "$_ exited with status code $($proc.ExitCode)"
}
|
||||
#!/bin/bash
|
||||
(echo "spack:" \
|
||||
&& echo " specs: []" \
|
||||
&& echo " container:" \
|
||||
&& echo " format: docker" \
|
||||
&& echo " images:" \
|
||||
&& echo " os: \"${SPACK_YAML_OS}\"" \
|
||||
&& echo " spack:" \
|
||||
&& echo " ref: ${GITHUB_REF}") > spack.yaml
|
||||
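For readability, the deleted snippet above can be expressed as a heredoc; this is a minimal sketch of the same spack.yaml it emitted, assuming SPACK_YAML_OS and GITHUB_REF are exported by the calling workflow and using approximate indentation.

```bash
# Same output as the chained echo calls, written as a heredoc
# (assumed environment: SPACK_YAML_OS, e.g. "ubuntu:18.04", and GITHUB_REF).
cat > spack.yaml <<EOF
spack:
  specs: []
  container:
    format: docker
    images:
      os: "${SPACK_YAML_OS}"
      spack:
        ref: ${GITHUB_REF}
EOF
```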
.github/workflows/install_spack.sh (vendored, 16 changed lines)

@@ -2,7 +2,19 @@
. share/spack/setup-env.sh
echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
spack config add "packages:all:target:[x86_64]"
spack compiler find
# TODO: remove this explicit setting once apple-clang detection is fixed
cat <<EOF > etc/spack/compilers.yaml
compilers:
- compiler:
    spec: apple-clang@11.0.3
    paths:
      cc: /usr/bin/clang
      cxx: /usr/bin/clang++
      f77: /usr/local/bin/gfortran-9
      fc: /usr/local/bin/gfortran-9
    modules: []
    operating_system: catalina
    target: x86_64
EOF
spack compiler info apple-clang
spack debug report
spack solve zlib
|
||||
14
.github/workflows/macos_python.yml
vendored
14
.github/workflows/macos_python.yml
vendored
@@ -24,23 +24,23 @@ jobs:
|
||||
name: gcc with clang
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
run: |
|
||||
. .github/workflows/install_spack.sh
|
||||
# 9.2.0 is the latest version on which we apply homebrew patch
|
||||
spack install -v --fail-fast gcc@11.2.0 %apple-clang
|
||||
spack install -v --fail-fast gcc@9.2.0 %apple-clang
|
||||
|
||||
install_jupyter_clang:
|
||||
name: jupyter
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 700
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
@@ -52,8 +52,8 @@ jobs:
|
||||
name: scipy, mpl, pd
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
|
||||
.github/workflows/setup_git.ps1 (vendored, 11 lines removed)

@@ -1,11 +0,0 @@
# (c) 2021 Lawrence Livermore National Laboratory

Set-Location spack

git config --global user.email "spack@example.com"
git config --global user.name "Test User"

if ($(git branch --show-current) -ne "develop")
{
git branch develop origin/develop
}
.github/workflows/system_shortcut_check.ps1 (vendored, 4 lines removed)

@@ -1,4 +0,0 @@
param ($systemFolder, $shortcut)

$start = [System.Environment]::GetFolderPath("$systemFolder")
Invoke-Item "$start\Programs\Spack\$shortcut"
.github/workflows/unit_tests.yaml
vendored
@@ -15,10 +15,10 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: 3.9
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install --upgrade pip
|
||||
@@ -31,12 +31,12 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools types-six
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
@@ -96,7 +96,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
|
||||
concretizer: ['clingo']
|
||||
include:
|
||||
- python-version: 2.7
|
||||
@@ -106,10 +106,10 @@ jobs:
|
||||
- python-version: 3.9
|
||||
concretizer: original
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -121,16 +121,12 @@ jobs:
|
||||
patchelf cmake bison libbison-dev kcov
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov "coverage[toml]<=6.2"
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]
|
||||
# ensure style checks are not skipped in unit tests for python >= 3.6
|
||||
# note that true/false (i.e., 1/0) are opposite in conditions in python and bash
|
||||
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
|
||||
pip install --upgrade flake8 isort>=4.3.5 mypy>=0.900 black
|
||||
fi
|
||||
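The comment above is worth unpacking: `sys.exit()` with a truthy value produces a non-zero exit status, so the Python expression and the shell condition have opposite senses. A quick way to sanity-check the guard (hedged example, not part of the workflow):

```bash
# Exits 0 (shell "true") only when the interpreter is Python >= 3.6,
# because sys.exit(False) maps to exit status 0.
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
    echo "Python >= 3.6: style-check dependencies will be installed"
fi
```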
- name: Pin pathlib for Python 2.7
|
||||
if: ${{ matrix.python-version == 2.7 }}
|
||||
run: |
|
||||
pip install -U pathlib2==2.3.6
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
@@ -162,7 +158,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
@@ -171,12 +167,12 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: 3.9
|
||||
- name: Install System packages
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
@@ -184,7 +180,7 @@ jobs:
|
||||
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
@@ -200,7 +196,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
@@ -218,7 +214,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -237,12 +233,12 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: 3.9
|
||||
- name: Install System packages
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
@@ -252,7 +248,7 @@ jobs:
|
||||
patchelf kcov
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 clingo
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml] clingo
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
@@ -274,7 +270,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
@@ -286,16 +282,16 @@ jobs:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools
|
||||
pip install --upgrade pytest codecov coverage[toml]==6.2
|
||||
pip install --upgrade pytest codecov coverage[toml]
|
||||
- name: Setup Homebrew packages
|
||||
run: |
|
||||
brew install dash fish gcc gnupg2 kcov
|
||||
@@ -320,7 +316,7 @@ jobs:
|
||||
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
||||
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
||||
fi
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
files: ./coverage.xml
|
||||
@@ -331,13 +327,13 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]
|
||||
- name: Package audits (with coverage)
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
run: |
|
||||
@@ -350,7 +346,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
||||
188
.github/workflows/windows_python.yml
vendored
188
.github/workflows/windows_python.yml
vendored
@@ -1,188 +0,0 @@
|
||||
name: windows tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
defaults:
|
||||
run:
|
||||
shell:
|
||||
powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
|
||||
jobs:
|
||||
validate:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install --upgrade vermin
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/lib/spack/spack/ spack/lib/spack/llnl/ spack/bin/
|
||||
- name: vermin (Repositories)
|
||||
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/var/spack/repos
|
||||
# Run style checks on the files that have been changed
|
||||
style:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Run style tests
|
||||
run: |
|
||||
spack style
|
||||
- name: Verify license headers
|
||||
run: |
|
||||
python spack\bin\spack license verify
|
||||
unittest:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Unit Test
|
||||
run: |
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
|
||||
unittest-cmd:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Command Unit Test
|
||||
run: |
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack unit-test lib/spack/spack/test/cmd --verbose
|
||||
buildtest:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Build Test
|
||||
run: |
|
||||
spack compiler find
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack external find cmake
|
||||
spack external find ninja
|
||||
spack install abseil-cpp
|
||||
generate-installer-test:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- name: Disable Windows Symlinks
|
||||
run: |
|
||||
git config --global core.symlinks false
|
||||
shell:
|
||||
powershell
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Add Light and Candle to Path
|
||||
run: |
|
||||
$env:WIX >> $GITHUB_PATH
|
||||
- name: Run Installer
|
||||
run: |
|
||||
.\spack\share\spack\qa\setup_spack.ps1
|
||||
spack make-installer -s spack -g SILENT pkg
|
||||
echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
|
||||
env:
|
||||
ProgressPreference: SilentlyContinue
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer Bundle
|
||||
path: ${{ env.installer_root }}\pkg\Spack.exe
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer
|
||||
path: ${{ env.installer_root}}\pkg\Spack.msi
|
||||
execute-installer:
|
||||
needs: generate-installer-test
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: pwsh
|
||||
steps:
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Setup installer directory
|
||||
run: |
|
||||
mkdir -p spack_installer
|
||||
echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer Bundle
|
||||
path: ${{ env.spack_installer }}
|
||||
- name: Execute Bundled Installer
|
||||
run: |
|
||||
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
|
||||
$handle = $proc.Handle # cache proc.Handle
|
||||
$proc.WaitForExit();
|
||||
$LASTEXITCODE
|
||||
env:
|
||||
ProgressPreference: SilentlyContinue
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer
|
||||
path: ${{ env.spack_installer }}
|
||||
- name: Execute MSI
|
||||
run: |
|
||||
$proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
|
||||
$handle = $proc.Handle # cache proc.Handle
|
||||
$proc.WaitForExit();
|
||||
$LASTEXITCODE
|
||||
CHANGELOG.md (16 changed lines)

@@ -1,19 +1,3 @@
# v0.17.2 (2022-04-13)

### Spack bugfixes
* Fix --reuse with upstreams set in an environment (#29680)
* config add: fix parsing of validator error to infer type from oneOf (#29475)
* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
* Use Spec.constrain to construct spec lists for stacks (#28783)
* Fix bug occurring when searching for inherited patches in packages (#29574)
* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)

### Spack Bugfixes
20
COPYRIGHT
20
COPYRIGHT
@@ -34,18 +34,10 @@ includes the sbang tool directly in bin/sbang. These packages are covered
|
||||
by various permissive licenses. A summary listing follows. See the
|
||||
license included with each package for full details.
|
||||
|
||||
PackageName: altgraph
|
||||
PackageHomePage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||
PackageLicenseDeclared: MIT
|
||||
|
||||
PackageName: argparse
|
||||
PackageHomePage: https://pypi.python.org/pypi/argparse
|
||||
PackageLicenseDeclared: Python-2.0
|
||||
|
||||
PackageName: astunparse
|
||||
PackageHomePage: https://github.com/simonpercivall/astunparse
|
||||
PackageLicenseDeclared: Python-2.0
|
||||
|
||||
PackageName: attrs
|
||||
PackageHomePage: https://github.com/python-attrs/attrs
|
||||
PackageLicenseDeclared: MIT
|
||||
@@ -70,10 +62,6 @@ PackageName: jsonschema
|
||||
PackageHomePage: https://pypi.python.org/pypi/jsonschema
|
||||
PackageLicenseDeclared: MIT
|
||||
|
||||
PackageName: macholib
|
||||
PackageHomePage: https://macholib.readthedocs.io/en/latest/index.html
|
||||
PackageLicenseDeclared: MIT
|
||||
|
||||
PackageName: markupsafe
|
||||
PackageHomePage: https://pypi.python.org/pypi/MarkupSafe
|
||||
PackageLicenseDeclared: BSD-3-Clause
|
||||
@@ -105,3 +93,11 @@ PackageLicenseDeclared: Apache-2.0 OR MIT
|
||||
PackageName: six
|
||||
PackageHomePage: https://pypi.python.org/pypi/six
|
||||
PackageLicenseDeclared: MIT
|
||||
|
||||
PackageName: macholib
|
||||
PackageHomePage: https://macholib.readthedocs.io/en/latest/index.html
|
||||
PackageLicenseDeclared: MIT
|
||||
|
||||
PackageName: altgraph
|
||||
PackageHomePage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||
PackageLicenseDeclared: MIT
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2013-2022 LLNS, LLC and other Spack Project Developers.
|
||||
Copyright (c) 2013-2020 LLNS, LLC and other Spack Project Developers.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
@@ -10,7 +10,6 @@ For more on Spack's release structure, see
|
||||
| Version | Supported |
|
||||
| ------- | ------------------ |
|
||||
| develop | :white_check_mark: |
|
||||
| 0.17.x | :white_check_mark: |
|
||||
| 0.16.x | :white_check_mark: |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def getpywin():
|
||||
try:
|
||||
import win32con # noqa
|
||||
except ImportError:
|
||||
subprocess.check_call(
|
||||
[sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
|
||||
subprocess.check_call(
|
||||
[sys.executable, "-m", "pip", "-q", "install", "pywin32"])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
getpywin()
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
# -*- python -*-
|
||||
#
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
223
bin/spack.bat
223
bin/spack.bat
@@ -1,223 +0,0 @@
|
||||
:: Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
:: Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
::
|
||||
:: SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
::#######################################################################
|
||||
::
|
||||
:: This file is part of Spack and sets up the spack environment for batch,
|
||||
:: This includes environment modules and lmod support,
|
||||
:: and it also puts spack in your path. The script also checks that at least
|
||||
:: module support exists, and provides suggestions if it doesn't. Source
|
||||
:: it like this:
|
||||
::
|
||||
:: . /path/to/spack/install/spack_cmd.bat
|
||||
::
|
||||
@echo off
|
||||
|
||||
set spack=%SPACK_ROOT%\bin\spack
|
||||
|
||||
::#######################################################################
|
||||
:: This is a wrapper around the spack command that forwards calls to
|
||||
:: 'spack load' and 'spack unload' to shell functions. This in turn
|
||||
:: allows them to be used to invoke environment modules functions.
|
||||
::
|
||||
:: 'spack load' is smarter than just 'load' because it converts its
|
||||
:: arguments into a unique Spack spec that is then passed to module
|
||||
:: commands. This allows the user to use packages without knowing all
|
||||
:: their installation details.
|
||||
::
|
||||
:: e.g., rather than requiring a full spec for libelf, the user can type:
|
||||
::
|
||||
:: spack load libelf
|
||||
::
|
||||
:: This will first find the available libelf module file and use a
|
||||
:: matching one. If there are two versions of libelf, the user would
|
||||
:: need to be more specific, e.g.:
|
||||
::
|
||||
:: spack load libelf@0.8.13
|
||||
::
|
||||
:: This is very similar to how regular spack commands work and it
|
||||
:: avoids the need to come up with a user-friendly naming scheme for
|
||||
:: spack module files.
|
||||
::#######################################################################
|
||||
|
||||
:_sp_shell_wrapper
|
||||
set "_sp_flags="
|
||||
set "_sp_args="
|
||||
set "_sp_subcommand="
|
||||
setlocal enabledelayedexpansion
|
||||
:: commands have the form '[flags] [subcommand] [args]'
|
||||
:: flags will always start with '-', e.g. --help or -V
|
||||
:: subcommands will never start with '-'
|
||||
:: everything after the subcommand is an arg
|
||||
for %%x in (%*) do (
|
||||
set t="%%~x"
|
||||
if "!t:~0,1!" == "-" (
|
||||
if defined _sp_subcommand (
|
||||
:: We already have a subcommand, processing args now
|
||||
set "_sp_args=!_sp_args! !t!"
|
||||
) else (
|
||||
set "_sp_flags=!_sp_flags! !t!"
|
||||
shift
|
||||
)
|
||||
) else if not defined _sp_subcommand (
|
||||
set "_sp_subcommand=!t!"
|
||||
shift
|
||||
) else (
|
||||
set "_sp_args=!_sp_args! !t!"
|
||||
shift
|
||||
)
|
||||
)
|
||||
|
||||
:: --help, -h and -V flags don't require further output parsing.
|
||||
:: If we encounter, execute and exit
|
||||
if defined _sp_flags (
|
||||
if NOT "%_sp_flags%"=="%_sp_flags:-h=%" (
|
||||
python "%spack%" %_sp_flags%
|
||||
exit /B 0
|
||||
) else if NOT "%_sp_flags%"=="%_sp_flags:--help=%" (
|
||||
python "%spack%" %_sp_flags%
|
||||
exit /B 0
|
||||
) else if NOT "%_sp_flags%"=="%_sp_flags:-V=%" (
|
||||
python "%spack%" %_sp_flags%
|
||||
exit /B 0
|
||||
)
|
||||
)
|
||||
:: pass parsed variables outside of local scope. Need to do
|
||||
:: this because delayedexpansion can only be set by setlocal
|
||||
echo %_sp_flags%>flags
|
||||
echo %_sp_args%>args
|
||||
echo %_sp_subcommand%>subcmd
|
||||
endlocal
|
||||
set /p _sp_subcommand=<subcmd
|
||||
set /p _sp_flags=<flags
|
||||
set /p _sp_args=<args
|
||||
set str_subcommand=%_sp_subcommand:"='%
|
||||
set str_flags=%_sp_flags:"='%
|
||||
set str_args=%_sp_args:"='%
|
||||
if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
|
||||
if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
|
||||
if "%str_args%"=="ECHO is off." (set "_sp_args=")
|
||||
del subcmd
|
||||
del flags
|
||||
del args
|
||||
|
||||
:: Filter out some commands. For any others, just run the command.
|
||||
if "%_sp_subcommand%" == "cd" (
|
||||
goto :case_cd
|
||||
) else if "%_sp_subcommand%" == "env" (
|
||||
goto :case_env
|
||||
) else if "%_sp_subcommand%" == "load" (
|
||||
goto :case_load
|
||||
) else if "%_sp_subcommand%" == "unload" (
|
||||
goto :case_load
|
||||
) else (
|
||||
goto :default_case
|
||||
)
|
||||
|
||||
::#######################################################################
|
||||
|
||||
:case_cd
|
||||
:: Check for --help or -h
|
||||
:: TODO: This is not exactly the same as setup-env.
|
||||
:: In setup-env, '--help' or '-h' must follow the cd
|
||||
:: Here, they may be anywhere in the args
|
||||
if defined _sp_args (
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
python "%spack%" cd -h
|
||||
goto :end_switch
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
|
||||
python "%spack%" cd -h
|
||||
goto :end_switch
|
||||
)
|
||||
)
|
||||
|
||||
for /F "tokens=* USEBACKQ" %%F in (
|
||||
`python "%spack%" location %_sp_args%`) do (
|
||||
set "LOC=%%F"
|
||||
)
|
||||
for %%Z in ("%LOC%") do if EXIST %%~sZ\NUL (cd /d "%LOC%")
|
||||
goto :end_switch
|
||||
|
||||
:case_env
|
||||
:: If no args or args contain --bat or -h/--help: just execute.
|
||||
if NOT defined _sp_args (
|
||||
goto :default_case
|
||||
)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
|
||||
) do %%I
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
|
||||
) do %%I
|
||||
) else (
|
||||
goto :default_case
|
||||
)
|
||||
goto :end_switch
|
||||
|
||||
:case_load
|
||||
:: If args contain --sh, --csh, or -h/--help: just execute.
|
||||
if defined _sp_args (
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
)
|
||||
)
|
||||
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
|
||||
)
|
||||
goto :end_switch
|
||||
|
||||
:case_unload
|
||||
goto :case_load
|
||||
|
||||
:default_case
|
||||
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
|
||||
goto :end_switch
|
||||
|
||||
:end_switch
|
||||
exit /B %ERRORLEVEL%
|
||||
|
||||
|
||||
::########################################################################
|
||||
:: Prepends directories to path, if they exist.
|
||||
:: pathadd /path/to/dir # add to PATH
|
||||
:: or pathadd OTHERPATH /path/to/dir # add to OTHERPATH
|
||||
::########################################################################
|
||||
|
||||
:_spack_pathadd
|
||||
set "_pa_varname=PATH"
|
||||
set "_pa_new_path=%~1"
|
||||
if NOT "%~2" == "" (
|
||||
set "_pa_varname=%~1"
|
||||
set "_pa_new_path=%~2"
|
||||
)
|
||||
set "_pa_oldvalue=%_pa_varname%"
|
||||
for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
|
||||
if defined %_pa_oldvalue% (
|
||||
set "_pa_varname=%_pa_new_path%:%_pa_oldvalue%"
|
||||
) else (
|
||||
set "_pa_varname=%_pa_new_path%"
|
||||
)
|
||||
)
|
||||
exit /b 0
|
||||
|
||||
:: set module system roots
|
||||
:_sp_multi_pathadd
|
||||
for %%I in (%~2) do (
|
||||
for %%Z in (%_sp_compatible_sys_types%) do (
|
||||
:pathadd "%~1" "%%I\%%Z"
|
||||
)
|
||||
)
|
||||
exit /B %ERRORLEVEL%
|
||||
@@ -1,72 +0,0 @@
|
||||
@ECHO OFF
|
||||
setlocal EnableDelayedExpansion
|
||||
:: (c) 2021 Lawrence Livermore National Laboratory
|
||||
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
|
||||
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
|
||||
::
|
||||
:: source_dir/spack/bin/spack_cmd.bat
|
||||
::
|
||||
pushd %~dp0..
|
||||
set SPACK_ROOT=%CD%
|
||||
pushd %CD%\..
|
||||
set spackinstdir=%CD%
|
||||
popd
|
||||
|
||||
|
||||
:: Check if Python is on the PATH
|
||||
if not defined python_pf_ver (
|
||||
(for /f "delims=" %%F in ('where python.exe') do (
|
||||
set "python_pf_ver=%%F"
|
||||
goto :found_python
|
||||
) ) 2> NUL
|
||||
)
|
||||
:found_python
|
||||
if not defined python_pf_ver (
|
||||
:: If not, look for Python from the Spack installer
|
||||
:get_builtin
|
||||
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
|
||||
set "python_ver=%%g")) 2> NUL
|
||||
|
||||
if not defined python_ver (
|
||||
echo Python was not found on your system.
|
||||
echo Please install Python or add Python to your PATH.
|
||||
) else (
|
||||
set "py_path=!spackinstdir!\!python_ver!"
|
||||
set "py_exe=!py_path!\python.exe"
|
||||
)
|
||||
goto :exitpoint
|
||||
) else (
|
||||
:: Python is already on the path
|
||||
set "py_exe=!python_pf_ver!"
|
||||
(for /F "tokens=* USEBACKQ" %%F in (
|
||||
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
|
||||
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
|
||||
goto :exitpoint
|
||||
)
|
||||
:exitpoint
|
||||
|
||||
set "PATH=%SPACK_ROOT%\bin\;%PATH%"
|
||||
if defined py_path (
|
||||
set "PATH=%py_path%;%PATH%"
|
||||
)
|
||||
|
||||
if defined py_exe (
|
||||
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
|
||||
"%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
|
||||
)
|
||||
|
||||
set "EDITOR=notepad"
|
||||
|
||||
DOSKEY spacktivate=spack env activate $*
|
||||
|
||||
@echo **********************************************************************
|
||||
@echo ** Spack Package Manager
|
||||
@echo **********************************************************************
|
||||
|
||||
IF "%1"=="" GOTO CONTINUE
|
||||
set
|
||||
GOTO:EOF
|
||||
|
||||
:continue
|
||||
set PROMPT=[spack] %PROMPT%
|
||||
%comspec% /k
|
||||
@@ -1,10 +0,0 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
$Env:SPACK_PS1_PATH="$PSScriptRoot\..\share\spack\setup-env.ps1"
|
||||
& (Get-Process -Id $pid).Path -NoExit {
|
||||
. $Env:SPACK_PS1_PATH ;
|
||||
Push-Location $ENV:SPACK_ROOT
|
||||
}
|
||||
@@ -9,24 +9,15 @@ bootstrap:
  # may not be able to bootstrap all of the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions-v0.2'
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.2
      homepage: https://github.com/spack/spack-bootstrap-mirrors
      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
  - name: 'github-actions-v0.1'
  - name: 'github-actions'
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
      homepage: https://github.com/spack/spack-bootstrap-mirrors
      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
      homepage: https://github.com/alalazo/spack-bootstrap-mirrors
      releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
  # This method is just Spack bootstrapping the software it needs from sources.
  # It has been added here so that users can selectively disable bootstrapping
  # from sources by "untrusting" it.

@@ -37,5 +28,5 @@ bootstrap:
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
    github-actions-v0.2: true
    github-actions: true
    spack-install: true
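In practice the trusted flags above are toggled with the `spack bootstrap untrust` command used throughout the workflows in this change set; a minimal sketch (the source name must match one defined in this file):

```bash
# Disable the older buildcache source, then force a clingo bootstrap
# against whatever is still trusted.
spack bootstrap untrust github-actions-v0.2
spack -d solve zlib
```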
|
||||
@@ -1,30 +0,0 @@
|
||||
# -------------------------------------------------------------------------
|
||||
# This is the default spack configuration file.
|
||||
#
|
||||
# Settings here are versioned with Spack and are intended to provide
|
||||
# sensible defaults out of the box. Spack maintainers should edit this
|
||||
# file to keep it current.
|
||||
#
|
||||
# Users can override these settings by editing
|
||||
# `$SPACK_ROOT/etc/spack/concretizer.yaml`, `~/.spack/concretizer.yaml`,
|
||||
# or by adding a `concretizer:` section to an environment.
|
||||
# -------------------------------------------------------------------------
|
||||
concretizer:
|
||||
# Whether to consider installed packages or packages from buildcaches when
|
||||
# concretizing specs. If `true`, we'll try to use as many installs/binaries
|
||||
# as possible, rather than building. If `false`, we'll always give you a fresh
|
||||
# concretization.
|
||||
reuse: true
|
||||
# Options that tune which targets are considered for concretization. The
|
||||
# concretization process is very sensitive to the number targets, and the time
|
||||
# needed to reach a solution increases noticeably with the number of targets
|
||||
# considered.
|
||||
targets:
|
||||
# Determine whether we want to target specific or generic microarchitectures.
|
||||
# An example of the first kind might be for instance "skylake" or "bulldozer",
|
||||
# while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
|
||||
granularity: microarchitectures
|
||||
# If "false" allow targets that are incompatible with the current host (for
|
||||
# instance concretize with target "icelake" while running on "haswell").
|
||||
# If "true" only allow targets that are compatible with the host.
|
||||
host_compatible: true
|
||||
@@ -155,17 +155,14 @@ config:
|
||||
|
||||
# The concretization algorithm to use in Spack. Options are:
|
||||
#
|
||||
# 'original': Spack's original greedy, fixed-point concretizer. This
|
||||
# algorithm can make decisions too early and will not backtrack
|
||||
# sufficiently for many specs.
|
||||
#
|
||||
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
|
||||
# backtracking and optimization for user preferences. Spack will
|
||||
# try to bootstrap the logic solver, if not already available.
|
||||
#
|
||||
# 'original': Spack's original greedy, fixed-point concretizer. This
|
||||
# algorithm can make decisions too early and will not backtrack
|
||||
# sufficiently for many specs. This will soon be deprecated in
|
||||
# favor of clingo.
|
||||
#
|
||||
# See `concretizer.yaml` for more settings you can fine-tune when
|
||||
# using clingo.
|
||||
concretizer: clingo
|
||||
|
||||
|
||||
|
||||
@@ -35,21 +35,18 @@ modules:
|
||||
|
||||
# These are configurations for the module set named "default"
|
||||
default:
|
||||
# These values are defaulted in the code. They are not defaulted here so
|
||||
# that we can enable backwards compatibility with the old syntax more
|
||||
# easily (old value is in the config yaml, config:module_roots)
|
||||
# Where to install modules
|
||||
roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
# roots:
|
||||
# tcl: $spack/share/spack/modules
|
||||
# lmod: $spack/share/spack/lmod
|
||||
# What type of modules to use
|
||||
enable:
|
||||
- tcl
|
||||
|
||||
tcl:
|
||||
all:
|
||||
autoload: none
|
||||
|
||||
# Default configurations if lmod is enabled
|
||||
lmod:
|
||||
all:
|
||||
autoload: direct
|
||||
hierarchy:
|
||||
- mpi
|
||||
|
||||
@@ -34,9 +34,7 @@ packages:
|
||||
java: [openjdk, jdk, ibm-java]
|
||||
jpeg: [libjpeg-turbo, libjpeg]
|
||||
lapack: [openblas, amdlibflame]
|
||||
libllvm: [llvm, llvm-amdgpu]
|
||||
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
|
||||
luajit: [lua-luajit-openresty, lua-luajit]
|
||||
lua-lang: [lua, lua-luajit]
|
||||
mariadb-client: [mariadb-c-client, mariadb]
|
||||
mkl: [intel-mkl]
|
||||
mpe: [mpe2]
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
config:
|
||||
locks: false
|
||||
concretizer: original
|
||||
build_stage::
|
||||
- '$spack/.staging'
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -192,33 +192,30 @@ you can use them to customize an installation in :ref:`sec-specs`.
|
||||
Reusing installed dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
By default, when you run ``spack install``, Spack tries hard to reuse existing installations
|
||||
as dependencies, either from a local store or from remote buildcaches if configured.
|
||||
This minimizes unwanted rebuilds of common dependencies, in particular if
|
||||
you update Spack frequently.
|
||||
.. warning::
|
||||
|
||||
In case you want the latest versions and configurations to be installed instead,
|
||||
you can add the ``--fresh`` option:
|
||||
The ``--reuse`` option described here is experimental, and it will
|
||||
likely be replaced with a different option and configuration settings
|
||||
in the next Spack release.
|
||||
|
||||
By default, when you run ``spack install``, Spack tries to build a new
|
||||
version of the package you asked for, along with updated versions of
|
||||
its dependencies. This gets you the latest versions and configurations,
|
||||
but it can result in unwanted rebuilds if you update Spack frequently.
|
||||
|
||||
If you want Spack to try hard to reuse existing installations as dependencies,
|
||||
you can add the ``--reuse`` option:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --fresh mpich
|
||||
$ spack install --reuse mpich
|
||||
|
||||
Reusing installations in this mode is "accidental", and happens only if
there's a match between existing installations and what Spack would have installed
anyhow.
|
||||
|
||||
You can use the ``spack spec -I mpich`` command to see what
|
||||
This will not do anything if ``mpich`` is already installed. If ``mpich``
|
||||
is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
|
||||
``mpich`` will be built with the installed versions, if possible.
|
||||
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
|
||||
will be reused and what will be built before you install.
|
||||
|
||||
You can configure Spack to use the ``--fresh`` behavior by default in
|
||||
``concretizer.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
reuse: false
|
||||
|
||||
.. _cmd-spack-uninstall:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
@@ -1283,7 +1280,7 @@ Normally users don't have to bother specifying the architecture if they
|
||||
are installing software for their current host, as in that case the
|
||||
values will be detected automatically. If you need fine-grained control
|
||||
over which packages use which targets (or over *all* packages' default
|
||||
target), see :ref:`package-preferences`.
|
||||
target), see :ref:`concretization-preferences`.
|
||||
|
||||
.. admonition:: Cray machines
|
||||
|
||||
@@ -1723,8 +1720,8 @@ Activating Extensions in a View
|
||||
|
||||
Another way to use extensions is to create a view, which merges the
|
||||
python installation along with the extensions into a single prefix.
|
||||
See :ref:`configuring_environment_views` for a more in-depth description
|
||||
of views.
|
||||
See :ref:`filesystem-views` for a more in-depth description of views and
|
||||
:ref:`cmd-spack-view` for usage of the ``spack view`` command.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Activating Extensions Globally
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _build-settings:
|
||||
|
||||
================================
|
||||
Package Settings (packages.yaml)
|
||||
================================
|
||||
===================
|
||||
Build Customization
|
||||
===================
|
||||
|
||||
Spack allows you to customize how your software is built through the
|
||||
``packages.yaml`` file. Using it, you can make Spack prefer particular
|
||||
@@ -209,81 +209,11 @@ Specific limitations include:
|
||||
then Spack will not add a new external entry (``spack config blame packages``
|
||||
can help locate all external entries).
|
||||
|
||||
.. _concretizer-options:
|
||||
.. _concretization-preferences:
|
||||
|
||||
----------------------
|
||||
Concretizer options
|
||||
----------------------
|
||||
|
||||
``packages.yaml`` gives the concretizer preferences for specific packages,
|
||||
but you can also use ``concretizer.yaml`` to customize aspects of the
|
||||
algorithm it uses to select the dependencies you install:
|
||||
|
||||
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
|
||||
:language: yaml
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Reuse already installed packages
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
|
||||
whether it will do a "fresh" installation and prefer the latest settings from
|
||||
``package.py`` files and ``packages.yaml`` (``false``).
|
||||
You can use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
% spack install --reuse <spec>
|
||||
|
||||
to enable reuse for a single installation, and you can use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
% spack install --fresh <spec>
|
||||
|
||||
to do a fresh install if ``reuse`` is enabled by default.
|
||||
``reuse: true`` is the default.
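To change the default, set the ``reuse`` attribute in ``concretizer.yaml``; a minimal
user-scope override might look like the following sketch (the path is illustrative):

.. code-block:: yaml

   # e.g. ~/.spack/concretizer.yaml
   concretizer:
     reuse: false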
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Selection of the target microarchitectures
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The options under the ``targets`` attribute control which targets are considered during a solve.
|
||||
Currently, the options in this section are only configurable from the ``concretizer.yaml`` file
|
||||
and there are no corresponding command line arguments to enable them for a single solve.
|
||||
|
||||
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
|
||||
If set to:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
targets:
|
||||
granularity: microarchitectures
|
||||
|
||||
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
|
||||
compatibility. If instead the option is set to:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
targets:
|
||||
granularity: generic
|
||||
|
||||
Spack will consider only generic microarchitectures. For instance, when running on a
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.
|
||||
|
||||
The ``host_compatible`` option is a Boolean that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.
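A minimal ``concretizer.yaml`` sketch that relaxes this restriction would be:

.. code-block:: yaml

   concretizer:
     targets:
       # allow targets the current host cannot run, e.g. target=icelake
       # while Spack is running on a haswell node
       host_compatible: false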
|
||||
.. _package-preferences:
|
||||
|
||||
-------------------
|
||||
Package Preferences
|
||||
-------------------
|
||||
--------------------------
|
||||
Concretization Preferences
|
||||
--------------------------
|
||||
|
||||
Spack can be configured to prefer certain compilers, package
|
||||
versions, dependencies, and variants during concretization.
|
||||
@@ -339,7 +269,6 @@ concretization rules. A provider lists a value that packages may
|
||||
``depend_on`` (e.g, MPI) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
|
||||
|
||||
.. _package_permissions:
|
||||
|
||||
-------------------
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -47,12 +47,10 @@ on these ideas for each distinct build system that Spack supports:
|
||||
:maxdepth: 1
|
||||
:caption: Language-specific
|
||||
|
||||
build_systems/luapackage
|
||||
build_systems/octavepackage
|
||||
build_systems/perlpackage
|
||||
build_systems/pythonpackage
|
||||
build_systems/rpackage
|
||||
build_systems/racketpackage
|
||||
build_systems/rubypackage
|
||||
|
||||
.. toctree::
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -433,7 +433,7 @@ For example:
|
||||
.. code-block:: python
|
||||
|
||||
variant('profiler', when='@2.0:')
|
||||
config_args += self.with_or_without('profiler')
|
||||
config_args += self.with_or_without('profiler')
|
||||
|
||||
will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
|
||||
below ``2.0``.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -159,85 +159,6 @@ and CMake simply ignores the empty command line argument. For example the follow
|
||||
will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
|
||||
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
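The definition the sentence above refers to is typically produced with
``define_from_variant``; a minimal sketch (the variant and CMake option names are
assumptions for illustration) is:

.. code-block:: python

   def cmake_args(self):
       # yields '-DEXAMPLE=ON' when the spec satisfies '@2.0: +example',
       # and an empty string (ignored by CMake) otherwise
       return [self.define_from_variant('EXAMPLE', 'example')]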
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
CMake arguments provided by Spack
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The following default arguments are controlled by Spack:
|
||||
|
||||
|
||||
``CMAKE_INSTALL_PREFIX``
|
||||
------------------------
|
||||
|
||||
Is set to the package's install directory.
|
||||
|
||||
|
||||
``CMAKE_PREFIX_PATH``
|
||||
---------------------
|
||||
|
||||
CMake finds dependencies through calls to ``find_package()``, ``find_program()``,
|
||||
``find_library()``, ``find_file()``, and ``find_path()``, which use a list of search
|
||||
paths from ``CMAKE_PREFIX_PATH``. Spack sets this variable to a list of prefixes of the
|
||||
spec's transitive dependencies.
|
||||
|
||||
For troubleshooting cases where CMake fails to find a dependency, add the
|
||||
``--debug-find`` flag to ``cmake_args``.
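For example, a minimal sketch of passing that flag from a recipe:

.. code-block:: python

   def cmake_args(self):
       # temporary troubleshooting aid: make CMake trace its find_*() lookups
       return ['--debug-find']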
|
||||
``CMAKE_BUILD_TYPE``
|
||||
--------------------
|
||||
|
||||
Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
|
||||
dictate which level of optimization to use. In order to ensure
|
||||
uniformity across packages, the ``CMakePackage`` base class adds
|
||||
a variant to control this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='RelWithDebInfo',
|
||||
description='CMake build type',
|
||||
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
|
||||
|
||||
However, not every CMake package accepts all four of these options.
|
||||
Grep the ``CMakeLists.txt`` file to see if the default values are
|
||||
missing or replaced. For example, the
|
||||
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
|
||||
package overrides the default variant with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='DebugRelease',
|
||||
description='The build type to build',
|
||||
values=('Debug', 'Release', 'DebugRelease'))
|
||||
|
||||
For more information on ``CMAKE_BUILD_TYPE``, see:
|
||||
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
|
||||
|
||||
``CMAKE_INSTALL_RPATH`` and ``CMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``
|
||||
--------------------------------------------------------------------
|
||||
|
||||
CMake uses different RPATHs during the build and after installation, so that executables
|
||||
can locate the libraries they're linked to during the build, and installed executables
|
||||
do not have RPATHs to build directories. In Spack, we have to make sure that RPATHs are
|
||||
set properly after installation.
|
||||
|
||||
Spack sets ``CMAKE_INSTALL_RPATH`` to a list of ``<prefix>/lib`` or ``<prefix>/lib64``
|
||||
directories of the spec's link-type dependencies. Apart from that, it sets
|
||||
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, which should add RPATHs for directories of
|
||||
linked libraries not in the directories covered by ``CMAKE_INSTALL_RPATH``.
|
||||
|
||||
Usually it's enough to set only ``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, but the
|
||||
reason to provide both options is that packages may dynamically open shared libraries,
|
||||
which CMake cannot detect. In those cases, the RPATHs from ``CMAKE_INSTALL_RPATH`` are
|
||||
used as search paths.
|
||||
|
||||
.. note::
|
||||
|
||||
Some packages provide stub libraries, which contain an interface for linking without
|
||||
an implementation. When using such libraries, it's best to override the option
|
||||
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=OFF`` in ``cmake_args``, so that stub libraries
|
||||
are not used at runtime.
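A sketch of such an override in a recipe, assuming the stub-library situation described
in the note:

.. code-block:: python

   def cmake_args(self):
       # restrict runtime RPATHs to CMAKE_INSTALL_RPATH so that stub
       # libraries are not picked up at runtime
       return ['-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=OFF']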
|
||||
|
||||
^^^^^^^^^^
|
||||
Generators
|
||||
@@ -275,6 +196,36 @@ generators, but it should be simple to add support for alternative
|
||||
generators. For more information on CMake generators, see:
|
||||
https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
CMAKE_BUILD_TYPE
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
|
||||
dictate which level of optimization to use. In order to ensure
|
||||
uniformity across packages, the ``CMakePackage`` base class adds
|
||||
a variant to control this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='RelWithDebInfo',
|
||||
description='CMake build type',
|
||||
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
|
||||
|
||||
However, not every CMake package accepts all four of these options.
|
||||
Grep the ``CMakeLists.txt`` file to see if the default values are
|
||||
missing or replaced. For example, the
|
||||
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
|
||||
package overrides the default variant with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='DebugRelease',
|
||||
description='The build type to build',
|
||||
values=('Debug', 'Release', 'DebugRelease'))
|
||||
|
||||
For more information on ``CMAKE_BUILD_TYPE``, see:
|
||||
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
CMakeLists.txt in a sub-directory
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -649,7 +649,7 @@ follow `the next section <intel-install-libs_>`_ instead.
|
||||
|
||||
* If you specified a custom variant (for example ``+vtune``) you may want to add this as your
|
||||
preferred variant in the packages configuration for the ``intel-parallel-studio`` package
|
||||
as described in :ref:`package-preferences`. Otherwise you will have to specify
|
||||
as described in :ref:`concretization-preferences`. Otherwise you will have to specify
|
||||
the variant every time ``intel-parallel-studio`` is used as the ``mkl``, ``fftw`` or ``mpi``
implementation, to avoid pulling in a different variant.
|
||||
|
||||
@@ -811,13 +811,13 @@ by one of the following means:
|
||||
$ spack install libxc@3.0.0%intel
|
||||
|
||||
|
||||
* Alternatively, request Intel compilers implicitly by package preferences.
|
||||
* Alternatively, request Intel compilers implicitly by concretization preferences.
|
||||
Configure the order of compilers in the appropriate ``packages.yaml`` file,
|
||||
under either an ``all:`` or client-package-specific entry, in a
|
||||
``compiler:`` list. Consult the Spack documentation for
|
||||
`Configuring Package Preferences <https://spack-tutorial.readthedocs.io/en/latest/tutorial_configuration.html#configuring-package-preferences>`_
|
||||
and
|
||||
:ref:`Package Preferences <package-preferences>`.
|
||||
:ref:`Concretization Preferences <concretization-preferences>`.
|
||||
|
||||
Example: ``etc/spack/packages.yaml`` might simply contain:
|
||||
|
||||
@@ -867,7 +867,7 @@ virtual package, in order of decreasing preference. To learn more about the
|
||||
``providers:`` settings, see the Spack tutorial for
|
||||
`Configuring Package Preferences <https://spack-tutorial.readthedocs.io/en/latest/tutorial_configuration.html#configuring-package-preferences>`_
|
||||
and the section
|
||||
:ref:`Package Preferences <package-preferences>`.
|
||||
:ref:`Concretization Preferences <concretization-preferences>`.
|
||||
|
||||
Example: The following fairly minimal example for ``packages.yaml`` shows how
|
||||
to exclusively use the standalone ``intel-mkl`` package for all the linear
|
||||
|
||||
@@ -1,105 +0,0 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _luapackage:
|
||||
|
||||
------------
|
||||
LuaPackage
|
||||
------------
|
||||
|
||||
LuaPackage is a helper for the common case of Lua packages that provide
a rockspec file. It is not meant to take a rock archive, but to build
a source archive or repository that provides a rockspec, which should cover
most Lua packages. If a Lua package builds with Make rather than
LuaRocks, prefer ``MakefilePackage``.
|
||||
|
||||
^^^^^^
|
||||
Phases
|
||||
^^^^^^
|
||||
|
||||
The ``LuaPackage`` base class comes with the following phases:
|
||||
|
||||
#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
|
||||
#. ``preprocess`` - adjust sources or rockspec to fix build
|
||||
#. ``install`` - install the project
|
||||
|
||||
By default, these phases run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# If the archive is a source rock
|
||||
$ luarocks unpack <archive>.src.rock
|
||||
$ # preprocess is a noop by default
|
||||
$ luarocks make <name>.rockspec
|
||||
|
||||
|
||||
Any of these phases can be overridden in your package as necessary.
|
||||
|
||||
^^^^^^^^^^^^^^^
|
||||
Important files
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
Packages that use the Lua/LuaRocks build system can be identified by the
|
||||
presence of a ``*.rockspec`` file in their sourcetree, or can be fetched as
|
||||
a source rock archive (``.src.rock``). This file declares things like build
|
||||
instructions and dependencies; the ``.src.rock`` also contains all the code.
|
||||
|
||||
It is common for the rockspec file to list the lua version required in
|
||||
a dependency. The LuaPackage class adds appropriate dependencies on a Lua
|
||||
implementation, but it is a good idea to specify the version required with
|
||||
a ``depends_on`` statement. The block normally will be a table definition like
|
||||
this:
|
||||
|
||||
.. code-block:: lua
|
||||
|
||||
dependencies = {
|
||||
"lua >= 5.1",
|
||||
}
|
||||
|
||||
The LuaPackage class supports source repositories and archives containing
|
||||
a rockspec and directly downloading source rock files. It *does not* support
|
||||
downloading dependencies listed inside a rockspec, and thus does not support
|
||||
directly downloading a rockspec as an archive.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Build system dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
All base dependencies are added by the build system, but LuaRocks is run to
|
||||
avoid downloading extra Lua dependencies during build. If the package needs
|
||||
Lua libraries outside the standard set, they should be added as dependencies.
|
||||
|
||||
To specify a Lua version constraint but allow all Lua implementations, prefer
to use ``depends_on("lua-lang@5.1:5.1.99")`` to express any 5.1-compatible
version. If the package requires LuaJIT rather than Lua,
a ``depends_on("luajit")`` should be used to ensure a LuaJIT distribution is
used instead of the Lua interpreter. Alternately, if only interpreted Lua will
work, ``depends_on("lua")`` will express that.
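As a quick reference, those constraints look like this in a ``package.py`` (a sketch;
pick whichever matches the package's needs):

.. code-block:: python

   depends_on("lua-lang@5.1:5.1.99")  # any 5.1-compatible Lua implementation
   depends_on("luajit")               # a LuaJIT distribution is required
   depends_on("lua")                  # only the interpreted Lua will work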
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Passing arguments to luarocks make
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you need to pass any arguments to the ``luarocks make`` call, you can
|
||||
override the ``luarocks_args`` method like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def luarocks_args(self):
|
||||
return ['flag1', 'flag2']
|
||||
|
||||
One common use of this is to override warnings or flags for newer compilers, as in:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def luarocks_args(self):
|
||||
return ["CFLAGS='-Wno-error=implicit-function-declaration'"]
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
External documentation
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For more information on the LuaRocks build system, see:
|
||||
https://luarocks.org/
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -9,80 +9,216 @@
|
||||
PythonPackage
|
||||
-------------
|
||||
|
||||
Python packages and modules have their own special build system. This
|
||||
documentation covers everything you'll need to know in order to write
|
||||
a Spack build recipe for a Python library.
|
||||
Python packages and modules have their own special build system.
|
||||
|
||||
^^^^^^^^^^^
|
||||
Terminology
|
||||
^^^^^^^^^^^
|
||||
^^^^^^
|
||||
Phases
|
||||
^^^^^^
|
||||
|
||||
In the Python ecosystem, there are a number of terms that are
|
||||
important to understand.
|
||||
The ``PythonPackage`` base class provides the following phases that
|
||||
can be overridden:
|
||||
|
||||
**PyPI**
|
||||
The `Python Package Index <https://pypi.org/>`_, where most Python
|
||||
libraries are hosted.
|
||||
* ``build``
|
||||
* ``build_py``
|
||||
* ``build_ext``
|
||||
* ``build_clib``
|
||||
* ``build_scripts``
|
||||
* ``install``
|
||||
* ``install_lib``
|
||||
* ``install_headers``
|
||||
* ``install_scripts``
|
||||
* ``install_data``
|
||||
|
||||
**sdist**
|
||||
Source distributions, distributed as tarballs (.tar.gz) and zip
|
||||
files (.zip). Contain the source code of the package.
|
||||
|
||||
**bdist**
|
||||
Built distributions, distributed as wheels (.whl). Contain the
|
||||
pre-built library.
|
||||
|
||||
**wheel**
|
||||
A binary distribution format common in the Python ecosystem. This
|
||||
file is actually just a zip file containing specific metadata and
|
||||
code. See the
|
||||
`documentation <https://packaging.python.org/en/latest/specifications/binary-distribution-format/>`_
|
||||
for more details.
|
||||
|
||||
**build frontend**
|
||||
Command-line tools used to build and install wheels. Examples
|
||||
include `pip <https://pip.pypa.io/>`_,
|
||||
`build <https://pypa-build.readthedocs.io/>`_, and
|
||||
`installer <https://installer.readthedocs.io/>`_.
|
||||
|
||||
**build backend**
|
||||
Libraries used to define how to build a wheel. Examples
|
||||
include `setuptools <https://setuptools.pypa.io/>`__,
|
||||
`flit <https://flit.readthedocs.io/>`_, and
|
||||
`poetry <https://python-poetry.org/>`_.
|
||||
|
||||
^^^^^^^^^^^
|
||||
Downloading
|
||||
^^^^^^^^^^^
|
||||
|
||||
The first step in packaging a Python library is to figure out where
|
||||
to download it from. The vast majority of Python packages are hosted
|
||||
on `PyPI <https://pypi.org/>`_, which is
|
||||
:ref:`preferred over GitHub <pypi-vs-github>` for downloading
|
||||
packages. Search for the package name on PyPI to find the project
|
||||
page. The project page is usually located at::
|
||||
|
||||
https://pypi.org/project/<package-name>
|
||||
|
||||
On the project page, there is a "Download files" tab containing
|
||||
download URLs. Whenever possible, we prefer to build Spack packages
|
||||
from source. If PyPI only has wheels, check to see if the project is
|
||||
hosted on GitHub and see if GitHub has source distributions. The
|
||||
project page usually has a "Homepage" and/or "Source code" link for
|
||||
this. If the project is closed-source, it may only have wheels
|
||||
available. For example, ``py-azureml-sdk`` is closed-source and can
|
||||
be downloaded from::
|
||||
|
||||
https://pypi.io/packages/py3/a/azureml_sdk/azureml_sdk-1.11.0-py3-none-any.whl
|
||||
|
||||
Once you've found a URL to download the package from, run:
|
||||
These are all standard ``setup.py`` commands and can be found by running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack create <url>
|
||||
$ python setup.py --help-commands
|
||||
|
||||
|
||||
to create a new package template.
|
||||
By default, only the ``build`` and ``install`` phases are run:
|
||||
|
||||
#. ``build`` - build everything needed to install
|
||||
#. ``install`` - install everything from build directory
|
||||
|
||||
If for whatever reason you need to run more phases, simply modify your
|
||||
``phases`` list like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
phases = ['build_ext', 'install']
|
||||
|
||||
|
||||
Each phase provides a function ``<phase>`` that runs:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python -s setup.py --no-user-cfg <phase>
|
||||
|
||||
|
||||
Each phase also has a ``<phase_args>`` function that can pass arguments to
|
||||
this call. All of these functions are empty except for the ``install_args``
|
||||
function, which passes ``--prefix=/path/to/installation/prefix``. There is
|
||||
also some additional logic specific to setuptools and eggs.
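For instance, a hedged sketch of overriding one of these ``<phase>_args`` functions
(the flag is only illustrative):

.. code-block:: python

   def build_args(self, spec, prefix):
       # extra arguments appended to `python setup.py build`
       return ['--force']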
|
||||
If you need to run a phase that is not a standard ``setup.py`` command,
|
||||
you'll need to define a function for it like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
phases = ['configure', 'build', 'install']
|
||||
|
||||
def configure(self, spec, prefix):
|
||||
self.setup_py('configure')
|
||||
|
||||
|
||||
^^^^^^
|
||||
Wheels
|
||||
^^^^^^
|
||||
|
||||
Some Python packages are closed-source and distributed as wheels.
|
||||
Instead of using the ``PythonPackage`` base class, you should extend
|
||||
the ``Package`` base class and implement the following custom installation
|
||||
procedure:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def install(self, spec, prefix):
|
||||
pip = which('pip')
|
||||
pip('install', self.stage.archive_file, '--prefix={0}'.format(prefix))
|
||||
|
||||
|
||||
This will require a dependency on pip, as mentioned below.
|
||||
|
||||
^^^^^^^^^^^^^^^
|
||||
Important files
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
Python packages can be identified by the presence of a ``setup.py`` file.
|
||||
This file is used by package managers like ``pip`` to determine a
|
||||
package's dependencies and the version of dependencies required, so if
|
||||
the ``setup.py`` file is not accurate, the package will not build properly.
|
||||
For this reason, the ``setup.py`` file should be fairly reliable. If the
|
||||
documentation and ``setup.py`` disagree on something, the ``setup.py``
|
||||
file should be considered to be the truth. As dependencies are added or
|
||||
removed, the documentation is much more likely to become outdated than
|
||||
the ``setup.py``.
|
||||
|
||||
The Python ecosystem has evolved significantly over the years. Before
|
||||
setuptools became popular, most packages listed their dependencies in a
|
||||
``requirements.txt`` file. Once setuptools took over, these dependencies
|
||||
were listed directly in the ``setup.py``. Newer PEPs introduced additional
|
||||
files, like ``setup.cfg`` and ``pyproject.toml``. You should look out for
|
||||
all of these files, as they may all contain important information about
|
||||
package dependencies.
|
||||
|
||||
Some Python packages are closed-source and are distributed as Python
|
||||
wheels. For example, ``py-azureml-sdk`` downloads a ``.whl`` file. This
|
||||
file is simply a zip file, and can be extracted using:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ unzip *.whl
|
||||
|
||||
|
||||
The zip file will not contain a ``setup.py``, but it will contain a
|
||||
``METADATA`` file which contains all the information you need to
|
||||
write a ``package.py`` build recipe.
|
||||
|
||||
.. _pypi:
|
||||
|
||||
^^^^
|
||||
PyPI
|
||||
^^^^
|
||||
|
||||
The vast majority of Python packages are hosted on PyPI (The Python
|
||||
Package Index), which is :ref:`preferred over GitHub <pypi-vs-github>`
|
||||
for downloading packages. ``pip`` only supports packages hosted on PyPI, making
|
||||
it the only option for developers who want a simple installation.
|
||||
Search for "PyPI <package-name>" to find the download page. Note that
|
||||
some pages are versioned, and the first result may not be the newest
|
||||
version. Click on the "Latest Version" button to the top right to see
|
||||
if a newer version is available. The download page is usually at::
|
||||
|
||||
https://pypi.org/project/<package-name>
|
||||
|
||||
|
||||
Since PyPI is so common, the ``PythonPackage`` base class has a
|
||||
``pypi`` attribute that can be set. Once set, ``pypi`` will be used
|
||||
to define the ``homepage``, ``url``, and ``list_url``. For example,
|
||||
the following:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
homepage = 'https://pypi.org/project/setuptools/'
|
||||
url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
|
||||
list_url = 'https://pypi.org/simple/setuptools/'
|
||||
|
||||
|
||||
is equivalent to:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
pypi = 'setuptools/setuptools-49.2.0.zip'
|
||||
|
||||
|
||||
^^^^^^^^^^^
|
||||
Description
|
||||
^^^^^^^^^^^
|
||||
|
||||
The top of the PyPI downloads page contains a description of the
|
||||
package. The first line is usually a short description, while there
|
||||
may be a several line "Project Description" that follows. Choose whichever
|
||||
is more useful. You can also get these descriptions on the command-line
|
||||
using:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py --description
|
||||
$ python setup.py --long-description
|
||||
|
||||
|
||||
^^^^^^^^
|
||||
Homepage
|
||||
^^^^^^^^
|
||||
|
||||
Package developers use ``setup.py`` to upload new versions to PyPI.
|
||||
The ``setup`` method often passes metadata like ``homepage`` to PyPI.
|
||||
This metadata is displayed on the left side of the download page.
|
||||
Search for the text "Homepage" under "Project links" to find it. You
|
||||
should use this page instead of the PyPI page if they differ. You can
|
||||
also get the homepage on the command-line by running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py --url
|
||||
|
||||
|
||||
^^^
|
||||
URL
|
||||
^^^
|
||||
|
||||
If ``pypi`` is set as mentioned above, ``url`` and ``list_url`` will
|
||||
be automatically set for you. If both ``.tar.gz`` and ``.zip`` versions
|
||||
are available, ``.tar.gz`` is preferred. If some releases offer both
|
||||
``.tar.gz`` and ``.zip`` versions, but some only offer ``.zip`` versions,
|
||||
use ``.zip``.
|
||||
|
||||
Some Python packages are closed-source and do not ship ``.tar.gz`` or ``.zip``
|
||||
files on either PyPI or GitHub. If this is the case, you can still download
|
||||
and install a Python wheel. For example, ``py-azureml-sdk`` is closed source
|
||||
and can be downloaded from::
|
||||
|
||||
https://pypi.io/packages/py3/a/azureml_sdk/azureml_sdk-1.11.0-py3-none-any.whl
|
||||
|
||||
|
||||
You may see Python-specific or OS-specific URLs. Note that when you add a
|
||||
``.whl`` URL, you should add ``expand=False`` to ensure that Spack doesn't
|
||||
try to extract the wheel:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
version('1.11.0', sha256='d8c9d24ea90457214d798b0d922489863dad518adde3638e08ef62de28fb183a', expand=False)
|
||||
|
||||
.. _pypi-vs-github:
|
||||
|
||||
@@ -90,13 +226,11 @@ to create a new package template.
|
||||
PyPI vs. GitHub
|
||||
"""""""""""""""
|
||||
|
||||
Many packages are hosted on PyPI, but are developed on GitHub or
|
||||
another version control system hosting service. The source code can
|
||||
be downloaded from either location, but PyPI is preferred for the
|
||||
following reasons:
|
||||
Many packages are hosted on PyPI, but are developed on GitHub or another
|
||||
version control systems. The tarball can be downloaded from either
|
||||
location, but PyPI is preferred for the following reasons:
|
||||
|
||||
#. PyPI contains the bare minimum number of files needed to install
|
||||
the package.
|
||||
#. PyPI contains the bare minimum number of files needed to install the package.
|
||||
|
||||
You may notice that the tarball you download from PyPI does not
|
||||
have the same checksum as the tarball you download from GitHub.
|
||||
@@ -133,124 +267,252 @@ following reasons:
|
||||
PyPI is nice because it makes it physically impossible to
|
||||
re-release the same version of a package with a different checksum.
|
||||
|
||||
The only reason to use GitHub instead of PyPI is if PyPI only has
|
||||
wheels or if the PyPI sdist is missing a file needed to build the
|
||||
package. If this is the case, please add a comment above the ``url``
|
||||
explaining this.
|
||||
Use the :ref:`pypi attribute <pypi>` to facilitate construction of PyPI package
|
||||
references.
|
||||
|
||||
^^^^
|
||||
PyPI
|
||||
^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Build system dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Since PyPI is so commonly used to host Python libraries, the
|
||||
``PythonPackage`` base class has a ``pypi`` attribute that can be
|
||||
set. Once set, ``pypi`` will be used to define the ``homepage``,
|
||||
``url``, and ``list_url``. For example, the following:
|
||||
There are a few dependencies common to the ``PythonPackage`` build system.
|
||||
|
||||
""""""
|
||||
Python
|
||||
""""""
|
||||
|
||||
Obviously, every ``PythonPackage`` needs Python at build-time to run
|
||||
``python setup.py build && python setup.py install``. Python is also
|
||||
needed at run-time if you want to import the module. Due to backwards
|
||||
incompatible changes between Python 2 and 3, it is very important to
|
||||
specify which versions of Python are supported. If the documentation
|
||||
mentions that Python 3 is required, this can be specified as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
homepage = 'https://pypi.org/project/setuptools/'
|
||||
url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
|
||||
list_url = 'https://pypi.org/simple/setuptools/'
|
||||
depends_on('python@3:', type=('build', 'run'))
|
||||
|
||||
|
||||
is equivalent to:
|
||||
If Python 2 is required, this would look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
pypi = 'setuptools/setuptools-49.2.0.zip'
|
||||
depends_on('python@:2', type=('build', 'run'))
|
||||
|
||||
|
||||
If a package has a different homepage listed on PyPI, you can
|
||||
override it by setting your own ``homepage``.
|
||||
|
||||
^^^^^^^^^^^
|
||||
Description
|
||||
^^^^^^^^^^^
|
||||
|
||||
The top of the PyPI project page contains a short description of the
|
||||
package. The "Project description" tab may also contain a longer
|
||||
description of the package. Either of these can be used to populate
|
||||
the package docstring.
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
Build backend
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
Once you've determined the basic metadata for a package, the next
|
||||
step is to determine the build backend. ``PythonPackage`` uses
|
||||
`pip <https://pip.pypa.io/>`_ to install the package, but pip
|
||||
requires a backend to actually build the package.
|
||||
|
||||
To determine the build backend, look for a ``pyproject.toml`` file.
|
||||
If there is no ``pyproject.toml`` file and only a ``setup.py`` or
|
||||
``setup.cfg`` file, you can assume that the project uses
|
||||
:ref:`setuptools`. If there is a ``pyproject.toml`` file, see if it
|
||||
contains a ``[build-system]`` section. For example:
|
||||
|
||||
.. code-block:: toml
|
||||
|
||||
[build-system]
|
||||
requires = [
|
||||
"setuptools>=42",
|
||||
"wheel",
|
||||
]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
|
||||
This section does two things: the ``requires`` key lists build
|
||||
dependencies of the project, and the ``build-backend`` key defines
|
||||
the build backend. All of these build dependencies should be added as
|
||||
dependencies to your package:
|
||||
If Python 2.7 is the only version that works, you can use:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('py-setuptools@42:', type='build')
|
||||
depends_on('python@2.7:2.8', type=('build', 'run'))
|
||||
|
||||
|
||||
Note that ``py-wheel`` is already listed as a build dependency in the
|
||||
``PythonPackage`` base class, so you don't need to add it unless you
|
||||
need to specify a specific version requirement or change the
|
||||
dependency type.
|
||||
The documentation may not always specify supported Python versions.
|
||||
Another place to check is in the ``setup.py`` or ``setup.cfg`` file.
|
||||
Look for a line containing ``python_requires``. An example from
|
||||
`py-numpy <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-numpy/package.py>`_
|
||||
looks like:
|
||||
|
||||
See `PEP 517 <https://www.python.org/dev/peps/pep-0517/>`_ and
|
||||
`PEP 518 <https://www.python.org/dev/peps/pep-0518/>`_ for more
|
||||
information on the design of ``pyproject.toml``.
|
||||
.. code-block:: python
|
||||
|
||||
Depending on which build backend a project uses, there are various
|
||||
places that run-time dependencies can be listed.
|
||||
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*'
|
||||
|
||||
"""""""""
|
||||
distutils
|
||||
"""""""""
|
||||
|
||||
Before the introduction of setuptools and other build backends,
|
||||
Python packages had to rely on the built-in distutils library.
|
||||
Distutils is missing many of the features that setuptools and other
|
||||
build backends offer, and users are encouraged to use setuptools
|
||||
instead. In fact, distutils was deprecated in Python 3.10 and will be
|
||||
removed in Python 3.12. Because of this, pip actually replaces all
|
||||
imports of distutils with setuptools. If a package uses distutils,
|
||||
you should instead add a build dependency on setuptools. Check for a
|
||||
``requirements.txt`` file that may list dependencies of the project.
|
||||
You may also find a version check at the top of the ``setup.py``:
|
||||
|
||||
.. _setuptools:
|
||||
.. code-block:: python
|
||||
|
||||
if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 4):
|
||||
raise RuntimeError("Python version 2.7 or >= 3.4 required.")
|
||||
|
||||
|
||||
This can be converted to Spack's spec notation like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
|
||||
|
||||
|
||||
If you are writing a recipe for a package that only distributes
|
||||
wheels, look for a section in the ``METADATA`` file that looks like::
|
||||
|
||||
Requires-Python: >=3.5,<4
|
||||
|
||||
|
||||
This would be translated to:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
extends('python')
|
||||
depends_on('python@3.5:3', type=('build', 'run'))
|
||||
|
||||
|
||||
Many ``setup.py`` or ``setup.cfg`` files also contain information like::
|
||||
|
||||
Programming Language :: Python :: 2
|
||||
Programming Language :: Python :: 2.6
|
||||
Programming Language :: Python :: 2.7
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.3
|
||||
Programming Language :: Python :: 3.4
|
||||
Programming Language :: Python :: 3.5
|
||||
Programming Language :: Python :: 3.6
|
||||
|
||||
|
||||
This is a list of versions of Python that the developer likely tests.
|
||||
However, you should not use this to restrict the versions of Python
|
||||
the package uses unless one of the two former methods (``python_requires``
|
||||
or ``sys.version_info``) is used. There is no logic in setuptools
|
||||
that prevents the package from building for Python versions not in
|
||||
this list, and often new releases like Python 3.7 or 3.8 work just fine.
|
||||
|
||||
""""""""""
|
||||
setuptools
|
||||
""""""""""
|
||||
|
||||
If the ``pyproject.toml`` lists ``setuptools.build_meta`` as a
|
||||
``build-backend``, or if the package has a ``setup.py`` that imports
|
||||
``setuptools``, or if the package has a ``setup.cfg`` file, then it
|
||||
uses setuptools to build. Setuptools is a replacement for the
|
||||
distutils library, and has almost the exact same API. Dependencies
|
||||
can be listed in the ``setup.py`` or ``setup.cfg`` file. Look for the
|
||||
following arguments:
|
||||
Originally, the Python language had a single build system called
|
||||
distutils, which is built into Python. Distutils provided a common
|
||||
framework for package authors to describe their project and how it
|
||||
should be built. However, distutils was not without limitations.
|
||||
Most notably, there was no way to list a project's dependencies
|
||||
with distutils. Along came setuptools, a non-builtin build system
|
||||
designed to overcome the limitations of distutils. Both projects
|
||||
use a similar API, making the transition easy while adding much
|
||||
needed functionality. Today, setuptools is used in around 90% of
|
||||
the Python packages in Spack.
|
||||
|
||||
Since setuptools isn't built-in to Python, you need to add it as a
|
||||
dependency. To determine whether or not a package uses setuptools,
|
||||
search the file for an import statement like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import setuptools
|
||||
|
||||
|
||||
or:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
|
||||
Some packages are designed to work with both setuptools and distutils,
|
||||
so you may find something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
try:
|
||||
from setuptools import setup
|
||||
except ImportError:
|
||||
from distutils.core import setup
|
||||
|
||||
|
||||
This uses setuptools if available, and falls back to distutils if not.
|
||||
In this case, you would still want to add a setuptools dependency, as
|
||||
it offers us more control over the installation.
|
||||
|
||||
Unless specified otherwise, setuptools is usually a build-only dependency.
|
||||
That is, it is needed to install the software, but is not needed at
|
||||
run-time. This can be specified as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('py-setuptools', type='build')
|
||||
|
||||
|
||||
"""
|
||||
pip
|
||||
"""
|
||||
|
||||
Packages distributed as Python wheels will require an extra dependency
|
||||
on pip:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('py-pip', type='build')
|
||||
|
||||
|
||||
We will use pip to install the actual wheel.
|
||||
|
||||
""""""
|
||||
cython
|
||||
""""""
|
||||
|
||||
Compared to compiled languages, interpreted languages like Python can
|
||||
be quite a bit slower. To work around this, some Python developers
|
||||
rewrite computationally demanding sections of code in C, a process
|
||||
referred to as "cythonizing". In order to build these packages, you
|
||||
need to add a build dependency on cython:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('py-cython', type='build')
|
||||
|
||||
|
||||
Look for references to "cython" in the ``setup.py`` to determine
|
||||
whether or not this is necessary. Cython may be optional, but
|
||||
even then you should list it as a required dependency. Spack is
|
||||
designed to compile software, and is meant for HPC facilities
|
||||
where speed is crucial. There is no reason why someone would not
|
||||
want an optimized version of a library instead of the pure-Python
|
||||
version.
|
||||
|
||||
Note that some release tarballs come pre-cythonized, and cython is
|
||||
not needed as a dependency. However, this is becoming less common
|
||||
as Python continues to evolve and developers discover that cythonized
|
||||
sources are no longer compatible with newer versions of Python and
|
||||
need to be re-cythonized.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Python dependencies
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When you install a package with ``pip``, it reads the ``setup.py``
|
||||
file in order to determine the dependencies of the package.
|
||||
If the dependencies are not yet installed, ``pip`` downloads them
|
||||
and installs them for you. This may sound convenient, but Spack
|
||||
cannot rely on this behavior for two reasons:
|
||||
|
||||
#. Spack needs to be able to install packages on air-gapped networks.
|
||||
|
||||
If there is no internet connection, ``pip`` can't download the
|
||||
package dependencies. By explicitly listing every dependency in
|
||||
the ``package.py``, Spack knows what to download ahead of time.
|
||||
|
||||
#. Duplicate installations of the same dependency may occur.
|
||||
|
||||
Spack supports *activation* of Python extensions, which involves
|
||||
symlinking the package installation prefix to the Python installation
|
||||
prefix. If your package is missing a dependency, that dependency
|
||||
will be installed to the installation directory of the same package.
|
||||
If you try to activate the package + dependency, it may cause a
|
||||
problem if that package has already been activated.
|
||||
|
||||
For these reasons, you must always explicitly list all dependencies.
|
||||
Although the documentation may list the package's dependencies,
|
||||
often the developers assume people will use ``pip`` and won't have to
|
||||
worry about it. Always check the ``setup.py`` to find the true
|
||||
dependencies.
|
||||
|
||||
If the package relies on ``distutils``, it may not explicitly list its
|
||||
dependencies. Check for statements like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
try:
|
||||
import numpy
|
||||
except ImportError:
|
||||
raise ImportError("numpy must be installed prior to installation")
|
||||
|
||||
|
||||
Obviously, this means that ``py-numpy`` is a dependency.
|
||||
|
||||
If the package uses ``setuptools``, check for the following clues:
|
||||
|
||||
* ``python_requires``
|
||||
|
||||
This specifies the version of Python that is required.
|
||||
As mentioned above, this specifies which versions of Python are
|
||||
required.
|
||||
|
||||
* ``setup_requires``
|
||||
|
||||
@@ -262,88 +524,43 @@ following arguments:
|
||||
These packages are required for building and installation. You can
|
||||
add them with ``type=('build', 'run')``.
|
||||
|
||||
* ``extras_require``
|
||||
* ``extra_requires``
|
||||
|
||||
These packages are optional dependencies that enable additional
|
||||
functionality. You should add a variant that optionally adds these
|
||||
dependencies. This variant should be False by default.
|
||||
|
||||
* ``tests_require``
|
||||
* ``test_requires``
|
||||
|
||||
These are packages that are required to run the unit tests for the
|
||||
package. These dependencies can be specified using the
|
||||
``type='test'`` dependency type. However, the PyPI tarballs rarely
|
||||
contain unit tests, so there is usually no reason to add these.
|
||||
|
||||
See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
|
||||
for more information on how setuptools handles dependency management.
|
||||
See `PEP 440 <https://www.python.org/dev/peps/pep-0440/#version-specifiers>`_
|
||||
for documentation on version specifiers in setuptools.
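As a rough guide, those keys map to Spack dependencies along these lines (the package
names below are placeholders, not taken from a real ``setup.py``):

.. code-block:: python

   depends_on('python@3.6:', type=('build', 'run'))    # python_requires
   depends_on('py-setuptools', type='build')           # setup_requires
   depends_on('py-example', type=('build', 'run'))     # install_requires
   depends_on('py-example-extra', when='+extra',
              type=('build', 'run'))                   # extras_require, behind a variant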
In the root directory of the package, you may notice a
|
||||
``requirements.txt`` file. It may look like this file contains a list
|
||||
of all of the package's dependencies. Don't be fooled. This file is
|
||||
used by tools like Travis to install the pre-requisites for the
|
||||
package... and a whole bunch of other things. It often contains
|
||||
dependencies only needed for unit tests, like:
|
||||
|
||||
""""
|
||||
flit
|
||||
""""
|
||||
* mock
|
||||
* nose
|
||||
* pytest
|
||||
|
||||
There are actually two possible ``build-backend`` for flit, ``flit``
|
||||
and ``flit_core``. If you see these in the ``pyproject.toml``, add a
|
||||
build dependency to your package. With flit, all dependencies are
|
||||
listed directly in the ``pyproject.toml`` file. Older versions of
|
||||
flit used to store this info in a ``flit.ini`` file, so check for
|
||||
this too.
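A hedged sketch of the resulting build dependency (which of the two backends to depend
on, and any version range, comes from the project's ``pyproject.toml``):

.. code-block:: python

   depends_on('py-flit-core', type='build')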
It can also contain dependencies for building the documentation, like
|
||||
sphinx. If you can't find any information about the package's
|
||||
dependencies, you can take a look in ``requirements.txt``, but be sure
|
||||
not to add test or documentation dependencies.
|
||||
|
||||
Either of these files may contain keys like:
|
||||
Newer PEPs have added alternative ways to specify a package's dependencies.
|
||||
If you don't see any dependencies listed in the ``setup.py``, look for a
|
||||
``setup.cfg`` or ``pyproject.toml``. These files can be used to store the
|
||||
same ``install_requires`` information that ``setup.py`` used to use.
|
||||
|
||||
* ``requires-python``
|
||||
If you are writing a recipe for a package that only distributes wheels,
|
||||
check the ``METADATA`` file for lines like::
|
||||
|
||||
This specifies the version of Python that is required
|
||||
|
||||
* ``dependencies`` or ``requires``
|
||||
|
||||
These packages are required for building and installation. You can
|
||||
add them with ``type=('build', 'run')``.
|
||||
|
||||
* ``project.optional-dependencies`` or ``requires-extra``
|
||||
|
||||
This section includes keys with lists of optional dependencies
|
||||
needed to enable those features. You should add a variant that
|
||||
optionally adds these dependencies. This variant should be False
|
||||
by default (a sketch follows below).
|
||||
|
||||
See https://flit.readthedocs.io/en/latest/pyproject_toml.html for
|
||||
more information.
|
||||
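For the optional dependency keys described above, a variant-gated dependency is
the usual pattern. This is only a sketch; the variant and package names are made up:

.. code-block:: python

   # Optional feature, off by default, pulling in its extra dependencies.
   variant('extras', default=False, description='Enable optional features')
   depends_on('py-requests', when='+extras', type=('build', 'run'))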
|
||||
""""""
|
||||
poetry
|
||||
""""""
|
||||
|
||||
Like flit, poetry also has two possible ``build-backend``, ``poetry``
|
||||
and ``poetry_core``. If you see these in the ``pyproject.toml``, add
|
||||
a build dependency to your package. With poetry, all dependencies are
|
||||
listed directly in the ``pyproject.toml`` file. Dependencies are
|
||||
listed in a ``[tool.poetry.dependencies]`` section, and use a
|
||||
`custom syntax <https://python-poetry.org/docs/dependency-specification/#version-constraints>`_
|
||||
for specifying the version requirements. Note that ``~=`` works
|
||||
differently in poetry than in setuptools and flit for versions that
|
||||
start with a zero.
|
||||
|
||||
""""""
|
||||
wheels
|
||||
""""""
|
||||
|
||||
Some Python packages are closed-source and are distributed as Python
|
||||
wheels. For example, ``py-azureml-sdk`` downloads a ``.whl`` file. This
|
||||
file is simply a zip file, and can be extracted using:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ unzip *.whl
|
||||
|
||||
|
||||
The zip file will not contain a ``setup.py``, but it will contain a
|
||||
``METADATA`` file which contains all the information you need to
|
||||
write a ``package.py`` build recipe. Check for lines like::
|
||||
|
||||
Requires-Python: >=3.5,<4
|
||||
Requires-Dist: azureml-core (~=1.11.0)
|
||||
Requires-Dist: azureml-dataset-runtime[fuse] (~=1.11.0)
|
||||
Requires-Dist: azureml-train (~=1.11.0)
|
||||
@@ -355,58 +572,62 @@ write a ``package.py`` build recipe. Check for lines like::
|
||||
Requires-Dist: azureml-train-automl (~=1.11.0); extra == 'automl'
|
||||
|
||||
|
||||
``Requires-Python`` is equivalent to ``python_requires`` and
|
||||
``Requires-Dist`` is equivalent to ``install_requires``.
|
||||
``Provides-Extra`` is used to name optional features (variants) and
|
||||
a ``Requires-Dist`` with ``extra == 'foo'`` will list any
|
||||
dependencies needed for that feature.
|
||||
Lines that use ``Requires-Dist`` are similar to ``install_requires``.
|
||||
Lines that use ``Provides-Extra`` are similar to ``extra_requires``,
|
||||
and you can add a variant for those dependencies. The ``~=1.11.0``
|
||||
syntax is equivalent to ``1.11.0:1.11``.
|
||||
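As a rough, non-authoritative translation of the ``METADATA`` lines above into a
recipe (the upper bound from ``Requires-Python`` is omitted here for brevity, and
the ``py-`` package names are assumed to follow the usual convention):

.. code-block:: python

   depends_on('python@3.5:', type=('build', 'run'))  # Requires-Python: >=3.5,<4
   # ~=1.11.0 corresponds to the Spack range 1.11.0:1.11
   depends_on('py-azureml-core@1.11.0:1.11', type=('build', 'run'))
   # Provides-Extra / extra == 'automl' becomes a variant.
   variant('automl', default=False, description='Enable AutoML support')
   depends_on('py-azureml-train-automl@1.11.0:1.11', when='+automl',
              type=('build', 'run'))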
|
||||
""""""""""
|
||||
setuptools
|
||||
""""""""""
|
||||
|
||||
Setuptools is a bit of a special case. If a package requires setuptools
|
||||
at run-time, how do the developers express this? They could add it to
|
||||
``install_requires``, but setuptools is imported long before this and is
|
||||
needed to read this line. And since you can't install the package
|
||||
without setuptools, the developers assume that setuptools will already
|
||||
be there, so they never mention when it is required. We don't want to
|
||||
add run-time dependencies if they aren't needed, so you need to
|
||||
determine whether or not setuptools is needed. Grep the installation
|
||||
directory for any files containing a reference to ``setuptools`` or
|
||||
``pkg_resources``. Both modules come from ``py-setuptools``.
|
||||
``pkg_resources`` is particularly common in scripts found in
|
||||
``prefix/bin``.
|
||||
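If such references do turn up, a reasonable (but not the only) way to record the
result is to keep setuptools around at run time; otherwise a build-only dependency
is enough:

.. code-block:: python

   # Installed scripts import pkg_resources, so keep setuptools at run time.
   depends_on('py-setuptools', type=('build', 'run'))

   # If nothing in the prefix references setuptools or pkg_resources:
   # depends_on('py-setuptools', type='build')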
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Passing arguments to setup.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The default install phase should be sufficient to install most
|
||||
packages. However, the installation instructions for a package may
|
||||
suggest passing certain flags to the ``setup.py`` call. The
|
||||
``PythonPackage`` class has two techniques for doing this.
|
||||
The default build and install phases should be sufficient to install
|
||||
most packages. However, you may want to pass additional flags to
|
||||
either phase.
|
||||
|
||||
""""""""""""""
|
||||
Global options
|
||||
""""""""""""""
|
||||
You can view the available options for a particular phase with:
|
||||
|
||||
These flags are added directly after ``setup.py`` when pip runs
|
||||
``python setup.py install``. For example, the ``py-pyyaml`` package
|
||||
has an optional dependency on ``libyaml`` that can be enabled like so:
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py <phase> --help
|
||||
|
||||
|
||||
Each phase provides a ``<phase_args>`` function that can be used to
|
||||
pass arguments to that phase. For example,
|
||||
`py-numpy <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-numpy/package.py>`_
|
||||
adds:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def global_options(self, spec, prefix):
|
||||
options = []
|
||||
if '+libyaml' in spec:
|
||||
options.append('--with-libyaml')
|
||||
else:
|
||||
options.append('--without-libyaml')
|
||||
return options
|
||||
def build_args(self, spec, prefix):
|
||||
args = []
|
||||
|
||||
# From NumPy 1.10.0 on it's possible to do a parallel build.
|
||||
if self.version >= Version('1.10.0'):
|
||||
# But Parallel build in Python 3.5+ is broken. See:
|
||||
# https://github.com/spack/spack/issues/7927
|
||||
# https://github.com/scipy/scipy/issues/7112
|
||||
if spec['python'].version < Version('3.5'):
|
||||
args = ['-j', str(make_jobs)]
|
||||
|
||||
"""""""""""""""
|
||||
Install options
|
||||
"""""""""""""""
|
||||
|
||||
These flags are added directly after ``install`` when pip runs
|
||||
``python setup.py install``. For example, the ``py-pyyaml`` package
|
||||
allows you to specify the directories to search for ``libyaml``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def install_options(self, spec, prefix):
|
||||
options = []
|
||||
if '+libyaml' in spec:
|
||||
options.extend([
|
||||
spec['libyaml'].libs.search_flags,
|
||||
spec['libyaml'].headers.include_flags,
|
||||
])
|
||||
return options
|
||||
return args
|
||||
|
||||
|
||||
^^^^^^^
|
||||
@@ -448,9 +669,9 @@ a "package" is a directory containing files like:
|
||||
|
||||
whereas a "module" is a single Python file.
|
||||
|
||||
The ``PythonPackage`` base class automatically detects these package
|
||||
and module names for you. If, for whatever reason, the module names
|
||||
detected are wrong, you can provide the names yourself by overriding
|
||||
The ``PythonPackage`` base class automatically detects these module
|
||||
names for you. If, for whatever reason, the module names detected
|
||||
are wrong, you can provide the names yourself by overriding
|
||||
``import_modules`` like so:
|
||||
|
||||
.. code-block:: python
|
||||
@@ -471,8 +692,10 @@ This can be expressed like so:
|
||||
@property
|
||||
def import_modules(self):
|
||||
modules = ['yaml']
|
||||
|
||||
if '+libyaml' in self.spec:
|
||||
modules.append('yaml.cyaml')
|
||||
|
||||
return modules
|
||||
|
||||
|
||||
@@ -490,8 +713,8 @@ Unit tests
|
||||
""""""""""
|
||||
|
||||
The package may have its own unit or regression tests. Spack can
|
||||
run these tests during the installation by adding test methods after
|
||||
installation.
|
||||
run these tests during the installation by adding phase-appropriate
|
||||
test methods.
|
||||
|
||||
For example, ``py-numpy`` adds the following as a check to run
|
||||
after the ``install`` phase:
|
||||
@@ -517,14 +740,34 @@ when testing is enabled during the installation (i.e., ``spack install
|
||||
Setup file in a sub-directory
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Many C/C++ libraries provide optional Python bindings in a
|
||||
subdirectory. To tell pip which directory to build from, you can
|
||||
override the ``build_directory`` attribute. For example, if a package
|
||||
provides Python bindings in a ``python`` directory, you can use:
|
||||
In order to be compatible with package managers like ``pip``, the package
|
||||
is required to place its ``setup.py`` in the root of the tarball. However,
|
||||
not every Python package cares about ``pip`` or PyPI. If you are installing
|
||||
a package that is not hosted on PyPI, you may find that it places its
|
||||
``setup.py`` in a sub-directory. To handle this, add the directory containing
|
||||
``setup.py`` to the package like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
build_directory = 'python'
|
||||
build_directory = 'source'
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Alternate names for setup.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
As previously mentioned, packages need to call their setup script ``setup.py``
|
||||
in order to be compatible with package managers like ``pip``. However, some
|
||||
packages like
|
||||
`py-meep <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-meep/package.py>`_ and
|
||||
`py-adios <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-adios/package.py>`_
|
||||
come with multiple setup scripts, one for a serial build and another for a
|
||||
parallel build. You can override the default name to use like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_file(self):
|
||||
return 'setup-mpi.py' if '+mpi' in self.spec else 'setup.py'
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -538,14 +781,10 @@ on Python are not necessarily ``PythonPackage``'s.
|
||||
Choosing a build system
|
||||
"""""""""""""""""""""""
|
||||
|
||||
First of all, you need to select a build system. ``spack create``
|
||||
usually does this for you, but if for whatever reason you need to do
|
||||
this manually, choose ``PythonPackage`` if and only if the package
|
||||
contains one of the following files:
|
||||
|
||||
* ``pyproject.toml``
|
||||
* ``setup.py``
|
||||
* ``setup.cfg``
|
||||
First of all, you need to select a build system. ``spack create`` usually
|
||||
does this for you, but if for whatever reason you need to do this manually,
|
||||
choose ``PythonPackage`` if and only if the package contains a ``setup.py``
|
||||
file.
|
||||
|
||||
"""""""""""""""""""""""
|
||||
Choosing a package name
|
||||
@@ -618,9 +857,10 @@ having to add that module to ``PYTHONPATH``.
|
||||
|
||||
When deciding between ``extends`` and ``depends_on``, the best rule of
|
||||
thumb is to check the installation prefix. If Python libraries are
|
||||
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
|
||||
should use ``extends``. If Python libraries are installed elsewhere
|
||||
or the only files that get installed reside in ``<prefix>/bin``, then
|
||||
installed to ``prefix/lib/python2.7/site-packages`` (where 2.7 is the
|
||||
MAJOR.MINOR version of Python you used to install the package), then
|
||||
you should use ``extends``. If Python libraries are installed elsewhere
|
||||
or the only files that get installed reside in ``prefix/bin``, then
|
||||
don't use ``extends``, as symlinking the package wouldn't be useful.
|
||||
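As a short sketch of this rule of thumb (the package layout is hypothetical):

.. code-block:: python

   # Installs into <prefix>/lib/pythonX.Y/site-packages, so extend Python.
   extends('python')

   # If it only installed scripts into <prefix>/bin, a plain dependency
   # would be enough instead:
   # depends_on('python', type=('build', 'run'))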
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -645,24 +885,12 @@ are not yet in Spack, and Spack contains many Python packages that are
|
||||
not yet in Anaconda. The main advantage of Spack over Anaconda is its
|
||||
ability to choose a specific compiler and BLAS/LAPACK or MPI library.
|
||||
Spack also has better platform support for supercomputers, and can build
|
||||
optimized binaries for your specific microarchitecture.
|
||||
optimized binaries for your specific microarchitecture. On the other hand,
|
||||
Anaconda offers Windows support.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
External documentation
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For more information on Python packaging, see:
|
||||
|
||||
* https://packaging.python.org/
|
||||
|
||||
For more information on build and installation frontend tools, see:
|
||||
|
||||
* pip: https://pip.pypa.io/
|
||||
* build: https://pypa-build.readthedocs.io/
|
||||
* installer: https://installer.readthedocs.io/
|
||||
|
||||
For more information on build backend tools, see:
|
||||
|
||||
* setuptools: https://setuptools.pypa.io/
|
||||
* flit: https://flit.readthedocs.io/
|
||||
* poetry: https://python-poetry.org/
|
||||
https://packaging.python.org/
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _racketpackage:
|
||||
|
||||
-------------
|
||||
RacketPackage
|
||||
-------------
|
||||
|
||||
Much like Python, Racket packages and modules have their own special build system.
|
||||
To learn more about the specifics of the Racket package system, please refer to the
|
||||
`Racket Docs <https://docs.racket-lang.org/pkg/cmdline.html>`_.
|
||||
|
||||
^^^^^^
|
||||
Phases
|
||||
^^^^^^
|
||||
|
||||
The ``RacketPackage`` base class provides an ``install`` phase that
|
||||
can be overridden, corresponding to the use of:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ raco pkg install
|
||||
|
||||
^^^^^^^
|
||||
Caveats
|
||||
^^^^^^^
|
||||
|
||||
In principle, ``raco`` supports a second, ``setup`` phase; however, we have not
|
||||
implemented this separately, as in normal circumstances, ``install`` also handles
|
||||
running ``setup`` automatically.
|
||||
|
||||
Unlike Python, Racket currently only supports two installation scopes for packages, user
|
||||
or system, and keeps a registry of installed packages at each scope in its configuration files.
|
||||
This means we can't simply compose a "``RACKET_PATH``" environment variable listing all of the
|
||||
places packages are installed, and update this at will.
|
||||
|
||||
Unfortunately this means that all currently installed packages which extend Racket via ``raco pkg install``
|
||||
are accessible whenever Racket is accessible.
|
||||
|
||||
Additionally, because Spack does not implement uninstall hooks, uninstalling a Spack ``rkt-`` package
|
||||
will have no effect on the ``raco`` installed packages visible to your Racket installation.
|
||||
Instead, you must manually run ``raco pkg remove`` to keep the two package managers in a mutually
|
||||
consistent state.
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -95,7 +95,7 @@ class of your package. For example, you can add it to your
|
||||
# Set up the hip macros needed by the build
|
||||
args.extend([
|
||||
'-DENABLE_HIP=ON',
|
||||
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
|
||||
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix])
|
||||
rocm_archs = spec.variants['amdgpu_target'].value
|
||||
if 'none' not in rocm_archs:
|
||||
args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -23,10 +23,7 @@
|
||||
import sys
|
||||
from glob import glob
|
||||
|
||||
from docutils.statemachine import StringList
|
||||
from sphinx.domains.python import PythonDomain
|
||||
from sphinx.ext.apidoc import main as sphinx_apidoc
|
||||
from sphinx.parsers import RSTParser
|
||||
|
||||
# -- Spack customizations -----------------------------------------------------
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
@@ -85,6 +82,9 @@
|
||||
#
|
||||
# Disable duplicate cross-reference warnings.
|
||||
#
|
||||
from sphinx.domains.python import PythonDomain
|
||||
|
||||
|
||||
class PatchedPythonDomain(PythonDomain):
|
||||
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||
if 'refspecific' in node:
|
||||
@@ -92,20 +92,8 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||
return super(PatchedPythonDomain, self).resolve_xref(
|
||||
env, fromdocname, builder, typ, target, node, contnode)
|
||||
|
||||
#
|
||||
# Disable tabs to space expansion in code blocks
|
||||
# since Makefiles require tabs.
|
||||
#
|
||||
class NoTabExpansionRSTParser(RSTParser):
|
||||
def parse(self, inputstring, document):
|
||||
if isinstance(inputstring, str):
|
||||
lines = inputstring.splitlines()
|
||||
inputstring = StringList(lines, document.current_source)
|
||||
super().parse(inputstring, document)
|
||||
|
||||
def setup(sphinx):
|
||||
sphinx.add_domain(PatchedPythonDomain, override=True)
|
||||
sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
@@ -192,7 +180,6 @@ def setup(sphinx):
|
||||
('py:class', '_frozen_importlib_external.SourceFileLoader'),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
('py:class', 'spack.provider_index._IndexBase'),
|
||||
('py:class', 'spack.repo._PrependFileLoader'),
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _config-yaml:
|
||||
|
||||
============================
|
||||
Spack Settings (config.yaml)
|
||||
============================
|
||||
==============
|
||||
Basic Settings
|
||||
==============
|
||||
|
||||
Spack's basic configuration options are set in ``config.yaml``. You can
|
||||
see the default settings by looking at
|
||||
@@ -72,6 +72,21 @@ used to configure module names.
|
||||
packages have been installed will prevent Spack from being
|
||||
able to find the old installation directories.
|
||||
|
||||
--------------------
|
||||
``module_roots``
|
||||
--------------------
|
||||
|
||||
Controls where Spack installs generated module files. You can customize
|
||||
the location for each type of module, e.g.:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
module_roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
|
||||
See :ref:`modules` for details.
|
||||
|
||||
--------------------
|
||||
``build_stage``
|
||||
--------------------
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -13,16 +13,12 @@ Spack has many configuration files. Here is a quick list of them, in
|
||||
case you want to skip directly to specific docs:
|
||||
|
||||
* :ref:`compilers.yaml <compiler-config>`
|
||||
* :ref:`concretizer.yaml <concretizer-options>`
|
||||
* :ref:`config.yaml <config-yaml>`
|
||||
* :ref:`mirrors.yaml <mirrors>`
|
||||
* :ref:`modules.yaml <modules>`
|
||||
* :ref:`packages.yaml <build-settings>`
|
||||
* :ref:`repos.yaml <repositories>`
|
||||
|
||||
You can also add any of these as inline configuration in ``spack.yaml``
|
||||
in an :ref:`environment <environment-configuration>`.
|
||||
|
||||
-----------
|
||||
YAML Format
|
||||
-----------
|
||||
@@ -37,6 +33,8 @@ Here is an example ``config.yaml`` file:
|
||||
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -251,6 +249,8 @@ your configurations look like this:
|
||||
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -274,6 +274,8 @@ command:
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -339,11 +341,13 @@ higher-precedence scope is *prepended* to the defaults. ``spack config
|
||||
get config`` shows the result:
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 5-8
|
||||
:emphasize-lines: 7-10
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- /lustre-scratch/$user/spack
|
||||
- ~/mystage
|
||||
@@ -367,11 +371,13 @@ user config looked like this:
|
||||
The merged configuration would look like this:
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 5-6
|
||||
:emphasize-lines: 7-8
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- /lustre-scratch/$user/spack
|
||||
- ~/mystage
|
||||
@@ -492,6 +498,9 @@ account all scopes. For example, to see the fully merged
|
||||
template_dirs:
|
||||
- $spack/templates
|
||||
directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
|
||||
module_roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -539,6 +548,9 @@ down the problem:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:23 template_dirs:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:24 - $spack/templates
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:28 directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:32 module_roots:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:33 tcl: $spack/share/spack/modules
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:34 lmod: $spack/share/spack/lmod
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:49 build_stage:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:50 - $tempdir/$user/spack-stage
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:51 - ~/.spack/stage
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -1057,39 +1057,39 @@ Release branches
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
There are currently two types of Spack releases: :ref:`major releases
|
||||
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
|
||||
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
|
||||
diagram of how Spack release branches work::
|
||||
|
||||
o branch: develop (latest version, v0.19.0.dev0)
|
||||
o branch: develop (latest version)
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.18, tag: v0.18.1
|
||||
o |
|
||||
| o tag: v0.18.0
|
||||
o |
|
||||
| o
|
||||
o merge v0.14.1 into develop
|
||||
|\
|
||||
| o branch: releases/v0.14, tag: v0.14.1
|
||||
o | merge v0.14.0 into develop
|
||||
|\|
|
||||
| o tag: v0.14.0
|
||||
|/
|
||||
o
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.17, tag: v0.17.2
|
||||
o |
|
||||
| o tag: v0.17.1
|
||||
o |
|
||||
| o tag: v0.17.0
|
||||
o merge v0.13.2 into develop
|
||||
|\
|
||||
| o branch: releases/v0.13, tag: v0.13.2
|
||||
o | merge v0.13.1 into develop
|
||||
|\|
|
||||
| o tag: v0.13.1
|
||||
o | merge v0.13.0 into develop
|
||||
|\|
|
||||
| o tag: v0.13.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
o
|
||||
|
||||
The ``develop`` branch has the latest contributions, and nearly all pull
|
||||
requests target ``develop``. The ``develop`` branch will report that its
|
||||
version is that of the next **major** release with a ``.dev0`` suffix.
|
||||
requests target ``develop``.
|
||||
|
||||
Each Spack release series also has a corresponding branch, e.g.
|
||||
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
|
||||
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
|
||||
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
|
||||
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
|
||||
tagged version on a release branch. Minor releases are back-ported from
|
||||
develop onto release branches. This is typically done by cherry-picking
|
||||
bugfix commits off of ``develop``.
|
||||
@@ -1100,20 +1100,12 @@ packages. They should generally only contain fixes to the Spack core.
|
||||
However, sometimes priorities are such that new functionality needs to
|
||||
be added to a minor release.
|
||||
|
||||
Both major and minor releases are tagged. As a convenience, we also tag
|
||||
the latest release as ``releases/latest``, so that users can easily check
|
||||
it out to get the latest stable version. See :ref:`updating-latest-release`
|
||||
for more details.
|
||||
|
||||
.. note::
|
||||
|
||||
Older spack releases were merged **back** into develop so that we could
|
||||
do fancy things with tags, but since tarballs and many git checkouts do
|
||||
not have tags, this proved overly complex and confusing.
|
||||
|
||||
We have since converted to using `PEP 440 <https://peps.python.org/pep-0440/>`_
|
||||
compliant versions. `See here <https://github.com/spack/spack/pull/25267>`_ for
|
||||
details.
|
||||
Both major and minor releases are tagged. After each release, we merge
|
||||
the release branch back into ``develop`` so that the version bump and any
|
||||
other release-specific changes are visible in the mainline. As a
|
||||
convenience, we also tag the latest release as ``releases/latest``,
|
||||
so that users can easily check it out to get the latest
|
||||
stable version. See :ref:`merging-releases` for more details.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Scheduling work for releases
|
||||
@@ -1171,11 +1163,10 @@ completed, the steps to make the major release are:
|
||||
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
|
||||
branch if you are preparing the ``X.Y.0`` release.
|
||||
|
||||
#. Remove the ``dev0`` development release segment from the version tuple in
|
||||
``lib/spack/spack/__init__.py``.
|
||||
#. Bump the version in ``lib/spack/spack/__init__.py``.
|
||||
|
||||
The version number itself should already be correct and should not be
|
||||
modified.
|
||||
See `this example from 0.13.0
|
||||
<https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_
|
||||
|
||||
#. Update ``CHANGELOG.md`` with major highlights in bullet form.
|
||||
|
||||
@@ -1197,16 +1188,9 @@ completed, the steps to make the major release are:
|
||||
is outdated submit pull requests to ``develop`` as normal
|
||||
and keep rebasing the release branch on ``develop``.
|
||||
|
||||
#. Bump the major version in the ``develop`` branch.
|
||||
|
||||
Create a pull request targeting the ``develop`` branch, bumping the major
|
||||
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
|
||||
For instance when you have just released ``v0.15.0``, set the version
|
||||
to ``(0, 16, 0, 'dev0')`` on ``develop``.
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
#. Follow the steps in :ref:`updating-latest-release`.
|
||||
#. Follow the steps in :ref:`merging-releases`.
|
||||
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
@@ -1282,6 +1266,9 @@ completed, the steps to make the point release are:
|
||||
|
||||
#. Bump the version in ``lib/spack/spack/__init__.py``.
|
||||
|
||||
See `this example from 0.14.1
|
||||
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
|
||||
|
||||
#. Update ``CHANGELOG.md`` with a list of the changes.
|
||||
|
||||
This is typically a summary of the commits you cherry-picked onto the
|
||||
@@ -1303,7 +1290,7 @@ completed, the steps to make the point release are:
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
#. Follow the steps in :ref:`updating-latest-release`.
|
||||
#. Follow the steps in :ref:`merging-releases`.
|
||||
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
@@ -1364,11 +1351,11 @@ Publishing a release on GitHub
|
||||
selectable in the versions menu.
|
||||
|
||||
|
||||
.. _updating-latest-release:
|
||||
.. _merging-releases:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Updating `releases/latest`
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Updating `releases/latest` and `develop`
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If the new release is the **highest** Spack release yet, you should
|
||||
also tag it as ``releases/latest``. For example, suppose the highest
|
||||
@@ -1392,6 +1379,40 @@ To tag ``releases/latest``, do this:
|
||||
The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
|
||||
``releases/latest`` tag with the new one.
|
||||
|
||||
We also merge each release that we tag as ``releases/latest`` into ``develop``.
|
||||
Make sure to do this with a merge commit:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout develop
|
||||
$ git merge --no-ff -s ours vX.Y.Z # vX.Y.Z is the new release's tag
|
||||
$ git push
|
||||
|
||||
We merge back to ``develop`` because it:
|
||||
|
||||
* updates the version and ``CHANGELOG.md`` on ``develop``; and
|
||||
* ensures that your release tag is reachable from the head of
|
||||
``develop``.
|
||||
|
||||
We *must* use a real merge commit (via the ``--no-ff`` option) to
|
||||
ensure that the release tag is reachable from the tip of ``develop``.
|
||||
This is necessary for ``spack -V`` to work properly -- it uses ``git
|
||||
describe --tags`` to find the last reachable tag in the repository and
|
||||
reports how far we are from it. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -V
|
||||
0.14.2-1486-b80d5e74e5
|
||||
|
||||
This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
|
||||
ahead of the ``0.14.2`` release.
|
||||
|
||||
We put this step last in the process because it's best to do it only once
|
||||
the release is complete and tagged. If you do it before you've tagged the
|
||||
release and later decide you want to tag some later commit, you'll need
|
||||
to merge again.
|
||||
|
||||
|
||||
.. _announcing-releases:
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _environments:
|
||||
|
||||
=========================
|
||||
Environments (spack.yaml)
|
||||
=========================
|
||||
============
|
||||
Environments
|
||||
============
|
||||
|
||||
An environment is used to group together a set of specs for the
|
||||
purpose of building, rebuilding and deploying in a coherent fashion.
|
||||
@@ -349,24 +349,6 @@ If the Environment has been concretized, Spack will install the
|
||||
concretized specs. Otherwise, ``spack install`` will first concretize
|
||||
the Environment and then install the concretized specs.
|
||||
|
||||
.. note::
|
||||
|
||||
Every ``spack install`` process builds one package at a time with multiple build
|
||||
jobs, controlled by the ``-j`` flag and the ``config:build_jobs`` option
|
||||
(see :ref:`build-jobs`). To speed up environment builds further, independent
|
||||
packages can be installed in parallel by launching more Spack instances. For
|
||||
example, the following will build at most four packages in parallel using
|
||||
three background jobs:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[myenv]$ spack install & spack install & spack install & spack install
|
||||
|
||||
Another option is to generate a ``Makefile`` and run ``make -j<N>`` to control
|
||||
the number of parallel install processes. See :ref:`env-generate-depfile`
|
||||
for details.
|
||||
|
||||
|
||||
As it installs, ``spack install`` creates symbolic links in the
|
||||
``logs/`` directory in the Environment, allowing for easy inspection
|
||||
of build logs related to that environment. The ``spack install``
|
||||
@@ -402,11 +384,18 @@ Sourcing that file in Bash will make the environment available to the
|
||||
user; and can be included in ``.bashrc`` files, etc. The ``loads``
|
||||
file may also be copied out of the environment, renamed, etc.
|
||||
|
||||
----------
|
||||
spack.yaml
|
||||
----------
|
||||
|
||||
Spack environments can be customized at finer granularity by editing
|
||||
the ``spack.yaml`` manifest file directly.
|
||||
|
||||
.. _environment-configuration:
|
||||
|
||||
------------------------
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuring Environments
|
||||
------------------------
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
A variety of Spack behaviors are changed through Spack configuration
|
||||
files, covered in more detail in the :ref:`configuration`
|
||||
@@ -428,9 +417,9 @@ environment can be specified by ``env:NAME`` (to affect environment
|
||||
``foo``, set ``--scope env:foo``). These commands will automatically
|
||||
manipulate configuration inline in the ``spack.yaml`` file.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""
|
||||
Inline configurations
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""
|
||||
|
||||
Inline Environment-scope configuration is done using the same yaml
|
||||
format as standard Spack configuration scopes, covered in the
|
||||
@@ -451,9 +440,9 @@ a ``packages.yaml`` file) could contain:
|
||||
This configuration sets the default compiler for all packages to
|
||||
``intel``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""
|
||||
Included configurations
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""
|
||||
|
||||
Spack environments allow an ``include`` heading in their yaml
|
||||
schema. This heading pulls in external configuration files and applies
|
||||
@@ -473,9 +462,9 @@ to make small changes to an individual Environment. Included configs
|
||||
listed earlier will have higher precedence, as the included configs are
|
||||
applied in reverse order.
|
||||
|
||||
-------------------------------
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Manually Editing the Specs List
|
||||
-------------------------------
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The list of abstract/root specs in the Environment is maintained in
|
||||
the ``spack.yaml`` manifest under the heading ``specs``.
|
||||
@@ -493,9 +482,9 @@ Appending to this list in the yaml is identical to using the ``spack
|
||||
add`` command from the command line. However, there is more power
|
||||
available from the yaml file.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""
|
||||
Spec concretization
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""
|
||||
|
||||
Specs can be concretized separately or together, as already
|
||||
explained in :ref:`environments_concretization`. The behavior active
|
||||
@@ -521,9 +510,9 @@ which can currently take either one of the two allowed values ``together`` or ``
|
||||
the environment remains consistent. When instead the specs are concretized
|
||||
separately only the new specs will be re-concretized after any addition.
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
"""""""""""""
|
||||
Spec Matrices
|
||||
^^^^^^^^^^^^^
|
||||
"""""""""""""
|
||||
|
||||
Entries in the ``specs`` list can be individual abstract specs or a
|
||||
spec matrix.
|
||||
@@ -583,9 +572,9 @@ This allows one to create toolchains out of combinations of
|
||||
constraints and apply them somewhat indiscriminately to packages,
|
||||
without regard for the applicability of the constraint.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""
|
||||
Spec List References
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""
|
||||
|
||||
The last type of possible entry in the specs list is a reference.
|
||||
|
||||
@@ -685,9 +674,9 @@ The valid variables for a ``when`` clause are:
|
||||
#. ``hostname``. The hostname of the system (if ``hostname`` is an
|
||||
executable in the user's PATH).
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""
|
||||
SpecLists as Constraints
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""
|
||||
|
||||
Dependencies and compilers in Spack can be both packages in an
|
||||
environment and constraints on other packages. References to SpecLists
|
||||
@@ -719,41 +708,41 @@ For example, the following environment has three root packages:
|
||||
This allows for a much-needed reduction in redundancy between packages
|
||||
and constraints.
|
||||
|
||||
----------------
|
||||
Filesystem Views
|
||||
----------------
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Environment-managed Views
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack Environments can define filesystem views, which provide a direct access point
|
||||
for software similar to the directory hierarchy that might exist under ``/usr/local``.
|
||||
Filesystem views are updated every time the environment is written out to the lock
|
||||
file ``spack.lock``, so the concrete environment and the view are always compatible.
|
||||
The files of the view's installed packages are brought into the view by symbolic or
|
||||
hard links, referencing the original Spack installation, or by copy.
|
||||
Spack Environments can define filesystem views of their software,
|
||||
which are maintained as packages and can be installed and uninstalled from
|
||||
the Environment. Filesystem views provide an access point for packages
|
||||
from the filesystem for users who want to access those packages
|
||||
directly. For more information on filesystem views, see the section
|
||||
:ref:`filesystem-views`.
|
||||
|
||||
Spack Environment managed views are updated every time the environment
|
||||
is written out to the lock file ``spack.lock``, so the concrete
|
||||
environment and the view are always compatible.
|
||||
|
||||
.. _configuring_environment_views:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuration in ``spack.yaml``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""""""
|
||||
Configuring environment views
|
||||
"""""""""""""""""""""""""""""
|
||||
|
||||
The Spack Environment manifest file has a top-level keyword
|
||||
``view``. Each entry under that heading is a **view descriptor**, headed
|
||||
by a name. Any number of views may be defined under the ``view`` heading.
|
||||
The view descriptor contains the root of the view, and
|
||||
``view``. Each entry under that heading is a view descriptor, headed
|
||||
by a name. The view descriptor contains the root of the view, and
|
||||
optionally the projections for the view, ``select`` and
|
||||
``exclude`` lists for the view and link information via ``link`` and
|
||||
``link_type``.
|
||||
|
||||
For example, in the following manifest
|
||||
``link_type``. For example, in the following manifest
|
||||
file snippet we define a view named ``mpis``, rooted at
|
||||
``/path/to/view`` in which all projections use the package name,
|
||||
version, and compiler name to determine the path for a given
|
||||
package. This view selects all packages that depend on MPI, and
|
||||
excludes those built with the PGI compiler at version 18.5.
|
||||
The root specs with their (transitive) link and run type dependencies
|
||||
will be put in the view due to the ``link: all`` option,
|
||||
and the files in the view will be symlinks to the spack install
|
||||
directories.
|
||||
All the dependencies of each root spec in the environment will be linked
|
||||
in the view due to the command ``link: all`` and the files in the view will
|
||||
be symlinks to the spack install directories.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -769,26 +758,16 @@ directories.
|
||||
link: all
|
||||
link_type: symlink
|
||||
|
||||
The default for the ``select`` and
|
||||
For more information on using view projections, see the section on
|
||||
:ref:`adding_projections_to_views`. The default for the ``select`` and
|
||||
``exclude`` values is to select everything and exclude nothing. The
|
||||
default projection is the default view projection (``{}``). The ``link``
|
||||
attribute allows the following values:
|
||||
|
||||
#. ``link: all`` include root specs with their transitive run and link type
|
||||
dependencies (default);
|
||||
#. ``link: run`` include root specs with their transitive run type dependencies;
|
||||
#. ``link: roots`` include root specs without their dependencies.
|
||||
|
||||
The ``link_type`` defaults to ``symlink`` but can also take the value
|
||||
of ``hardlink`` or ``copy``.
|
||||
|
||||
.. tip::
|
||||
|
||||
The option ``link: run`` can be used to create small environment views for
|
||||
Python packages. Python will be able to import packages *inside* of the view even
|
||||
when the environment is not activated, and linked libraries will be located
|
||||
*outside* of the view thanks to rpaths.
|
||||
defaults to ``all`` but can also be ``roots`` when only the root specs
|
||||
in the environment are desired in the view. The ``link_type`` defaults
|
||||
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.
|
||||
|
||||
Any number of views may be defined under the ``view`` heading in a
|
||||
Spack Environment.
|
||||
|
||||
There are two shorthands for environments with a single view. If the
|
||||
environment at ``/path/to/env`` has a single view, with a root at
|
||||
@@ -854,47 +833,9 @@ regenerate`` will regenerate the views for the environment. This will
|
||||
apply any updates in the environment configuration that have not yet
|
||||
been applied.
|
||||
|
||||
.. _view_projections:
|
||||
|
||||
""""""""""""""""
|
||||
View Projections
|
||||
""""""""""""""""
|
||||
The default projection into a view is to link every package into the
|
||||
root of the view. The projections attribute is a mapping of partial specs to
|
||||
spec format strings, defined by the :meth:`~spack.spec.Spec.format`
|
||||
function, as shown in the example below:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
projections:
|
||||
zlib: {name}-{version}
|
||||
^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
|
||||
all: {name}-{version}/{compiler.name}-{compiler.version}
|
||||
|
||||
The entries in the projections configuration file must all be either
|
||||
specs or the keyword ``all``. For each spec, the projection used will
|
||||
be the first non-``all`` entry that the spec satisfies, or ``all`` if
|
||||
there is an entry for ``all`` and no other entry is satisfied by the
|
||||
spec. Where the keyword ``all`` appears in the file does not
|
||||
matter.
|
||||
|
||||
Given the example above, the spec ``zlib@1.2.8``
|
||||
will be linked into ``/my/view/zlib-1.2.8/``, the spec
|
||||
``hdf5@1.8.10+mpi %gcc@4.9.3 ^mvapich2@2.2`` will be linked into
|
||||
``/my/view/hdf5-1.8.10/mvapich2-2.2-gcc-4.9.3``, and the spec
|
||||
``hdf5@1.8.10~mpi %gcc@4.9.3`` will be linked into
|
||||
``/my/view/hdf5-1.8.10/gcc-4.9.3``.
|
||||
|
||||
If the keyword ``all`` does not appear in the projections
|
||||
configuration file, any spec that does not satisfy any entry in the
|
||||
file will be linked into the root of the view as in a single-prefix
|
||||
view. Any entries that appear below the keyword ``all`` in the
|
||||
projections configuration file will not be used, as all specs will use
|
||||
the projection under ``all`` before reaching those entries.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""
|
||||
Activating environment views
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
The ``spack env activate`` command will put the default view for the
|
||||
environment into the user's path, in addition to activating the
|
||||
@@ -928,93 +869,3 @@ environment.
|
||||
|
||||
The ``spack env deactivate`` command will remove the default view of
|
||||
the environment from the user's path.
|
||||
|
||||
|
||||
.. _env-generate-depfile:
|
||||
|
||||
|
||||
------------------------------------------
|
||||
Generating Depfiles from Environments
|
||||
------------------------------------------
|
||||
|
||||
Spack can generate ``Makefile``\s to make it easier to build multiple
|
||||
packages in an environment in parallel. Generated ``Makefile``\s expose
|
||||
targets that can be included in existing ``Makefile``\s, to allow
|
||||
other targets to depend on the environment installation.
|
||||
|
||||
A typical workflow is as follows:
|
||||
|
||||
.. code:: console
|
||||
|
||||
spack env create -d .
|
||||
spack -e . add perl
|
||||
spack -e . concretize
|
||||
spack -e . env depfile > Makefile
|
||||
make -j64
|
||||
|
||||
This generates a ``Makefile`` from a concretized environment in the
|
||||
current working directory, and ``make -j64`` installs the environment,
|
||||
exploiting parallelism across packages as much as possible. Spack
|
||||
respects the Make jobserver and forwards it to the build environment
|
||||
of packages, meaning that a single ``-j`` flag is enough to control the
|
||||
load, even when packages are built in parallel.
|
||||
|
||||
By default the following phony convenience targets are available:
|
||||
|
||||
- ``make all``: installs the environment (default target);
|
||||
- ``make fetch-all``: only fetch sources of all packages;
|
||||
- ``make clean``: cleans files used by make, but does not uninstall packages.
|
||||
|
||||
.. tip::
|
||||
|
||||
GNU Make version 4.3 and above have great support for output synchronization
|
||||
through the ``-O`` and ``--output-sync`` flags, which ensure that output is
|
||||
printed orderly per package install. To get synchronized output with colors,
|
||||
use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.
|
||||
|
||||
The following advanced example shows how generated targets can be used in a
|
||||
``Makefile``:
|
||||
|
||||
.. code:: Makefile
|
||||
|
||||
SPACK ?= spack
|
||||
|
||||
.PHONY: all clean fetch env
|
||||
|
||||
all: env
|
||||
|
||||
spack.lock: spack.yaml
|
||||
$(SPACK) -e . concretize -f
|
||||
|
||||
env.mk: spack.lock
|
||||
$(SPACK) -e . env depfile -o $@ --make-target-prefix spack
|
||||
|
||||
fetch: spack/fetch
|
||||
$(info Environment fetched!)
|
||||
|
||||
env: spack/env
|
||||
$(info Environment installed!)
|
||||
|
||||
clean:
|
||||
rm -rf spack.lock env.mk spack/
|
||||
|
||||
ifeq (,$(filter clean,$(MAKECMDGOALS)))
|
||||
include env.mk
|
||||
endif
|
||||
|
||||
When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
|
||||
from its rule, which triggers concretization. When done, the generated targets
|
||||
``spack/fetch`` and ``spack/env`` are available. In the above
|
||||
example, the ``env`` target uses the latter as a prerequisite, meaning
|
||||
that it can make use of the installed packages in its commands.
|
||||
|
||||
As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
|
||||
the include is conditional.
|
||||
|
||||
.. note::
|
||||
|
||||
When including generated ``Makefile``\s, it is important to use
|
||||
the ``--make-target-prefix`` flag and use the non-phony targets
|
||||
``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
|
||||
prerequisites, instead of the phony targets ``<target-prefix>/all``
|
||||
and ``<target-prefix>/fetch-all`` respectively.
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -38,7 +38,8 @@ obtained by cloning the corresponding git repository:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd ~/
|
||||
$ pwd
|
||||
/home/user
|
||||
$ mkdir tmp && cd tmp
|
||||
$ git clone https://github.com/alalazo/spack-scripting.git
|
||||
Cloning into 'spack-scripting'...
|
||||
@@ -61,7 +62,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:
|
||||
|
||||
config:
|
||||
extensions:
|
||||
- ~/tmp/spack-scripting
|
||||
- /home/user/tmp/spack-scripting
|
||||
|
||||
is part of your configuration file. Once this is set up, any command that the extension provides
|
||||
will be available from the command line:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -149,28 +149,27 @@ Spack fall back to bootstrapping from sources:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack bootstrap untrust github-actions-v0.2
|
||||
==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping
|
||||
$ spack bootstrap untrust github-actions
|
||||
==> "github-actions" is now untrusted and will not be used for bootstrapping
|
||||
|
||||
You can verify that the new settings are effective with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack bootstrap list
|
||||
Name: github-actions-v0.2 UNTRUSTED
|
||||
Name: github-actions UNTRUSTED
|
||||
|
||||
Type: buildcache
|
||||
|
||||
Info:
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
|
||||
homepage: https://github.com/spack/spack-bootstrap-mirrors
|
||||
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
|
||||
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
|
||||
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
|
||||
|
||||
Description:
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
|
||||
[ ... ]
|
||||
|
||||
Name: spack-install TRUSTED
|
||||
|
||||
@@ -272,10 +271,9 @@ Compiler configuration
|
||||
----------------------
|
||||
|
||||
Spack has the ability to build packages with multiple compilers and
|
||||
compiler versions. Compilers can be made available to Spack by
|
||||
specifying them manually in ``compilers.yaml``, or automatically by
|
||||
running ``spack compiler find``, but for convenience Spack will
|
||||
automatically detect compilers the first time it needs them.
|
||||
compiler versions. Spack searches for compilers on your machine
|
||||
automatically the first time it is run. It does this by inspecting
|
||||
your ``PATH``.
|
||||
|
||||
.. _cmd-spack-compilers:
|
||||
|
||||
@@ -283,7 +281,7 @@ automatically detect compilers the first time it needs them.
|
||||
``spack compilers``
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can see which compilers are available to Spack by running ``spack
|
||||
You can see which compilers spack has found by running ``spack
|
||||
compilers`` or ``spack compiler list``:
|
||||
|
||||
.. code-block:: console
|
||||
@@ -322,10 +320,9 @@ An alias for ``spack compiler find``.
|
||||
``spack compiler find``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Lists the compilers currently available to Spack. If you do not see
|
||||
a compiler in this list, but you want to use it with Spack, you can
|
||||
simply run ``spack compiler find`` with the path to where the
|
||||
compiler is installed. For example:
|
||||
If you do not see a compiler in this list, but you want to use it with
|
||||
Spack, you can simply run ``spack compiler find`` with the path to
|
||||
where the compiler is installed. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -1517,238 +1514,3 @@ To ensure that Spack does not autodetect the Cray programming
|
||||
environment, unset the environment variable ``MODULEPATH``. This
|
||||
will cause Spack to treat a linux container on a Cray system as a base
|
||||
linux distro.
|
||||
|
||||
.. _windows_support:
|
||||
|
||||
----------------
|
||||
Spack On Windows
|
||||
----------------
|
||||
|
||||
Windows support for Spack is currently under development. While this work is still in an early stage,
it is already possible to set up Spack and perform a few operations on Windows. This section will guide
you through the steps needed to install Spack and start running it on a fresh Windows machine.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Step 1: Install prerequisites
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To use Spack on Windows, you will need the following packages:
|
||||
|
||||
Required:
|
||||
* Microsoft Visual Studio
|
||||
* Python
|
||||
* Git
|
||||
|
||||
Optional:
|
||||
* Intel Fortran (needed for some packages)
|
||||
|
||||
.. note::
|
||||
|
||||
Currently MSVC is the only compiler tested for C/C++ projects. Intel OneAPI provides Fortran support.
|
||||
|
||||
"""""""""""""""""""""""
|
||||
Microsoft Visual Studio
|
||||
"""""""""""""""""""""""
|
||||
|
||||
Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
|
||||
|
||||
We require several specific components to be included in the Visual Studio installation.
|
||||
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
|
||||
depending on installation type (Professional, Build Tools, etc.) The other required component is
|
||||
"C++ CMake tools for Windows," which can be selected from among the optional packages.
|
||||
This provides CMake and Ninja for use during Spack configuration.
|
||||
|
||||
If you already have Visual Studio installed, you can make sure these components are installed by
|
||||
rerunning the installer. Next to your installation, select "Modify" and look at the
|
||||
"Installation details" pane on the right.
|
||||
|
||||
"""""""""""""
|
||||
Intel Fortran
|
||||
"""""""""""""
|
||||
|
||||
For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
|
||||
The suite is free to download from Intel's website, located at
|
||||
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
|
||||
The executable of choice for Spack will be Intel's LLVM-based beta compiler, ``ifx``, which supports the classic
compiler's (``ifort``'s) frontend and runtime libraries.
|
||||
|
||||
""""""
|
||||
Python
|
||||
""""""
|
||||
|
||||
As Spack is written in Python, an installation of Python is needed to run it.
|
||||
Python 3 can be downloaded and installed from the Windows Store, and will be automatically added
|
||||
to your ``PATH`` in this case.
|
||||
|
||||
.. note::
|
||||
Spack currently supports Python versions 3.2 and later.
|
||||
|
||||
"""
|
||||
Git
|
||||
"""
|
||||
|
||||
A bash console and GUI can be downloaded from https://git-scm.com/downloads.
|
||||
If you are unfamiliar with Git, there are a myriad of resources online to help
|
||||
guide you through checking out repositories and switching development branches.
|
||||
|
||||
When given the option of adjusting your ``PATH``, choose the ``Git from the
|
||||
command line and also from 3rd-party software`` option. This will automatically
|
||||
update your ``PATH`` variable to include the ``git`` command.
|
||||
|
||||
Spack support on Windows currently depends on the Git for Windows project to provide Git,
and Spack requires the utilities vendored by that project. Git for Windows is also the
recommended method for installing Git on Windows; a link to it can be found above.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Step 2: Install and setup Spack
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We are now ready to get the Spack environment set up on our machine. We
begin by using Git to clone the Spack repo, hosted at https://github.com/spack/spack.git,
into a desired directory, called ``spack_install`` for our purposes today.
|
||||
|
||||
In order to install Spack with Windows support, run the following one-liner
|
||||
in a Windows CMD prompt.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git clone https://github.com/spack/spack.git
|
||||
|
||||
.. note::
|
||||
If you chose to install Spack into a directory on Windows that is set up to require Administrative
Privileges, Spack will require elevated privileges to run.
Administrative Privileges can apply either by default, as with directories such as
``C:\Program Files``, or through administrator-applied restrictions
on a directory that Spack installs files to, such as ``C:\Users``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Step 3: Run and configure Spack
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
|
||||
directory. This will provide a Windows command prompt with an environment properly set up with Spack
|
||||
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
|
||||
(which may happen if you installed Python from the website and not the Windows Store) add the location
|
||||
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
|
||||
by using the ``Edit the system environment variables`` utility in Windows Control Panel.
|
||||
|
||||
.. note::
|
||||
Alternatively, PowerShell can be used in place of CMD.
|
||||
|
||||
To configure Spack, first run the following command inside the Spack console:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack compiler find
|
||||
|
||||
This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
installed, this command should detect only Microsoft Visual Studio; the Intel Fortran
compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
output.
|
||||
|
||||
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
|
||||
This file is located at ``etc\spack\defaults\windows\config.yaml``. You can read more on how to
|
||||
do this and write your own configuration files in the :ref:`Configuration Files<configuration>` section of our
|
||||
documentation. If you do this, pay particular attention to the ``build_stage`` block of the file
|
||||
as this specifies the directory that will temporarily hold the source code for the packages to
|
||||
be installed. This path name must be sufficiently short for compliance with cmd, otherwise you
|
||||
will see build errors during installation (particularly with CMake) tied to long path names.
|
||||
|
||||
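As an illustrative sketch, overriding ``build_stage`` with a deliberately short path (the directory
shown is an assumption, not a Spack default) might look like:

.. code-block:: yaml

   config:
     build_stage:
       - 'C:\spack\stage'
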
To allow Spack to use external tools and dependencies already on your system, the
|
||||
external pieces of software must be described in the ``packages.yaml`` file.
|
||||
There are two methods to populate this file:
|
||||
|
||||
The first and easiest choice is to use Spack to find installations on your system. In
|
||||
the Spack terminal, run the following commands:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack external find cmake
|
||||
spack external find ninja
|
||||
|
||||
The ``spack external find <name>`` command will find executables on your system
that match the given name, and will store the items it finds in a
``packages.yaml`` file in the ``.staging\`` directory.
|
||||
|
||||
Assuming that the command found CMake and Ninja executables in the previous
|
||||
step, continue to Step 4. If no executables were found, we may need to manually direct Spack towards the CMake
and Ninja installations we set up with Visual Studio. In that case, your ``packages.yaml`` file will look something
like this, with possibly slight variations in the paths to CMake and Ninja:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
  cmake:
    externals:
    - spec: cmake@3.19
      prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake'
    buildable: False
  ninja:
    externals:
    - spec: ninja@1.8.2
      prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\Ninja'
    buildable: False
|
||||
|
||||
You can also use a separate installation of CMake if you have one and prefer
to use it. If you don't have a path to Ninja analogous to the above, then you can
obtain it by running the Visual Studio Installer and following the instructions
at the start of this section. Also note that ``.yaml`` files use spaces for indentation
and not tabs, so ensure that this is the case when editing one directly.
|
||||
|
||||
|
||||
.. note:: Cygwin
|
||||
The use of Cygwin is not officially supported by Spack and is not tested.
|
||||
However, Spack will not throw an error, so if you choose to use Spack
with Cygwin, know that no functionality is guaranteed.
|
||||
|
||||
^^^^^^^^^^^^^^^^^
|
||||
Step 4: Use Spack
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Once the configuration is complete, it is time to give the installation a test. Install a basic package through the
Spack console via:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack install cpuinfo
|
||||
|
||||
If, in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages.
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
Windows Compatible Packages
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
Many Spack packages are not currently compatible with Windows, due to Unix
|
||||
dependencies or incompatible build tools like autoconf. Here are several
|
||||
packages known to work on Windows:
|
||||
|
||||
* abseil-cpp
|
||||
* clingo
|
||||
* cpuinfo
|
||||
* cmake
|
||||
* glm
|
||||
* nasm
|
||||
* netlib-lapack (requires Intel Fortran)
|
||||
* ninja
|
||||
* openssl
|
||||
* perl
|
||||
* python
|
||||
* ruby
|
||||
* wrf
|
||||
* zlib
|
||||
|
||||
.. note::
|
||||
This is by no means a comprehensive list.
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
For developers
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
The intent is to provide a Windows installer that will automatically set up
|
||||
Python, Git, and Spack, instead of requiring the user to do so manually.
|
||||
Instructions for creating the installer are at
|
||||
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
|
||||
|
||||
Alternatively, a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -54,8 +54,9 @@ or refer to the full manual below.
|
||||
features
|
||||
getting_started
|
||||
basic_usage
|
||||
workflows
|
||||
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
|
||||
replace_conda_homebrew
|
||||
known_issues
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
77
lib/spack/docs/known_issues.rst
Normal file
@@ -0,0 +1,77 @@
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
============
|
||||
Known Issues
|
||||
============
|
||||
|
||||
This is a list of known bugs in Spack. It provides ways of getting around these
|
||||
problems if you encounter them.
|
||||
|
||||
---------------------------------------------------
|
||||
Variants are not properly forwarded to dependencies
|
||||
---------------------------------------------------
|
||||
|
||||
**Status:** Expected to be fixed by Spack's new concretizer
|
||||
|
||||
Sometimes, a variant of a package can also affect how its dependencies are
|
||||
built. For example, in order to build MPI support for a package, it may
|
||||
require that its dependencies are also built with MPI support. In the
|
||||
``package.py``, this looks like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('hdf5~mpi', when='~mpi')
|
||||
depends_on('hdf5+mpi', when='+mpi')
|
||||
|
||||
Spack handles this situation properly for *immediate* dependencies, and
|
||||
builds ``hdf5`` with the same variant you used for the package that
|
||||
depends on it. However, for *indirect* dependencies (dependencies of
|
||||
dependencies), Spack does not backtrack up the DAG far enough to handle
|
||||
this. Users commonly run into this situation when trying to build R with
|
||||
X11 support:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install r+X
|
||||
...
|
||||
==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
|
||||
Package cairo requires variant ~X, but spec asked for +X
|
||||
|
||||
A workaround is to explicitly activate the variants of dependencies as well:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install r+X ^cairo+X ^pango+X
|
||||
|
||||
See https://github.com/spack/spack/issues/267 and
|
||||
https://github.com/spack/spack/issues/2546 for further details.
|
||||
|
||||
-----------------------------------------------
|
||||
depends_on cannot handle recursive dependencies
|
||||
-----------------------------------------------
|
||||
|
||||
**Status:** Not yet a work in progress
|
||||
|
||||
Although ``depends_on`` can handle any aspect of Spack's spec syntax,
|
||||
it currently cannot handle recursive dependencies. If the ``^`` sigil
|
||||
appears in a ``depends_on`` statement, the concretizer will hang.
|
||||
For example, something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('mfem+cuda ^hypre+cuda', when='+cuda')
|
||||
|
||||
|
||||
should be rewritten as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('mfem+cuda', when='+cuda')
|
||||
depends_on('hypre+cuda', when='+cuda')
|
||||
|
||||
|
||||
See https://github.com/spack/spack/issues/17660 and
|
||||
https://github.com/spack/spack/issues/11160 for more details.
|
||||
@@ -1,13 +1,13 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _mirrors:
|
||||
|
||||
======================
|
||||
Mirrors (mirrors.yaml)
|
||||
======================
|
||||
=======
|
||||
Mirrors
|
||||
=======
|
||||
|
||||
Some sites may not have access to the internet for fetching packages.
|
||||
These sites will need a local repository of tarballs from which they
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _modules:
|
||||
|
||||
======================
|
||||
Modules (modules.yaml)
|
||||
======================
|
||||
=======
|
||||
Modules
|
||||
=======
|
||||
|
||||
The use of module systems to manage user environment in a controlled way
|
||||
is a common practice at HPC centers that is often embraced also by
|
||||
@@ -181,7 +181,10 @@ to the environment variables listed below the folder name.
|
||||
Spack modules can be configured for multiple module sets. The default
|
||||
module set is named ``default``. All Spack commands which operate on
|
||||
modules default to apply the ``default`` module set, but can be
|
||||
applied to any module set in the configuration.
|
||||
applied to any module set in the configuration. Settings applied at
|
||||
the root of the configuration (e.g. ``modules:enable`` rather than
|
||||
``modules:default:enable``) are applied to the default module set for
|
||||
backwards compatibility.
|
||||
|
||||
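As a minimal sketch (the enabled module type is illustrative), the following two configurations
are therefore treated as equivalent, with the root-level form applying to the ``default`` module set:

.. code-block:: yaml

   # root-level setting, kept for backwards compatibility
   modules:
     enable:
       - tcl

   # equivalent setting with the module set named explicitly
   modules:
     default:
       enable:
         - tcl
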
"""""""""""""""""""""""""
|
||||
Changing the modules root
|
||||
@@ -375,7 +378,7 @@ most likely via the ``+blas`` variant specification.
|
||||
|
||||
The most heavyweight solution to module naming is to change the entire
|
||||
naming convention for module files. This uses the projections format
|
||||
covered in :ref:`view_projections`.
|
||||
covered in :ref:`adding_projections_to_views`.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -537,7 +540,8 @@ configuration:
|
||||
|
||||
#. The configuration is for an :ref:`environment <environments>` and
|
||||
will never be applied outside the environment,
|
||||
#. The environment in question is configured to use a view,
|
||||
#. The environment in question is configured to use a :ref:`view
|
||||
<filesystem-views>`,
|
||||
#. The :ref:`environment view is configured
|
||||
<configuring_environment_views>` with a projection that ensures
|
||||
every package is linked to a unique directory,
|
||||
@@ -611,39 +615,44 @@ modifications to either ``CPATH`` or ``LIBRARY_PATH``.
|
||||
Autoload dependencies
|
||||
"""""""""""""""""""""
|
||||
|
||||
Often it is required for a module to have its (transitive) dependencies loaded as well.
|
||||
One example where this is useful is when one package needs to use executables provided
|
||||
by its dependency; when the dependency is autoloaded, the executable will be in the
|
||||
PATH. Similarly for scripting languages such as Python, packages and their dependencies
|
||||
have to be loaded together.
|
||||
|
||||
Autoloading is enabled by default for LMod, which has great built-in support for it through
the ``depends_on`` function. For Environment Modules it is disabled by default.
|
||||
|
||||
Autoloading can also be enabled conditionally:
|
||||
In some cases it can be useful to have module files that automatically load
|
||||
their dependencies. This may be the case for Python extensions, if not
|
||||
activated using ``spack activate``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
  default:
    tcl:
      all:
        autoload: none
      ^python:
        autoload: direct

modules:
  default:
    tcl:
      ^python:
        autoload: 'direct'
|
||||
|
||||
The configuration file above will produce module files that will
|
||||
load their direct dependencies if the package installed depends on ``python``.
|
||||
The allowed values for the ``autoload`` statement are either ``none``,
|
||||
``direct`` or ``all``.
|
||||
``direct`` or ``all``. The default is ``none``.
|
||||
|
||||
.. tip::
|
||||
Building external software
|
||||
Setting ``autoload`` to ``direct`` for all packages can be useful
|
||||
when building software outside of a Spack installation that depends on
|
||||
artifacts in that installation. E.g. (adjust ``lmod`` vs ``tcl``
|
||||
as appropriate):
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
  default:
    lmod:
      all:
        autoload: 'direct'
|
||||
|
||||
.. note::
|
||||
TCL prerequisites
|
||||
In the ``tcl`` section of the configuration file it is possible to use
|
||||
the ``prerequisites`` directive that accepts the same values as
|
||||
``autoload``. It will produce module files that have a ``prereq``
|
||||
statement, which can be used to autoload dependencies in some versions
|
||||
of Environment Modules.
|
||||
statement instead of automatically loading other modules.
|
||||
|
||||
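As a rough sketch (assuming the default module set, with ``all`` chosen purely for illustration),
enabling ``prereq`` statements for every package might look like:

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           prerequisites: direct
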
------------------------
|
||||
Maintaining Module Files
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -705,8 +705,7 @@ as follows:
|
||||
|
||||
#. The following special strings are considered larger than any other
|
||||
numeric or non-numeric version component, and satisfy the following
|
||||
order between themselves:
|
||||
``develop > main > master > head > trunk > stable``.
|
||||
order between themselves: ``develop > main > master > head > trunk``.
|
||||
|
||||
#. Numbers are ordered numerically, are less than special strings, and
|
||||
larger than other non-numeric components.
|
||||
@@ -1423,37 +1422,6 @@ other similar operations:
|
||||
).with_default('auto').with_non_feature_values('auto'),
|
||||
)
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
Conditional Possible Values
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
There are cases where a variant may take multiple values, and the list of allowed values
expands over time. Think, for instance, of the C++ standard with which we might compile
Boost, which can take one of multiple possible values, with the latest standards
only available from a certain version onward.
|
||||
|
||||
To model a similar situation we can use *conditional possible values* in the variant declaration:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant(
    'cxxstd', default='98',
    values=(
        '98', '11', '14',
        # C++17 is not supported by Boost < 1.63.0.
        conditional('17', when='@1.63.0:'),
        # C++20/2a is not supported by Boost < 1.73.0
        conditional('2a', '2b', when='@1.73.0:')
    ),
    multi=False,
    description='Use the specified C++ standard when building.',
)
|
||||
|
||||
The snippet above allows ``98``, ``11`` and ``14`` as unconditional possible values for the
|
||||
``cxxstd`` variant, while ``17`` requires a version greater than or equal to ``1.63.0``,
and both ``2a`` and ``2b`` require a version greater than or equal to ``1.73.0``.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Conditional Variants
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -1473,32 +1441,6 @@ The ``when`` clause follows the same syntax and accepts the same
|
||||
values as the ``when`` argument of
|
||||
:py:func:`spack.directives.depends_on`
|
||||
|
||||
^^^^^^^^^^^^^^^
|
||||
Sticky Variants
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
The variant directive can be marked as ``sticky`` by setting the corresponding
argument to ``True``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('bar', default=False, sticky=True)
|
||||
|
||||
A ``sticky`` variant differs from a regular one in that it is always set
|
||||
to either:
|
||||
|
||||
#. An explicit value appearing in a spec literal or
|
||||
#. Its default value
|
||||
|
||||
The concretizer thus is not free to pick an alternate value to work
|
||||
around conflicts, but will error out instead.
|
||||
Setting this property on a variant is useful in cases where the
|
||||
variant allows some dangerous or controversial options (e.g. using unsupported versions
|
||||
of a compiler for a library) and the packager wants to ensure that
|
||||
allowing these options is done on purpose by the user, rather than
|
||||
automatically by the solver.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Overriding Variants
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
@@ -2501,24 +2443,6 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
|
||||
activate``. When it is activated, all the files in its prefix will be
|
||||
symbolically linked into the prefix of the python package.
|
||||
|
||||
A package can only extend one other package at a time. To support packages
|
||||
that may extend one of a list of other packages, Spack supports multiple
|
||||
``extends`` directives as long as at most one of them is selected as
|
||||
a dependency during concretization. For example, a lua package could extend
|
||||
either lua or luajit, but not both:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class LuaLpeg(Package):
    ...
    variant('use_lua', default=True)
    extends('lua', when='+use_lua')
    extends('lua-luajit', when='~use_lua')
    ...
|
||||
|
||||
Now, a user can install, and activate, the ``lua-lpeg`` package for either
|
||||
lua or luajit.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Adding additional constraints
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -2574,7 +2498,7 @@ from being linked in at activation time.
|
||||
Views
|
||||
-----
|
||||
|
||||
The ``spack view`` command can be
|
||||
As covered in :ref:`filesystem-views`, the ``spack view`` command can be
|
||||
used to symlink a number of packages into a merged prefix. The methods of
|
||||
``PackageViewMixin`` can be overridden to customize how packages are added
|
||||
to views. Generally this can be used to create copies of specific files rather
|
||||
@@ -2908,7 +2832,7 @@ be concretized on their system. For example, one user may prefer packages
|
||||
built with OpenMPI and the Intel compiler. Another user may prefer
|
||||
packages be built with MVAPICH and GCC.
|
||||
|
||||
See the :ref:`package-preferences` section for more details.
|
||||
See the :ref:`concretization-preferences` section for more details.
|
||||
|
||||
|
||||
.. _group_when_spec:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,206 +0,0 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
=====================================
|
||||
Using Spack to Replace Homebrew/Conda
|
||||
=====================================
|
||||
|
||||
Spack is an incredibly powerful package manager, designed for supercomputers
|
||||
where users have diverse installation needs. But Spack can also be used to
|
||||
handle simple single-user installations on your laptop. Most macOS users are
|
||||
already familiar with package managers like Homebrew and Conda, where all
|
||||
installed packages are symlinked to a single central location like ``/usr/local``.
|
||||
In this section, we will show you how to emulate the behavior of Homebrew/Conda
|
||||
using :ref:`environments`!
|
||||
|
||||
-----
|
||||
Setup
|
||||
-----
|
||||
|
||||
First, let's create a new environment. We'll assume that Spack is already set up
|
||||
correctly, and that you've already sourced the setup script for your shell.
|
||||
To create a new environment, simply run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv
|
||||
|
||||
Here, *myenv* can be anything you want to name your environment. Next, we can add
|
||||
a list of packages we would like to install into our environment. Let's say we
|
||||
want a newer version of Bash than the one that comes with macOS, and we want a
|
||||
few Python libraries. We can run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv add bash@5 python py-numpy py-scipy py-matplotlib
|
||||
|
||||
Each package can be listed on a separate line, or combined into a single line like we did above.
|
||||
Notice that we're explicitly asking for Bash 5 here. You can use any spec
|
||||
you would normally use on the command line with other Spack commands.
|
||||
|
||||
Next, we want to manually configure a couple of things:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv config edit
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# This is a Spack Environment file.
|
||||
#
|
||||
# It describes a set of packages to be installed, along with
|
||||
# configuration settings.
|
||||
spack:
  # add package specs to the `specs` list
  specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
  view: true
|
||||
|
||||
You can see the packages we added earlier in the ``specs:`` section. If you
|
||||
ever want to add more packages, you can either use ``spack add`` or manually
|
||||
edit this file.
|
||||
|
||||
We also need to change the ``concretization:`` option. By default, Spack
|
||||
concretizes each spec *separately*, allowing multiple versions of the same
|
||||
package to coexist. Since we want a single consistent environment, we want to
|
||||
concretize all of the specs *together*.
|
||||
|
||||
Here is what your ``spack.yaml`` looks like with this new setting:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# This is a Spack Environment file.
|
||||
#
|
||||
# It describes a set of packages to be installed, along with
|
||||
# configuration settings.
|
||||
spack:
  # add package specs to the `specs` list
  specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
  view: true
  concretization: together
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
Symlink location
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack symlinks all installations to ``/Users/me/spack/var/spack/environments/myenv/.spack-env/view``,
|
||||
which is the default when ``view: true``.
|
||||
You can actually change this to any directory you want. For example, Homebrew
|
||||
uses ``/usr/local``, while Conda uses ``/Users/me/anaconda``. In order to access
|
||||
files in these locations, you need to update ``PATH`` and other environment variables
|
||||
to point to them. Activating the Spack environment does this automatically, but
|
||||
you can also manually set them in your ``.bashrc``.
|
||||
|
||||
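For example, a minimal sketch of a ``spack.yaml`` pointing the view at a custom directory
(the path is illustrative) could look like:

.. code-block:: yaml

   spack:
     specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
     concretization: together
     view: /Users/me/spack-view
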
.. warning::
|
||||
|
||||
There are several reasons why you shouldn't use ``/usr/local``:
|
||||
|
||||
1. If you are on macOS 10.11+ (El Capitan and newer), Apple makes it hard
|
||||
for you. You may notice permissions issues on ``/usr/local`` due to their
|
||||
`System Integrity Protection <https://support.apple.com/en-us/HT204899>`_.
|
||||
By default, users don't have permissions to install anything in ``/usr/local``,
|
||||
and you can't even change this using ``sudo chown`` or ``sudo chmod``.
|
||||
2. Other package managers like Homebrew will try to install things to the
|
||||
same directory. If you plan on using Homebrew in conjunction with Spack,
|
||||
don't symlink things to ``/usr/local``.
|
||||
3. If you are on a shared workstation, or don't have sudo privileges, you
|
||||
can't do this.
|
||||
|
||||
If you still want to do this anyway, there are several ways around SIP.
|
||||
You could disable SIP by booting into recovery mode and running
|
||||
``csrutil disable``, but this is not recommended, as it can open up your OS
|
||||
to security vulnerabilities. Another technique is to run ``spack concretize``
|
||||
and ``spack install`` using ``sudo``. This is also not recommended.
|
||||
|
||||
The safest way I've found is to create your installation directories using
|
||||
sudo, then change ownership back to the user like so:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
for directory in .spack bin contrib include lib man share
do
  sudo mkdir -p /usr/local/$directory
  sudo chown $(id -un):$(id -gn) /usr/local/$directory
done
|
||||
|
||||
Depending on the packages you install in your environment, the exact list of
|
||||
directories you need to create may vary. You may also find some packages
|
||||
like Java libraries that install a single file to the installation prefix
|
||||
instead of in a subdirectory. In this case, the action is the same, just replace
|
||||
``mkdir -p`` with ``touch`` in the for-loop above.
|
||||
|
||||
But again, it's safer just to use the default symlink location.
|
||||
|
||||
|
||||
------------
|
||||
Installation
|
||||
------------
|
||||
|
||||
To actually concretize the environment, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv concretize
|
||||
|
||||
This will tell you which packages, if any, are already installed, and alert you
to any conflicting specs.
|
||||
|
||||
To actually install these packages and symlink them to your ``view:``
|
||||
directory, simply run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv install
|
||||
$ spack env activate myenv
|
||||
|
||||
Now, when you type ``which python3``, it should find the one you just installed.
|
||||
|
||||
In order to change the default shell to our newer Bash installation, we first
|
||||
need to add it to this list of acceptable shells. Run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ sudo vim /etc/shells
|
||||
|
||||
and add the absolute path to your bash executable. Then run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ chsh -s /path/to/bash
|
||||
|
||||
Now, when you log out and log back in, ``echo $SHELL`` should point to the
|
||||
newer version of Bash.
|
||||
|
||||
---------------------------
|
||||
Updating Installed Packages
|
||||
---------------------------
|
||||
|
||||
Let's say you upgraded to a new version of macOS, or a new version of Python
|
||||
was released, and you want to rebuild your entire software stack. To do this,
|
||||
simply run the following commands:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate myenv
|
||||
$ spack concretize --force
|
||||
$ spack install
|
||||
|
||||
The ``--force`` flag tells Spack to overwrite its previous concretization
|
||||
decisions, allowing you to choose a new version of Python. If any of the new
|
||||
packages like Bash are already installed, ``spack install`` won't re-install
them; it will keep the symlinks in place.
|
||||
|
||||
--------------
|
||||
Uninstallation
|
||||
--------------
|
||||
|
||||
If you decide that Spack isn't right for you, uninstallation is simple.
|
||||
Just run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate myenv
|
||||
$ spack uninstall --all
|
||||
|
||||
This will uninstall all packages in your environment and remove the symlinks.
|
||||
@@ -1,13 +1,13 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _repositories:
|
||||
|
||||
=================================
|
||||
Package Repositories (repos.yaml)
|
||||
=================================
|
||||
=============================
|
||||
Package Repositories
|
||||
=============================
|
||||
|
||||
Spack comes with thousands of built-in package recipes in
|
||||
``var/spack/repos/builtin/``. This is a **package repository** -- a
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
Name, Supported Versions, Notes, Requirement Reason
|
||||
Python, 2.7/3.5-3.10, , Interpreter for Spack
|
||||
Python, 2.7/3.5-3.9, , Interpreter for Spack
|
||||
C/C++ Compilers, , , Building software
|
||||
make, , , Build software
|
||||
patch, , , Build software
|
||||
|
||||
|
1193
lib/spack/docs/workflows.rst
Normal file
File diff suppressed because it is too large
10
lib/spack/env/cc
vendored
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
# shellcheck disable=SC2034 # evals in this script fool shellcheck
|
||||
#
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -241,28 +241,28 @@ case "$command" in
|
||||
mode=cpp
|
||||
debug_flags="-g"
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe)
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
debug_flags="-g"
|
||||
;;
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++)
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
debug_flags="-g"
|
||||
;;
|
||||
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang)
|
||||
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
|
||||
command="$SPACK_FC"
|
||||
language="Fortran 90"
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
debug_flags="-g"
|
||||
;;
|
||||
f77|xlf|xlf_r|pgf77|amdflang)
|
||||
f77|xlf|xlf_r|pgf77)
|
||||
command="$SPACK_F77"
|
||||
language="Fortran 77"
|
||||
comp="F77"
|
||||
|
||||
1
lib/spack/env/rocmcc/amdclang
vendored
@@ -1 +0,0 @@
|
||||
../cc
|
||||
1
lib/spack/env/rocmcc/amdclang++
vendored
@@ -1 +0,0 @@
|
||||
../cpp
|
||||
1
lib/spack/env/rocmcc/amdflang
vendored
@@ -1 +0,0 @@
|
||||
../fc
|
||||
46
lib/spack/external/__init__.py
vendored
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -6,13 +6,6 @@
|
||||
"""This module contains the following external, potentially separately
|
||||
licensed, packages that are included in Spack:
|
||||
|
||||
altgraph
|
||||
--------
|
||||
|
||||
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||
* Usage: dependency of macholib
|
||||
* Version: 0.17.2
|
||||
|
||||
archspec
|
||||
--------
|
||||
|
||||
@@ -31,22 +24,6 @@
|
||||
vendored copy ever needs to be updated again:
|
||||
https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
||||
* Homepage: https://github.com/simonpercivall/astunparse
|
||||
* Usage: Unparsing Python ASTs for package hashes in Spack
|
||||
* Version: 1.6.3 (plus modifications)
|
||||
* Note: This is in ``spack.util.unparse`` because it's very heavily
|
||||
modified, and we want to track coverage for it.
|
||||
Specifically, we have modified this library to generate consistent unparsed ASTs
|
||||
regardless of the Python version. It is based on:
|
||||
1. The original ``astunparse`` library;
|
||||
2. Modifications for consistency;
|
||||
3. Backports from the ``ast.unparse`` function in Python 3.9 and later
|
||||
The unparsing is now mostly consistent with upstream ``ast.unparse``, so if
|
||||
we ever require Python 3.9 or higher, we can drop this external package.
|
||||
|
||||
attrs
|
||||
----------------
|
||||
|
||||
@@ -91,13 +68,6 @@
|
||||
* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
|
||||
* Note: We don't include tests or benchmarks; just what Spack needs.
|
||||
|
||||
macholib
|
||||
--------
|
||||
|
||||
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
|
||||
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
|
||||
* Version: 1.15.2
|
||||
|
||||
markupsafe
|
||||
----------
|
||||
|
||||
@@ -154,4 +124,18 @@
|
||||
* Usage: Python 2 and 3 compatibility utilities.
|
||||
* Version: 1.16.0
|
||||
|
||||
macholib
|
||||
--------
|
||||
|
||||
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
|
||||
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
|
||||
* Version: 1.12
|
||||
|
||||
altgraph
|
||||
--------
|
||||
|
||||
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||
* Usage: dependency of macholib
|
||||
* Version: 0.16.1
|
||||
|
||||
"""
|
||||
|
||||
100
lib/spack/external/altgraph/Dot.py
vendored
@@ -1,4 +1,4 @@
|
||||
"""
|
||||
'''
|
||||
altgraph.Dot - Interface to the dot language
|
||||
============================================
|
||||
|
||||
@@ -107,7 +107,7 @@
|
||||
- for more details on how to control the graph drawing process see the
|
||||
`graphviz reference
|
||||
<http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
|
||||
"""
|
||||
'''
|
||||
import os
|
||||
import warnings
|
||||
|
||||
@@ -115,34 +115,25 @@
|
||||
|
||||
|
||||
class Dot(object):
|
||||
"""
|
||||
'''
|
||||
A class providing a **graphviz** (dot language) representation
|
||||
allowing a fine grained control over how the graph is being
|
||||
displayed.
|
||||
|
||||
If the :command:`dot` and :command:`dotty` programs are not in the current
|
||||
system path their location needs to be specified in the contructor.
|
||||
"""
|
||||
'''
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
graph=None,
|
||||
nodes=None,
|
||||
edgefn=None,
|
||||
nodevisitor=None,
|
||||
edgevisitor=None,
|
||||
name="G",
|
||||
dot="dot",
|
||||
dotty="dotty",
|
||||
neato="neato",
|
||||
graphtype="digraph",
|
||||
):
|
||||
"""
|
||||
self, graph=None, nodes=None, edgefn=None, nodevisitor=None,
|
||||
edgevisitor=None, name="G", dot='dot', dotty='dotty',
|
||||
neato='neato', graphtype="digraph"):
|
||||
'''
|
||||
Initialization.
|
||||
"""
|
||||
'''
|
||||
self.name, self.attr = name, {}
|
||||
|
||||
assert graphtype in ["graph", "digraph"]
|
||||
assert graphtype in ['graph', 'digraph']
|
||||
self.type = graphtype
|
||||
|
||||
self.temp_dot = "tmp_dot.dot"
|
||||
@@ -157,10 +148,8 @@ def __init__(
|
||||
if graph is not None and nodes is None:
|
||||
nodes = graph
|
||||
if graph is not None and edgefn is None:
|
||||
|
||||
def edgefn(node, graph=graph):
|
||||
return graph.out_nbrs(node)
|
||||
|
||||
if nodes is None:
|
||||
nodes = ()
|
||||
|
||||
@@ -188,19 +177,20 @@ def edgefn(node, graph=graph):
|
||||
self.edge_style(head, tail, **edgestyle)
|
||||
|
||||
def style(self, **attr):
|
||||
"""
|
||||
'''
|
||||
Changes the overall style
|
||||
"""
|
||||
'''
|
||||
self.attr = attr
|
||||
|
||||
def display(self, mode="dot"):
|
||||
"""
|
||||
def display(self, mode='dot'):
|
||||
'''
|
||||
Displays the current graph via dotty
|
||||
"""
|
||||
'''
|
||||
|
||||
if mode == "neato":
|
||||
if mode == 'neato':
|
||||
self.save_dot(self.temp_neo)
|
||||
neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
|
||||
neato_cmd = "%s -o %s %s" % (
|
||||
self.neato, self.temp_dot, self.temp_neo)
|
||||
os.system(neato_cmd)
|
||||
else:
|
||||
self.save_dot(self.temp_dot)
|
||||
@@ -209,24 +199,24 @@ def display(self, mode="dot"):
|
||||
os.system(plot_cmd)
|
||||
|
||||
def node_style(self, node, **kwargs):
|
||||
"""
|
||||
'''
|
||||
Modifies a node style to the dot representation.
|
||||
"""
|
||||
'''
|
||||
if node not in self.edges:
|
||||
self.edges[node] = {}
|
||||
self.nodes[node] = kwargs
|
||||
|
||||
def all_node_style(self, **kwargs):
|
||||
"""
|
||||
'''
|
||||
Modifies all node styles
|
||||
"""
|
||||
'''
|
||||
for node in self.nodes:
|
||||
self.node_style(node, **kwargs)
|
||||
|
||||
def edge_style(self, head, tail, **kwargs):
|
||||
"""
|
||||
'''
|
||||
Modifies an edge style to the dot representation.
|
||||
"""
|
||||
'''
|
||||
if tail not in self.nodes:
|
||||
raise GraphError("invalid node %s" % (tail,))
|
||||
|
||||
@@ -239,10 +229,10 @@ def edge_style(self, head, tail, **kwargs):
|
||||
|
||||
def iterdot(self):
|
||||
# write graph title
|
||||
if self.type == "digraph":
|
||||
yield "digraph %s {\n" % (self.name,)
|
||||
elif self.type == "graph":
|
||||
yield "graph %s {\n" % (self.name,)
|
||||
if self.type == 'digraph':
|
||||
yield 'digraph %s {\n' % (self.name,)
|
||||
elif self.type == 'graph':
|
||||
yield 'graph %s {\n' % (self.name,)
|
||||
|
||||
else:
|
||||
raise GraphError("unsupported graphtype %s" % (self.type,))
|
||||
@@ -250,11 +240,11 @@ def iterdot(self):
|
||||
# write overall graph attributes
|
||||
for attr_name, attr_value in sorted(self.attr.items()):
|
||||
yield '%s="%s";' % (attr_name, attr_value)
|
||||
yield "\n"
|
||||
yield '\n'
|
||||
|
||||
# some reusable patterns
|
||||
cpatt = '%s="%s",' # to separate attributes
|
||||
epatt = "];\n" # to end attributes
|
||||
cpatt = '%s="%s",' # to separate attributes
|
||||
epatt = '];\n' # to end attributes
|
||||
|
||||
# write node attributes
|
||||
for node_name, node_attr in sorted(self.nodes.items()):
|
||||
@@ -266,24 +256,25 @@ def iterdot(self):
|
||||
# write edge attributes
|
||||
for head in sorted(self.edges):
|
||||
for tail in sorted(self.edges[head]):
|
||||
if self.type == "digraph":
|
||||
if self.type == 'digraph':
|
||||
yield '\t"%s" -> "%s" [' % (head, tail)
|
||||
else:
|
||||
yield '\t"%s" -- "%s" [' % (head, tail)
|
||||
for attr_name, attr_value in sorted(self.edges[head][tail].items()):
|
||||
for attr_name, attr_value in \
|
||||
sorted(self.edges[head][tail].items()):
|
||||
yield cpatt % (attr_name, attr_value)
|
||||
yield epatt
|
||||
|
||||
# finish file
|
||||
yield "}\n"
|
||||
yield '}\n'
|
||||
|
||||
def __iter__(self):
|
||||
return self.iterdot()
|
||||
|
||||
def save_dot(self, file_name=None):
|
||||
"""
|
||||
'''
|
||||
Saves the current graph representation into a file
|
||||
"""
|
||||
'''
|
||||
|
||||
if not file_name:
|
||||
warnings.warn(DeprecationWarning, "always pass a file_name")
|
||||
@@ -293,18 +284,19 @@ def save_dot(self, file_name=None):
|
||||
for chunk in self.iterdot():
|
||||
fp.write(chunk)
|
||||
|
||||
def save_img(self, file_name=None, file_type="gif", mode="dot"):
|
||||
"""
|
||||
def save_img(self, file_name=None, file_type="gif", mode='dot'):
|
||||
'''
|
||||
Saves the dot file as an image file
|
||||
"""
|
||||
'''
|
||||
|
||||
if not file_name:
|
||||
warnings.warn(DeprecationWarning, "always pass a file_name")
|
||||
file_name = "out"
|
||||
|
||||
if mode == "neato":
|
||||
if mode == 'neato':
|
||||
self.save_dot(self.temp_neo)
|
||||
neato_cmd = "%s -o %s %s" % (self.neato, self.temp_dot, self.temp_neo)
|
||||
neato_cmd = "%s -o %s %s" % (
|
||||
self.neato, self.temp_dot, self.temp_neo)
|
||||
os.system(neato_cmd)
|
||||
plot_cmd = self.dot
|
||||
else:
|
||||
@@ -313,9 +305,5 @@ def save_img(self, file_name=None, file_type="gif", mode="dot"):
|
||||
|
||||
file_name = "%s.%s" % (file_name, file_type)
|
||||
create_cmd = "%s -T%s %s -o %s" % (
|
||||
plot_cmd,
|
||||
file_type,
|
||||
self.temp_dot,
|
||||
file_name,
|
||||
)
|
||||
plot_cmd, file_type, self.temp_dot, file_name)
|
||||
os.system(create_cmd)
|
||||
|
||||
34
lib/spack/external/altgraph/Graph.py
vendored
@@ -13,9 +13,8 @@
|
||||
#--Nathan Denny, May 27, 1999
|
||||
"""
|
||||
|
||||
from collections import deque
|
||||
|
||||
from altgraph import GraphError
|
||||
from collections import deque
|
||||
|
||||
|
||||
class Graph(object):
|
||||
@@ -59,10 +58,8 @@ def __init__(self, edges=None):
|
||||
raise GraphError("Cannot create edge from %s" % (item,))
|
||||
|
||||
def __repr__(self):
|
||||
return "<Graph: %d nodes, %d edges>" % (
|
||||
self.number_of_nodes(),
|
||||
self.number_of_edges(),
|
||||
)
|
||||
return '<Graph: %d nodes, %d edges>' % (
|
||||
self.number_of_nodes(), self.number_of_edges())
|
||||
|
||||
def add_node(self, node, node_data=None):
|
||||
"""
|
||||
@@ -114,7 +111,7 @@ def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
|
||||
self.nodes[tail_id][0].append(edge)
|
||||
self.nodes[head_id][1].append(edge)
|
||||
except KeyError:
|
||||
raise GraphError("Invalid nodes %s -> %s" % (head_id, tail_id))
|
||||
raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
|
||||
|
||||
# store edge information
|
||||
self.edges[edge] = (head_id, tail_id, edge_data)
|
||||
@@ -127,12 +124,13 @@ def hide_edge(self, edge):
|
||||
time.
|
||||
"""
|
||||
try:
|
||||
head_id, tail_id, edge_data = self.hidden_edges[edge] = self.edges[edge]
|
||||
head_id, tail_id, edge_data = \
|
||||
self.hidden_edges[edge] = self.edges[edge]
|
||||
self.nodes[tail_id][0].remove(edge)
|
||||
self.nodes[head_id][1].remove(edge)
|
||||
del self.edges[edge]
|
||||
except KeyError:
|
||||
raise GraphError("Invalid edge %s" % edge)
|
||||
raise GraphError('Invalid edge %s' % edge)
|
||||
|
||||
def hide_node(self, node):
|
||||
"""
|
||||
@@ -146,7 +144,7 @@ def hide_node(self, node):
|
||||
self.hide_edge(edge)
|
||||
del self.nodes[node]
|
||||
except KeyError:
|
||||
raise GraphError("Invalid node %s" % node)
|
||||
raise GraphError('Invalid node %s' % node)
|
||||
|
||||
def restore_node(self, node):
|
||||
"""
|
||||
@@ -159,7 +157,7 @@ def restore_node(self, node):
|
||||
self.restore_edge(edge)
|
||||
del self.hidden_nodes[node]
|
||||
except KeyError:
|
||||
raise GraphError("Invalid node %s" % node)
|
||||
raise GraphError('Invalid node %s' % node)
|
||||
|
||||
def restore_edge(self, edge):
|
||||
"""
|
||||
@@ -172,7 +170,7 @@ def restore_edge(self, edge):
|
||||
self.edges[edge] = head_id, tail_id, data
|
||||
del self.hidden_edges[edge]
|
||||
except KeyError:
|
||||
raise GraphError("Invalid edge %s" % edge)
|
||||
raise GraphError('Invalid edge %s' % edge)
|
||||
|
||||
def restore_all_edges(self):
|
||||
"""
|
||||
@@ -205,7 +203,7 @@ def edge_by_id(self, edge):
|
||||
head, tail, data = self.edges[edge]
|
||||
except KeyError:
|
||||
head, tail = None, None
|
||||
raise GraphError("Invalid edge %s" % edge)
|
||||
raise GraphError('Invalid edge %s' % edge)
|
||||
|
||||
return (head, tail)
|
||||
|
||||
@@ -341,7 +339,7 @@ def out_edges(self, node):
|
||||
try:
|
||||
return list(self.nodes[node][1])
|
||||
except KeyError:
|
||||
raise GraphError("Invalid node %s" % node)
|
||||
raise GraphError('Invalid node %s' % node)
|
||||
|
||||
def inc_edges(self, node):
|
||||
"""
|
||||
@@ -350,7 +348,7 @@ def inc_edges(self, node):
|
||||
try:
|
||||
return list(self.nodes[node][0])
|
||||
except KeyError:
|
||||
raise GraphError("Invalid node %s" % node)
|
||||
raise GraphError('Invalid node %s' % node)
|
||||
|
||||
def all_edges(self, node):
|
||||
"""
|
||||
@@ -490,7 +488,7 @@ def iterdfs(self, start, end=None, forward=True):
|
||||
The forward parameter specifies whether it is a forward or backward
|
||||
traversal.
|
||||
"""
|
||||
visited, stack = {start}, deque([start])
|
||||
visited, stack = set([start]), deque([start])
|
||||
|
||||
if forward:
|
||||
get_edges = self.out_edges
|
||||
@@ -517,7 +515,7 @@ def iterdata(self, start, end=None, forward=True, condition=None):
|
||||
condition callback is only called when node_data is not None.
|
||||
"""
|
||||
|
||||
visited, stack = {start}, deque([start])
|
||||
visited, stack = set([start]), deque([start])
|
||||
|
||||
if forward:
|
||||
get_edges = self.out_edges
|
||||
@@ -549,7 +547,7 @@ def _iterbfs(self, start, end=None, forward=True):
|
||||
traversal. Returns a list of tuples where the first value is the hop
|
||||
value the second value is the node id.
|
||||
"""
|
||||
queue, visited = deque([(start, 0)]), {start}
|
||||
queue, visited = deque([(start, 0)]), set([start])
|
||||
|
||||
# the direction of the bfs depends on the edges that are sampled
|
||||
if forward:
|
||||
|
||||
53
lib/spack/external/altgraph/GraphAlgo.py
vendored
@@ -1,7 +1,7 @@
|
||||
"""
|
||||
'''
|
||||
altgraph.GraphAlgo - Graph algorithms
|
||||
=====================================
|
||||
"""
|
||||
'''
|
||||
from altgraph import GraphError
|
||||
|
||||
|
||||
@@ -28,9 +28,9 @@ def dijkstra(graph, start, end=None):
|
||||
Adapted to altgraph by Istvan Albert, Pennsylvania State University -
|
||||
June, 9 2004
|
||||
"""
|
||||
D = {} # dictionary of final distances
|
||||
P = {} # dictionary of predecessors
|
||||
Q = _priorityDictionary() # estimated distances of non-final vertices
|
||||
D = {} # dictionary of final distances
|
||||
P = {} # dictionary of predecessors
|
||||
Q = _priorityDictionary() # estimated distances of non-final vertices
|
||||
Q[start] = 0
|
||||
|
||||
for v in Q:
|
||||
@@ -44,8 +44,7 @@ def dijkstra(graph, start, end=None):
|
||||
if w in D:
|
||||
if vwLength < D[w]:
|
||||
raise GraphError(
|
||||
"Dijkstra: found better path to already-final vertex"
|
||||
)
|
||||
"Dijkstra: found better path to already-final vertex")
|
||||
elif w not in Q or vwLength < Q[w]:
|
||||
Q[w] = vwLength
|
||||
P[w] = v
|
||||
@@ -77,7 +76,7 @@ def shortest_path(graph, start, end):
|
||||
# Utility classes and functions
|
||||
#
|
||||
class _priorityDictionary(dict):
|
||||
"""
|
||||
'''
|
||||
Priority dictionary using binary heaps (internal use only)
|
||||
|
||||
David Eppstein, UC Irvine, 8 Mar 2002
|
||||
@@ -93,22 +92,22 @@ class _priorityDictionary(dict):
|
||||
order. Each item is not removed until the next item is requested,
|
||||
so D[x] will still return a useful value until the next iteration
|
||||
of the for-loop. Each operation takes logarithmic amortized time.
|
||||
"""
|
||||
'''
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
'''
|
||||
Initialize priorityDictionary by creating binary heap of pairs
|
||||
(value,key). Note that changing or removing a dict entry will not
|
||||
remove the old pair from the heap until it is found by smallest()
|
||||
or until the heap is rebuilt.
|
||||
"""
|
||||
'''
|
||||
self.__heap = []
|
||||
dict.__init__(self)
|
||||
|
||||
def smallest(self):
|
||||
"""
|
||||
'''
|
||||
Find smallest item after removing deleted items from front of heap.
|
||||
"""
|
||||
'''
|
||||
if len(self) == 0:
|
||||
raise IndexError("smallest of empty priorityDictionary")
|
||||
heap = self.__heap
|
||||
@@ -116,11 +115,9 @@ def smallest(self):
|
||||
lastItem = heap.pop()
|
||||
insertionPoint = 0
|
||||
while 1:
|
||||
smallChild = 2 * insertionPoint + 1
|
||||
if (
|
||||
smallChild + 1 < len(heap)
|
||||
and heap[smallChild] > heap[smallChild + 1]
|
||||
):
|
||||
smallChild = 2*insertionPoint+1
|
||||
if smallChild+1 < len(heap) and \
|
||||
heap[smallChild] > heap[smallChild+1]:
|
||||
smallChild += 1
|
||||
if smallChild >= len(heap) or lastItem <= heap[smallChild]:
|
||||
heap[insertionPoint] = lastItem
|
||||
@@ -130,24 +127,22 @@ def smallest(self):
|
||||
return heap[0][1]
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
'''
|
||||
Create destructive sorted iterator of priorityDictionary.
|
||||
"""
|
||||
|
||||
'''
|
||||
def iterfn():
|
||||
while len(self) > 0:
|
||||
x = self.smallest()
|
||||
yield x
|
||||
del self[x]
|
||||
|
||||
return iterfn()
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
"""
|
||||
'''
|
||||
Change value stored in dictionary and add corresponding pair to heap.
|
||||
Rebuilds the heap if the number of deleted items gets large, to avoid
|
||||
memory leakage.
|
||||
"""
|
||||
'''
|
||||
dict.__setitem__(self, key, val)
|
||||
heap = self.__heap
|
||||
if len(heap) > 2 * len(self):
|
||||
@@ -157,15 +152,15 @@ def __setitem__(self, key, val):
|
||||
newPair = (val, key)
|
||||
insertionPoint = len(heap)
|
||||
heap.append(None)
|
||||
while insertionPoint > 0 and newPair < heap[(insertionPoint - 1) // 2]:
|
||||
heap[insertionPoint] = heap[(insertionPoint - 1) // 2]
|
||||
insertionPoint = (insertionPoint - 1) // 2
|
||||
while insertionPoint > 0 and newPair < heap[(insertionPoint-1)//2]:
|
||||
heap[insertionPoint] = heap[(insertionPoint-1)//2]
|
||||
insertionPoint = (insertionPoint-1)//2
|
||||
heap[insertionPoint] = newPair
|
||||
|
||||
def setdefault(self, key, val):
|
||||
"""
|
||||
'''
|
||||
Reimplement setdefault to pass through our customized __setitem__.
|
||||
"""
|
||||
'''
|
||||
if key not in self:
|
||||
self[key] = val
|
||||
return self[key]
|
||||
|
||||
24 lib/spack/external/altgraph/GraphStat.py vendored
@@ -1,11 +1,11 @@
"""
'''
altgraph.GraphStat - Functions providing various graph statistics
=================================================================
"""
'''


def degree_dist(graph, limits=(0, 0), bin_num=10, mode="out"):
"""
def degree_dist(graph, limits=(0, 0), bin_num=10, mode='out'):
'''
Computes the degree distribution for a graph.

Returns a list of tuples where the first element of the tuple is the
@@ -15,10 +15,10 @@ def degree_dist(graph, limits=(0, 0), bin_num=10, mode="out"):
Example::

....
"""
'''

deg = []
if mode == "inc":
if mode == 'inc':
get_deg = graph.inc_degree
else:
get_deg = graph.out_degree
@@ -34,38 +34,38 @@ def degree_dist(graph, limits=(0, 0), bin_num=10, mode="out"):
return results


_EPS = 1.0 / (2.0 ** 32)
_EPS = 1.0/(2.0**32)


def _binning(values, limits=(0, 0), bin_num=10):
"""
'''
Bins data that falls between certain limits, if the limits are (0, 0) the
minimum and maximum values are used.

Returns a list of tuples where the first element of the tuple is the
center of the bin and the second element of the tuple are the counts.
"""
'''
if limits == (0, 0):
min_val, max_val = min(values) - _EPS, max(values) + _EPS
else:
min_val, max_val = limits

# get bin size
bin_size = (max_val - min_val) / float(bin_num)
bin_size = (max_val - min_val)/float(bin_num)
bins = [0] * (bin_num)

# will ignore these outliers for now
for value in values:
try:
if (value - min_val) >= 0:
index = int((value - min_val) / float(bin_size))
index = int((value - min_val)/float(bin_size))
bins[index] += 1
except IndexError:
pass

# make it ready for an x,y plot
result = []
center = (bin_size / 2) + min_val
center = (bin_size/2) + min_val
for i, y in enumerate(bins):
x = center + bin_size * i
result.append((x, y))

37 lib/spack/external/altgraph/GraphUtil.py vendored
@@ -1,29 +1,31 @@
"""
'''
altgraph.GraphUtil - Utility classes and functions
==================================================
"""
'''

import random
from collections import deque

from altgraph import Graph, GraphError
from altgraph import Graph
from altgraph import GraphError


def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False):
"""
def generate_random_graph(
node_num, edge_num, self_loops=False, multi_edges=False):
'''
Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
*node_num* nodes randomly connected by *edge_num* edges.
"""
'''
g = Graph.Graph()

if not multi_edges:
if self_loops:
max_edges = node_num * node_num
else:
max_edges = node_num * (node_num - 1)
max_edges = node_num * (node_num-1)

if edge_num > max_edges:
raise GraphError("inconsistent arguments to 'generate_random_graph'")
raise GraphError(
"inconsistent arguments to 'generate_random_graph'")

nodes = range(node_num)

@@ -50,16 +52,17 @@ def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=Fals
return g


def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False):
"""
def generate_scale_free_graph(
steps, growth_num, self_loops=False, multi_edges=False):
'''
Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
will have *steps* \\* *growth_num* nodes and a scale free (powerlaw)
will have *steps* \* *growth_num* nodes and a scale free (powerlaw)
connectivity. Starting with a fully connected graph with *growth_num*
nodes at every step *growth_num* nodes are added to the graph and are
connected to existing nodes with a probability proportional to the degree
of these existing nodes.
"""
# The code doesn't seem to do what the documentation claims.
'''
# FIXME: The code doesn't seem to do what the documentation claims.
graph = Graph.Graph()

# initialize the graph
@@ -110,7 +113,7 @@ def filter_stack(graph, head, filters):
in *removes*.
"""

visited, removes, orphans = {head}, set(), set()
visited, removes, orphans = set([head]), set(), set()
stack = deque([(head, head)])
get_data = graph.node_data
get_edges = graph.out_edges
@@ -134,6 +137,8 @@ def filter_stack(graph, head, filters):
visited.add(tail)
stack.append((last_good, tail))

orphans = [(lg, tl) for (lg, tl) in orphans if tl not in removes]
orphans = [
(lg, tl)
for (lg, tl) in orphans if tl not in removes]

return visited, removes, orphans

12 lib/spack/external/altgraph/ObjectGraph.py vendored
@@ -27,7 +27,7 @@ def __init__(self, graph=None, debug=0):
graph.add_node(self, None)

def __repr__(self):
return "<%s>" % (type(self).__name__,)
return '<%s>' % (type(self).__name__,)

def flatten(self, condition=None, start=None):
"""
@@ -58,7 +58,6 @@ def iter_edges(lst, n):
if ident not in seen:
yield self.findNode(ident)
seen.add(ident)

return iter_edges(outraw, 3), iter_edges(incraw, 2)

def edgeData(self, fromNode, toNode):
@@ -88,12 +87,12 @@ def filterStack(self, filters):
visited, removes, orphans = filter_stack(self.graph, self, filters)

for last_good, tail in orphans:
self.graph.add_edge(last_good, tail, edge_data="orphan")
self.graph.add_edge(last_good, tail, edge_data='orphan')

for node in removes:
self.graph.hide_node(node)

return len(visited) - 1, len(removes), len(orphans)
return len(visited)-1, len(removes), len(orphans)

def removeNode(self, node):
"""
@@ -136,7 +135,7 @@ def getRawIdent(self, node):
"""
if node is self:
return node
ident = getattr(node, "graphident", None)
ident = getattr(node, 'graphident', None)
return ident

def __contains__(self, node):
@@ -193,7 +192,8 @@ def msg(self, level, s, *args):
Print a debug message with the given level
"""
if s and level <= self.debug:
print("%s%s %s" % (" " * self.indent, s, " ".join(map(repr, args))))
print("%s%s %s" % (
" " * self.indent, s, ' '.join(map(repr, args))))

def msgin(self, level, s, *args):
"""

14 lib/spack/external/altgraph/__init__.py vendored
@@ -1,4 +1,4 @@
"""
'''
altgraph - a python graph library
=================================

@@ -138,11 +138,13 @@
@newfield contributor: Contributors:
@contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}

"""
import pkg_resources

__version__ = pkg_resources.require("altgraph")[0].version

'''
# import pkg_resources
# __version__ = pkg_resources.require('altgraph')[0].version
# pkg_resources is not finding the altgraph import despite the fact that it is in sys.path
# there is no .dist-info or .egg-info for pkg_resources to query the version from
# so it must be set manually
__version__ = '0.16.1'

class GraphError(ValueError):
pass

174
lib/spack/external/macholib/MachO.py
vendored
174
lib/spack/external/macholib/MachO.py
vendored
@@ -3,43 +3,21 @@
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import struct
|
||||
import os
|
||||
|
||||
from macholib.util import fileview
|
||||
|
||||
from .mach_o import (
|
||||
FAT_MAGIC,
|
||||
FAT_MAGIC_64,
|
||||
LC_DYSYMTAB,
|
||||
LC_ID_DYLIB,
|
||||
LC_LOAD_DYLIB,
|
||||
LC_LOAD_UPWARD_DYLIB,
|
||||
LC_LOAD_WEAK_DYLIB,
|
||||
LC_PREBOUND_DYLIB,
|
||||
LC_REEXPORT_DYLIB,
|
||||
LC_REGISTRY,
|
||||
LC_SEGMENT,
|
||||
LC_SEGMENT_64,
|
||||
LC_SYMTAB,
|
||||
MH_CIGAM,
|
||||
MH_CIGAM_64,
|
||||
MH_FILETYPE_SHORTNAMES,
|
||||
MH_MAGIC,
|
||||
MH_MAGIC_64,
|
||||
S_ZEROFILL,
|
||||
fat_arch,
|
||||
fat_arch64,
|
||||
fat_header,
|
||||
load_command,
|
||||
mach_header,
|
||||
mach_header_64,
|
||||
section,
|
||||
section_64,
|
||||
)
|
||||
from .mach_o import MH_FILETYPE_SHORTNAMES, LC_DYSYMTAB, LC_SYMTAB
|
||||
from .mach_o import load_command, S_ZEROFILL, section_64, section
|
||||
from .mach_o import LC_REGISTRY, LC_ID_DYLIB, LC_SEGMENT, fat_header
|
||||
from .mach_o import LC_SEGMENT_64, MH_CIGAM_64, MH_MAGIC_64, FAT_MAGIC
|
||||
from .mach_o import mach_header, fat_arch64, FAT_MAGIC_64, fat_arch
|
||||
from .mach_o import LC_REEXPORT_DYLIB, LC_PREBOUND_DYLIB, LC_LOAD_WEAK_DYLIB
|
||||
from .mach_o import LC_LOAD_UPWARD_DYLIB, LC_LOAD_DYLIB, mach_header_64
|
||||
from .mach_o import MH_CIGAM, MH_MAGIC
|
||||
from .ptypes import sizeof
|
||||
|
||||
from macholib.util import fileview
|
||||
try:
|
||||
from macholib.compat import bytes
|
||||
except ImportError:
|
||||
@@ -53,23 +31,23 @@
|
||||
if sys.version_info[0] == 2:
|
||||
range = xrange # noqa: F821
|
||||
|
||||
__all__ = ["MachO"]
|
||||
__all__ = ['MachO']
|
||||
|
||||
_RELOCATABLE = {
|
||||
_RELOCATABLE = set((
|
||||
# relocatable commands that should be used for dependency walking
|
||||
LC_LOAD_DYLIB,
|
||||
LC_LOAD_UPWARD_DYLIB,
|
||||
LC_LOAD_WEAK_DYLIB,
|
||||
LC_PREBOUND_DYLIB,
|
||||
LC_REEXPORT_DYLIB,
|
||||
}
|
||||
))
|
||||
|
||||
_RELOCATABLE_NAMES = {
|
||||
LC_LOAD_DYLIB: "load_dylib",
|
||||
LC_LOAD_UPWARD_DYLIB: "load_upward_dylib",
|
||||
LC_LOAD_WEAK_DYLIB: "load_weak_dylib",
|
||||
LC_PREBOUND_DYLIB: "prebound_dylib",
|
||||
LC_REEXPORT_DYLIB: "reexport_dylib",
|
||||
LC_LOAD_DYLIB: 'load_dylib',
|
||||
LC_LOAD_UPWARD_DYLIB: 'load_upward_dylib',
|
||||
LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
|
||||
LC_PREBOUND_DYLIB: 'prebound_dylib',
|
||||
LC_REEXPORT_DYLIB: 'reexport_dylib',
|
||||
}
|
||||
|
||||
|
||||
@@ -87,14 +65,13 @@ def lc_str_value(offset, cmd_info):
|
||||
cmd_load, cmd_cmd, cmd_data = cmd_info
|
||||
|
||||
offset -= sizeof(cmd_load) + sizeof(cmd_cmd)
|
||||
return cmd_data[offset:].strip(b"\x00")
|
||||
return cmd_data[offset:].strip(b'\x00')
|
||||
|
||||
|
||||
class MachO(object):
|
||||
"""
|
||||
Provides reading/writing the Mach-O header of a specific existing file
|
||||
"""
|
||||
|
||||
# filename - the original filename of this mach-o
|
||||
# sizediff - the current deviation from the initial mach-o size
|
||||
# header - the mach-o header
|
||||
@@ -114,7 +91,7 @@ def __init__(self, filename):
|
||||
# initialized by load
|
||||
self.fat = None
|
||||
self.headers = []
|
||||
with open(filename, "rb") as fp:
|
||||
with open(filename, 'rb') as fp:
|
||||
self.load(fp)
|
||||
|
||||
def __repr__(self):
|
||||
@@ -122,7 +99,7 @@ def __repr__(self):
|
||||
|
||||
def load(self, fh):
|
||||
assert fh.tell() == 0
|
||||
header = struct.unpack(">I", fh.read(4))[0]
|
||||
header = struct.unpack('>I', fh.read(4))[0]
|
||||
fh.seek(0)
|
||||
if header in (FAT_MAGIC, FAT_MAGIC_64):
|
||||
self.load_fat(fh)
|
||||
@@ -135,9 +112,11 @@ def load(self, fh):
|
||||
def load_fat(self, fh):
|
||||
self.fat = fat_header.from_fileobj(fh)
|
||||
if self.fat.magic == FAT_MAGIC:
|
||||
archs = [fat_arch.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
|
||||
archs = [fat_arch.from_fileobj(fh)
|
||||
for i in range(self.fat.nfat_arch)]
|
||||
elif self.fat.magic == FAT_MAGIC_64:
|
||||
archs = [fat_arch64.from_fileobj(fh) for i in range(self.fat.nfat_arch)]
|
||||
archs = [fat_arch64.from_fileobj(fh)
|
||||
for i in range(self.fat.nfat_arch)]
|
||||
else:
|
||||
raise ValueError("Unknown fat header magic: %r" % (self.fat.magic))
|
||||
|
||||
@@ -153,18 +132,19 @@ def rewriteLoadCommands(self, *args, **kw):
|
||||
|
||||
def load_header(self, fh, offset, size):
|
||||
fh.seek(offset)
|
||||
header = struct.unpack(">I", fh.read(4))[0]
|
||||
header = struct.unpack('>I', fh.read(4))[0]
|
||||
fh.seek(offset)
|
||||
if header == MH_MAGIC:
|
||||
magic, hdr, endian = MH_MAGIC, mach_header, ">"
|
||||
magic, hdr, endian = MH_MAGIC, mach_header, '>'
|
||||
elif header == MH_CIGAM:
|
||||
magic, hdr, endian = MH_CIGAM, mach_header, "<"
|
||||
magic, hdr, endian = MH_CIGAM, mach_header, '<'
|
||||
elif header == MH_MAGIC_64:
|
||||
magic, hdr, endian = MH_MAGIC_64, mach_header_64, ">"
|
||||
magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
|
||||
elif header == MH_CIGAM_64:
|
||||
magic, hdr, endian = MH_CIGAM_64, mach_header_64, "<"
|
||||
magic, hdr, endian = MH_CIGAM_64, mach_header_64, '<'
|
||||
else:
|
||||
raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (header, fh))
|
||||
raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
|
||||
header, fh))
|
||||
hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
|
||||
self.headers.append(hdr)
|
||||
|
||||
@@ -177,7 +157,6 @@ class MachOHeader(object):
|
||||
"""
|
||||
Provides reading/writing the Mach-O header of a specific existing file
|
||||
"""
|
||||
|
||||
# filename - the original filename of this mach-o
|
||||
# sizediff - the current deviation from the initial mach-o size
|
||||
# header - the mach-o header
|
||||
@@ -210,19 +189,15 @@ def __init__(self, parent, fh, offset, size, magic, hdr, endian):
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s filename=%r offset=%d size=%d endian=%r>" % (
|
||||
type(self).__name__,
|
||||
self.parent.filename,
|
||||
self.offset,
|
||||
self.size,
|
||||
self.endian,
|
||||
)
|
||||
type(self).__name__, self.parent.filename, self.offset, self.size,
|
||||
self.endian)
|
||||
|
||||
def load(self, fh):
|
||||
fh = fileview(fh, self.offset, self.size)
|
||||
fh.seek(0)
|
||||
|
||||
self.sizediff = 0
|
||||
kw = {"_endian_": self.endian}
|
||||
kw = {'_endian_': self.endian}
|
||||
header = self.mach_header.from_fileobj(fh, **kw)
|
||||
self.header = header
|
||||
# if header.magic != self.MH_MAGIC:
|
||||
@@ -261,9 +236,8 @@ def load(self, fh):
|
||||
section_cls = section_64
|
||||
|
||||
expected_size = (
|
||||
sizeof(klass)
|
||||
+ sizeof(load_command)
|
||||
+ (sizeof(section_cls) * cmd_cmd.nsects)
|
||||
sizeof(klass) + sizeof(load_command) +
|
||||
(sizeof(section_cls) * cmd_cmd.nsects)
|
||||
)
|
||||
if cmd_load.cmdsize != expected_size:
|
||||
raise ValueError("Segment size mismatch")
|
||||
@@ -274,12 +248,12 @@ def load(self, fh):
|
||||
low_offset = min(low_offset, cmd_cmd.fileoff)
|
||||
else:
|
||||
# this one has multiple segments
|
||||
for _j in range(cmd_cmd.nsects):
|
||||
for j in range(cmd_cmd.nsects):
|
||||
# read the segment
|
||||
seg = section_cls.from_fileobj(fh, **kw)
|
||||
# if the segment has a size and is not zero filled
|
||||
# then its beginning is the offset of this segment
|
||||
not_zerofill = (seg.flags & S_ZEROFILL) != S_ZEROFILL
|
||||
not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
|
||||
if seg.offset > 0 and seg.size > 0 and not_zerofill:
|
||||
low_offset = min(low_offset, seg.offset)
|
||||
if not_zerofill:
|
||||
@@ -292,7 +266,7 @@ def load(self, fh):
|
||||
# data is a list of segments
|
||||
cmd_data = segs
|
||||
|
||||
# These are disabled for now because writing back doesn't work
|
||||
# XXX: Disabled for now because writing back doesn't work
|
||||
# elif cmd_load.cmd == LC_CODE_SIGNATURE:
|
||||
# c = fh.tell()
|
||||
# fh.seek(cmd_cmd.dataoff)
|
||||
@@ -306,17 +280,17 @@ def load(self, fh):
|
||||
|
||||
else:
|
||||
# data is a raw str
|
||||
data_size = cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
|
||||
data_size = (
|
||||
cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
|
||||
)
|
||||
cmd_data = fh.read(data_size)
|
||||
cmd.append((cmd_load, cmd_cmd, cmd_data))
|
||||
read_bytes += cmd_load.cmdsize
|
||||
|
||||
# make sure the header made sense
|
||||
if read_bytes != header.sizeofcmds:
|
||||
raise ValueError(
|
||||
"Read %d bytes, header reports %d bytes"
|
||||
% (read_bytes, header.sizeofcmds)
|
||||
)
|
||||
raise ValueError("Read %d bytes, header reports %d bytes" % (
|
||||
read_bytes, header.sizeofcmds))
|
||||
self.total_size = sizeof(self.mach_header) + read_bytes
|
||||
self.low_offset = low_offset
|
||||
|
||||
@@ -329,9 +303,8 @@ def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
|
||||
if shouldRelocateCommand(lc.cmd):
|
||||
name = _RELOCATABLE_NAMES[lc.cmd]
|
||||
ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
|
||||
yield idx, name, data[
|
||||
ofs : data.find(b"\x00", ofs) # noqa: E203
|
||||
].decode(sys.getfilesystemencoding())
|
||||
yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
|
||||
sys.getfilesystemencoding())
|
||||
|
||||
def rewriteInstallNameCommand(self, loadcmd):
|
||||
"""Rewrite the load command of this dylib"""
|
||||
@@ -344,9 +317,8 @@ def changedHeaderSizeBy(self, bytes):
|
||||
self.sizediff += bytes
|
||||
if (self.total_size + self.sizediff) > self.low_offset:
|
||||
print(
|
||||
"WARNING: Mach-O header in %r may be too large to relocate"
|
||||
% (self.parent.filename,)
|
||||
)
|
||||
"WARNING: Mach-O header in %r may be too large to relocate" % (
|
||||
self.parent.filename,))
|
||||
|
||||
def rewriteLoadCommands(self, changefunc):
|
||||
"""
|
||||
@@ -355,22 +327,22 @@ def rewriteLoadCommands(self, changefunc):
|
||||
data = changefunc(self.parent.filename)
|
||||
changed = False
|
||||
if data is not None:
|
||||
if self.rewriteInstallNameCommand(data.encode(sys.getfilesystemencoding())):
|
||||
if self.rewriteInstallNameCommand(
|
||||
data.encode(sys.getfilesystemencoding())):
|
||||
changed = True
|
||||
for idx, _name, filename in self.walkRelocatables():
|
||||
for idx, name, filename in self.walkRelocatables():
|
||||
data = changefunc(filename)
|
||||
if data is not None:
|
||||
if self.rewriteDataForCommand(
|
||||
idx, data.encode(sys.getfilesystemencoding())
|
||||
):
|
||||
if self.rewriteDataForCommand(idx, data.encode(
|
||||
sys.getfilesystemencoding())):
|
||||
changed = True
|
||||
return changed
|
||||
|
||||
def rewriteDataForCommand(self, idx, data):
|
||||
lc, cmd, old_data = self.commands[idx]
|
||||
hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
|
||||
align = struct.calcsize("Q")
|
||||
data = data + (b"\x00" * (align - (len(data) % align)))
|
||||
align = struct.calcsize('Q')
|
||||
data = data + (b'\x00' * (align - (len(data) % align)))
|
||||
newsize = hdrsize + len(data)
|
||||
self.commands[idx] = (lc, cmd, data)
|
||||
self.changedHeaderSizeBy(newsize - lc.cmdsize)
|
||||
@@ -380,17 +352,10 @@ def rewriteDataForCommand(self, idx, data):
|
||||
def synchronize_size(self):
|
||||
if (self.total_size + self.sizediff) > self.low_offset:
|
||||
raise ValueError(
|
||||
(
|
||||
"New Mach-O header is too large to relocate in %r "
|
||||
"(new size=%r, max size=%r, delta=%r)"
|
||||
)
|
||||
% (
|
||||
self.parent.filename,
|
||||
self.total_size + self.sizediff,
|
||||
self.low_offset,
|
||||
self.sizediff,
|
||||
)
|
||||
)
|
||||
("New Mach-O header is too large to relocate in %r "
|
||||
"(new size=%r, max size=%r, delta=%r)") % (
|
||||
self.parent.filename, self.total_size + self.sizediff,
|
||||
self.low_offset, self.sizediff))
|
||||
self.header.sizeofcmds += self.sizediff
|
||||
self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
|
||||
self.sizediff = 0
|
||||
@@ -431,16 +396,16 @@ def write(self, fileobj):
|
||||
|
||||
# zero out the unused space, doubt this is strictly necessary
|
||||
# and is generally probably already the case
|
||||
fileobj.write(b"\x00" * (self.low_offset - fileobj.tell()))
|
||||
fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))
|
||||
|
||||
def getSymbolTableCommand(self):
|
||||
for lc, cmd, _data in self.commands:
|
||||
for lc, cmd, data in self.commands:
|
||||
if lc.cmd == LC_SYMTAB:
|
||||
return cmd
|
||||
return None
|
||||
|
||||
def getDynamicSymbolTableCommand(self):
|
||||
for lc, cmd, _data in self.commands:
|
||||
for lc, cmd, data in self.commands:
|
||||
if lc.cmd == LC_DYSYMTAB:
|
||||
return cmd
|
||||
return None
|
||||
@@ -449,23 +414,22 @@ def get_filetype_shortname(self, filetype):
|
||||
if filetype in MH_FILETYPE_SHORTNAMES:
|
||||
return MH_FILETYPE_SHORTNAMES[filetype]
|
||||
else:
|
||||
return "unknown"
|
||||
return 'unknown'
|
||||
|
||||
|
||||
def main(fn):
|
||||
m = MachO(fn)
|
||||
seen = set()
|
||||
for header in m.headers:
|
||||
for _idx, name, other in header.walkRelocatables():
|
||||
for idx, name, other in header.walkRelocatables():
|
||||
if other not in seen:
|
||||
seen.add(other)
|
||||
print("\t" + name + ": " + other)
|
||||
print('\t' + name + ": " + other)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
|
||||
files = sys.argv[1:] or ["/bin/ls"]
|
||||
files = sys.argv[1:] or ['/bin/ls']
|
||||
for fn in files:
|
||||
print(fn)
|
||||
main(fn)
|
||||
|
||||
39
lib/spack/external/macholib/MachOGraph.py
vendored
39
lib/spack/external/macholib/MachOGraph.py
vendored
@@ -8,10 +8,10 @@
|
||||
from altgraph.ObjectGraph import ObjectGraph
|
||||
|
||||
from macholib.dyld import dyld_find
|
||||
from macholib.itergraphreport import itergraphreport
|
||||
from macholib.MachO import MachO
|
||||
from macholib.itergraphreport import itergraphreport
|
||||
|
||||
__all__ = ["MachOGraph"]
|
||||
__all__ = ['MachOGraph']
|
||||
|
||||
try:
|
||||
unicode
|
||||
@@ -25,14 +25,13 @@ def __init__(self, filename):
|
||||
self.headers = ()
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s graphident=%r>" % (type(self).__name__, self.graphident)
|
||||
return '<%s graphident=%r>' % (type(self).__name__, self.graphident)
|
||||
|
||||
|
||||
class MachOGraph(ObjectGraph):
|
||||
"""
|
||||
Graph data structure of Mach-O dependencies
|
||||
"""
|
||||
|
||||
def __init__(self, debug=0, graph=None, env=None, executable_path=None):
|
||||
super(MachOGraph, self).__init__(debug=debug, graph=graph)
|
||||
self.env = env
|
||||
@@ -42,18 +41,16 @@ def __init__(self, debug=0, graph=None, env=None, executable_path=None):
|
||||
def locate(self, filename, loader=None):
|
||||
if not isinstance(filename, (str, unicode)):
|
||||
raise TypeError("%r is not a string" % (filename,))
|
||||
if filename.startswith("@loader_path/") and loader is not None:
|
||||
if filename.startswith('@loader_path/') and loader is not None:
|
||||
fn = self.trans_table.get((loader.filename, filename))
|
||||
if fn is None:
|
||||
loader_path = loader.loader_path
|
||||
|
||||
try:
|
||||
fn = dyld_find(
|
||||
filename,
|
||||
env=self.env,
|
||||
filename, env=self.env,
|
||||
executable_path=self.executable_path,
|
||||
loader_path=loader_path,
|
||||
)
|
||||
loader_path=loader_path)
|
||||
self.trans_table[(loader.filename, filename)] = fn
|
||||
except ValueError:
|
||||
return None
|
||||
@@ -63,8 +60,8 @@ def locate(self, filename, loader=None):
|
||||
if fn is None:
|
||||
try:
|
||||
fn = dyld_find(
|
||||
filename, env=self.env, executable_path=self.executable_path
|
||||
)
|
||||
filename, env=self.env,
|
||||
executable_path=self.executable_path)
|
||||
self.trans_table[filename] = fn
|
||||
except ValueError:
|
||||
return None
|
||||
@@ -86,11 +83,11 @@ def run_file(self, pathname, caller=None):
|
||||
m = self.findNode(pathname, loader=caller)
|
||||
if m is None:
|
||||
if not os.path.exists(pathname):
|
||||
raise ValueError("%r does not exist" % (pathname,))
|
||||
raise ValueError('%r does not exist' % (pathname,))
|
||||
m = self.createNode(MachO, pathname)
|
||||
self.createReference(caller, m, edge_data="run_file")
|
||||
self.createReference(caller, m, edge_data='run_file')
|
||||
self.scan_node(m)
|
||||
self.msgout(2, "")
|
||||
self.msgout(2, '')
|
||||
return m
|
||||
|
||||
def load_file(self, name, caller=None):
|
||||
@@ -106,20 +103,20 @@ def load_file(self, name, caller=None):
|
||||
self.scan_node(m)
|
||||
else:
|
||||
m = self.createNode(MissingMachO, name)
|
||||
self.msgout(2, "")
|
||||
self.msgout(2, '')
|
||||
return m
|
||||
|
||||
def scan_node(self, node):
|
||||
self.msgin(2, "scan_node", node)
|
||||
self.msgin(2, 'scan_node', node)
|
||||
for header in node.headers:
|
||||
for _idx, name, filename in header.walkRelocatables():
|
||||
for idx, name, filename in header.walkRelocatables():
|
||||
assert isinstance(name, (str, unicode))
|
||||
assert isinstance(filename, (str, unicode))
|
||||
m = self.load_file(filename, caller=node)
|
||||
self.createReference(node, m, edge_data=name)
|
||||
self.msgout(2, "", node)
|
||||
self.msgout(2, '', node)
|
||||
|
||||
def itergraphreport(self, name="G"):
|
||||
def itergraphreport(self, name='G'):
|
||||
nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
|
||||
describe_edge = self.graph.describe_edge
|
||||
return itergraphreport(nodes, describe_edge, name=name)
|
||||
@@ -137,5 +134,5 @@ def main(args):
|
||||
g.graphreport()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv[1:] or ["/bin/ls"])
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:] or ['/bin/ls'])
|
||||
|
||||
58
lib/spack/external/macholib/MachOStandalone.py
vendored
58
lib/spack/external/macholib/MachOStandalone.py
vendored
@@ -1,16 +1,10 @@
|
||||
import os
|
||||
from collections import deque
|
||||
|
||||
from macholib.dyld import framework_info
|
||||
from macholib.MachOGraph import MachOGraph, MissingMachO
|
||||
from macholib.util import (
|
||||
flipwritable,
|
||||
has_filename_filter,
|
||||
in_system_path,
|
||||
iter_platform_files,
|
||||
mergecopy,
|
||||
mergetree,
|
||||
)
|
||||
from macholib.util import iter_platform_files, in_system_path, mergecopy, \
|
||||
mergetree, flipwritable, has_filename_filter
|
||||
from macholib.dyld import framework_info
|
||||
from collections import deque
|
||||
|
||||
|
||||
class ExcludedMachO(MissingMachO):
|
||||
@@ -29,20 +23,22 @@ def createNode(self, cls, name):
|
||||
|
||||
def locate(self, filename, loader=None):
|
||||
newname = super(FilteredMachOGraph, self).locate(filename, loader)
|
||||
print("locate", filename, loader, "->", newname)
|
||||
if newname is None:
|
||||
return None
|
||||
return self.delegate.locate(newname, loader=loader)
|
||||
|
||||
|
||||
class MachOStandalone(object):
|
||||
def __init__(self, base, dest=None, graph=None, env=None, executable_path=None):
|
||||
self.base = os.path.join(os.path.abspath(base), "")
|
||||
def __init__(
|
||||
self, base, dest=None, graph=None, env=None,
|
||||
executable_path=None):
|
||||
self.base = os.path.join(os.path.abspath(base), '')
|
||||
if dest is None:
|
||||
dest = os.path.join(self.base, "Contents", "Frameworks")
|
||||
dest = os.path.join(self.base, 'Contents', 'Frameworks')
|
||||
self.dest = dest
|
||||
self.mm = FilteredMachOGraph(
|
||||
self, graph=graph, env=env, executable_path=executable_path
|
||||
)
|
||||
self, graph=graph, env=env, executable_path=executable_path)
|
||||
self.changemap = {}
|
||||
self.excludes = []
|
||||
self.pending = deque()
|
||||
@@ -84,7 +80,8 @@ def copy_dylib(self, filename):
|
||||
# when two libraries link to the same dylib but using different
|
||||
# symlinks.
|
||||
if os.path.islink(filename):
|
||||
dest = os.path.join(self.dest, os.path.basename(os.path.realpath(filename)))
|
||||
dest = os.path.join(
|
||||
self.dest, os.path.basename(os.path.realpath(filename)))
|
||||
else:
|
||||
dest = os.path.join(self.dest, os.path.basename(filename))
|
||||
|
||||
@@ -99,9 +96,9 @@ def mergetree(self, src, dest):
|
||||
return mergetree(src, dest)
|
||||
|
||||
def copy_framework(self, info):
|
||||
dest = os.path.join(self.dest, info["shortname"] + ".framework")
|
||||
destfn = os.path.join(self.dest, info["name"])
|
||||
src = os.path.join(info["location"], info["shortname"] + ".framework")
|
||||
dest = os.path.join(self.dest, info['shortname'] + '.framework')
|
||||
destfn = os.path.join(self.dest, info['name'])
|
||||
src = os.path.join(info['location'], info['shortname'] + '.framework')
|
||||
if not os.path.exists(dest):
|
||||
self.mergetree(src, dest)
|
||||
self.pending.append((destfn, iter_platform_files(dest)))
|
||||
@@ -110,7 +107,7 @@ def copy_framework(self, info):
|
||||
def run(self, platfiles=None, contents=None):
|
||||
mm = self.mm
|
||||
if contents is None:
|
||||
contents = "@executable_path/.."
|
||||
contents = '@executable_path/..'
|
||||
if platfiles is None:
|
||||
platfiles = iter_platform_files(self.base)
|
||||
|
||||
@@ -124,20 +121,18 @@ def run(self, platfiles=None, contents=None):
|
||||
mm.run_file(fn, caller=ref)
|
||||
|
||||
changemap = {}
|
||||
skipcontents = os.path.join(os.path.dirname(self.dest), "")
|
||||
skipcontents = os.path.join(os.path.dirname(self.dest), '')
|
||||
machfiles = []
|
||||
|
||||
for node in mm.flatten(has_filename_filter):
|
||||
machfiles.append(node)
|
||||
dest = os.path.join(
|
||||
contents,
|
||||
os.path.normpath(node.filename[len(skipcontents) :]), # noqa: E203
|
||||
)
|
||||
contents, os.path.normpath(node.filename[len(skipcontents):]))
|
||||
changemap[node.filename] = dest
|
||||
|
||||
def changefunc(path):
|
||||
if path.startswith("@loader_path/"):
|
||||
# This is a quick hack for py2app: In that
|
||||
if path.startswith('@loader_path/'):
|
||||
# XXX: This is a quick hack for py2app: In that
|
||||
# usecase paths like this are found in the load
|
||||
# commands of relocatable wheels. Those don't
|
||||
# need rewriting.
|
||||
@@ -145,8 +140,9 @@ def changefunc(path):
|
||||
|
||||
res = mm.locate(path)
|
||||
rv = changemap.get(res)
|
||||
if rv is None and path.startswith("@loader_path/"):
|
||||
rv = changemap.get(mm.locate(mm.trans_table.get((node.filename, path))))
|
||||
if rv is None and path.startswith('@loader_path/'):
|
||||
rv = changemap.get(mm.locate(mm.trans_table.get(
|
||||
(node.filename, path))))
|
||||
return rv
|
||||
|
||||
for node in machfiles:
|
||||
@@ -154,14 +150,14 @@ def changefunc(path):
|
||||
if fn is None:
|
||||
continue
|
||||
rewroteAny = False
|
||||
for _header in node.headers:
|
||||
for header in node.headers:
|
||||
if node.rewriteLoadCommands(changefunc):
|
||||
rewroteAny = True
|
||||
if rewroteAny:
|
||||
old_mode = flipwritable(fn)
|
||||
try:
|
||||
with open(fn, "rb+") as f:
|
||||
for _header in node.headers:
|
||||
with open(fn, 'rb+') as f:
|
||||
for header in node.headers:
|
||||
f.seek(0)
|
||||
node.write(f)
|
||||
f.seek(0, 2)
|
||||
|
||||
42
lib/spack/external/macholib/SymbolTable.py
vendored
42
lib/spack/external/macholib/SymbolTable.py
vendored
@@ -3,20 +3,12 @@
|
||||
"""
|
||||
from __future__ import with_statement
|
||||
|
||||
from macholib.mach_o import relocation_info, dylib_reference, dylib_module
|
||||
from macholib.mach_o import dylib_table_of_contents, nlist, nlist_64
|
||||
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
|
||||
import sys
|
||||
|
||||
from macholib.mach_o import (
|
||||
MH_CIGAM_64,
|
||||
MH_MAGIC_64,
|
||||
dylib_module,
|
||||
dylib_reference,
|
||||
dylib_table_of_contents,
|
||||
nlist,
|
||||
nlist_64,
|
||||
relocation_info,
|
||||
)
|
||||
|
||||
__all__ = ["SymbolTable"]
|
||||
__all__ = ['SymbolTable']
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
range = xrange # noqa: F821
|
||||
@@ -29,7 +21,7 @@ def __init__(self, macho, header=None, openfile=None):
|
||||
if header is None:
|
||||
header = macho.headers[0]
|
||||
self.macho_header = header
|
||||
with openfile(macho.filename, "rb") as fh:
|
||||
with openfile(macho.filename, 'rb') as fh:
|
||||
self.symtab = header.getSymbolTableCommand()
|
||||
self.dysymtab = header.getDynamicSymbolTableCommand()
|
||||
|
||||
@@ -51,32 +43,22 @@ def readSymbolTable(self, fh):
|
||||
else:
|
||||
cls = nlist
|
||||
|
||||
for _i in range(cmd.nsyms):
|
||||
for i in range(cmd.nsyms):
|
||||
cmd = cls.from_fileobj(fh, _endian_=self.macho_header.endian)
|
||||
if cmd.n_un == 0:
|
||||
nlists.append((cmd, ""))
|
||||
nlists.append((cmd, ''))
|
||||
else:
|
||||
nlists.append(
|
||||
(
|
||||
cmd,
|
||||
strtab[cmd.n_un : strtab.find(b"\x00", cmd.n_un)], # noqa: E203
|
||||
)
|
||||
)
|
||||
(cmd, strtab[cmd.n_un:strtab.find(b'\x00', cmd.n_un)]))
|
||||
return nlists
|
||||
|
||||
def readDynamicSymbolTable(self, fh):
|
||||
cmd = self.dysymtab
|
||||
nlists = self.nlists
|
||||
|
||||
self.localsyms = nlists[
|
||||
cmd.ilocalsym : cmd.ilocalsym + cmd.nlocalsym # noqa: E203
|
||||
]
|
||||
self.extdefsyms = nlists[
|
||||
cmd.iextdefsym : cmd.iextdefsym + cmd.nextdefsym # noqa: E203
|
||||
]
|
||||
self.undefsyms = nlists[
|
||||
cmd.iundefsym : cmd.iundefsym + cmd.nundefsym # noqa: E203
|
||||
]
|
||||
self.localsyms = nlists[cmd.ilocalsym:cmd.ilocalsym+cmd.nlocalsym]
|
||||
self.extdefsyms = nlists[cmd.iextdefsym:cmd.iextdefsym+cmd.nextdefsym]
|
||||
self.undefsyms = nlists[cmd.iundefsym:cmd.iundefsym+cmd.nundefsym]
|
||||
if cmd.tocoff == 0:
|
||||
self.toc = None
|
||||
else:
|
||||
@@ -93,7 +75,7 @@ def readmodtab(self, fh, off, n):
|
||||
def readsym(self, fh, off, n):
|
||||
fh.seek(self.macho_header.offset + off)
|
||||
refs = []
|
||||
for _i in range(n):
|
||||
for i in range(n):
|
||||
ref = dylib_reference.from_fileobj(fh)
|
||||
isym, flags = divmod(ref.isym_flags, 256)
|
||||
refs.append((self.nlists[isym], flags))
|
||||
|
||||
2 lib/spack/external/macholib/__init__.py vendored
@@ -5,4 +5,4 @@

And also Apple's documentation.
"""
__version__ = "1.15.2"
__version__ = '1.10'

25 lib/spack/external/macholib/__main__.py vendored
@@ -1,24 +1,26 @@
from __future__ import absolute_import, print_function

from __future__ import print_function, absolute_import
import os
import sys

from macholib import macho_dump, macho_standalone
from macholib.util import is_platform_file
from macholib import macho_dump
from macholib import macho_standalone

gCommand = None


def check_file(fp, path, callback):
if not os.path.exists(path):
print("%s: %s: No such file or directory" % (gCommand, path), file=sys.stderr)
print(
'%s: %s: No such file or directory' % (gCommand, path),
file=sys.stderr)
return 1

try:
is_plat = is_platform_file(path)

except IOError as msg:
print("%s: %s: %s" % (gCommand, path, msg), file=sys.stderr)
print('%s: %s: %s' % (gCommand, path, msg), file=sys.stderr)
return 1

else:
@@ -32,9 +34,10 @@ def walk_tree(callback, paths):

for base in paths:
if os.path.isdir(base):
for root, _dirs, files in os.walk(base):
for root, dirs, files in os.walk(base):
for fn in files:
err |= check_file(sys.stdout, os.path.join(root, fn), callback)
err |= check_file(
sys.stdout, os.path.join(root, fn), callback)
else:
err |= check_file(sys.stdout, base, callback)

@@ -57,17 +60,17 @@ def main():

gCommand = sys.argv[1]

if gCommand == "dump":
if gCommand == 'dump':
walk_tree(macho_dump.print_file, sys.argv[2:])

elif gCommand == "find":
elif gCommand == 'find':
walk_tree(lambda fp, path: print(path, file=fp), sys.argv[2:])

elif gCommand == "standalone":
elif gCommand == 'standalone':
for dn in sys.argv[2:]:
macho_standalone.standaloneApp(dn)

elif gCommand in ("help", "--help"):
elif gCommand in ('help', '--help'):
print_usage(sys.stdout)
sys.exit(0)

15 lib/spack/external/macholib/_cmdline.py vendored
@@ -1,8 +1,7 @@
"""
Internal helpers for basic commandline tools
"""
from __future__ import absolute_import, print_function

from __future__ import print_function, absolute_import
import os
import sys

@@ -11,16 +10,15 @@

def check_file(fp, path, callback):
if not os.path.exists(path):
print(
"%s: %s: No such file or directory" % (sys.argv[0], path), file=sys.stderr
)
print('%s: %s: No such file or directory' % (
sys.argv[0], path), file=sys.stderr)
return 1

try:
is_plat = is_platform_file(path)

except IOError as msg:
print("%s: %s: %s" % (sys.argv[0], path, msg), file=sys.stderr)
print('%s: %s: %s' % (sys.argv[0], path, msg), file=sys.stderr)
return 1

else:
@@ -40,9 +38,10 @@ def main(callback):

for base in args:
if os.path.isdir(base):
for root, _dirs, files in os.walk(base):
for root, dirs, files in os.walk(base):
for fn in files:
err |= check_file(sys.stdout, os.path.join(root, fn), callback)
err |= check_file(
sys.stdout, os.path.join(root, fn), callback)
else:
err |= check_file(sys.stdout, base, callback)

108
lib/spack/external/macholib/dyld.py
vendored
108
lib/spack/external/macholib/dyld.py
vendored
@@ -2,45 +2,18 @@
|
||||
dyld emulation
|
||||
"""
|
||||
|
||||
import ctypes
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
from itertools import chain
|
||||
|
||||
from macholib.dylib import dylib_info
|
||||
import os
|
||||
import sys
|
||||
|
||||
from macholib.framework import framework_info
|
||||
from macholib.dylib import dylib_info
|
||||
|
||||
__all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"]
|
||||
|
||||
if sys.platform == "darwin" and [
|
||||
int(x) for x in platform.mac_ver()[0].split(".")[:2]
|
||||
] >= [10, 16]:
|
||||
try:
|
||||
libc = ctypes.CDLL("libSystem.dylib")
|
||||
|
||||
except OSError:
|
||||
_dyld_shared_cache_contains_path = None
|
||||
|
||||
else:
|
||||
try:
|
||||
_dyld_shared_cache_contains_path = libc._dyld_shared_cache_contains_path
|
||||
except AttributeError:
|
||||
_dyld_shared_cache_contains_path = None
|
||||
|
||||
else:
|
||||
_dyld_shared_cache_contains_path.restype = ctypes.c_bool
|
||||
_dyld_shared_cache_contains_path.argtypes = [ctypes.c_char_p]
|
||||
|
||||
if sys.version_info[0] != 2:
|
||||
__dyld_shared_cache_contains_path = _dyld_shared_cache_contains_path
|
||||
|
||||
def _dyld_shared_cache_contains_path(path):
|
||||
return __dyld_shared_cache_contains_path(path.encode())
|
||||
|
||||
|
||||
else:
|
||||
_dyld_shared_cache_contains_path = None
|
||||
__all__ = [
|
||||
'dyld_find', 'framework_find',
|
||||
'framework_info', 'dylib_info',
|
||||
]
|
||||
|
||||
# These are the defaults as per man dyld(1)
|
||||
#
|
||||
@@ -58,16 +31,13 @@ def _dyld_shared_cache_contains_path(path):
|
||||
"/usr/lib",
|
||||
]
|
||||
|
||||
# XXX: Is this function still needed?
|
||||
if sys.version_info[0] == 2:
|
||||
|
||||
def _ensure_utf8(s):
|
||||
if isinstance(s, unicode): # noqa: F821
|
||||
return s.encode("utf8")
|
||||
return s.encode('utf8')
|
||||
return s
|
||||
|
||||
|
||||
else:
|
||||
|
||||
def _ensure_utf8(s):
|
||||
if s is not None and not isinstance(s, str):
|
||||
raise ValueError(s)
|
||||
@@ -78,31 +48,31 @@ def _dyld_env(env, var):
|
||||
if env is None:
|
||||
env = os.environ
|
||||
rval = env.get(var)
|
||||
if rval is None or rval == "":
|
||||
if rval is None or rval == '':
|
||||
return []
|
||||
return rval.split(":")
|
||||
return rval.split(':')
|
||||
|
||||
|
||||
def dyld_image_suffix(env=None):
|
||||
if env is None:
|
||||
env = os.environ
|
||||
return env.get("DYLD_IMAGE_SUFFIX")
|
||||
return env.get('DYLD_IMAGE_SUFFIX')
|
||||
|
||||
|
||||
def dyld_framework_path(env=None):
|
||||
return _dyld_env(env, "DYLD_FRAMEWORK_PATH")
|
||||
return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')
|
||||
|
||||
|
||||
def dyld_library_path(env=None):
|
||||
return _dyld_env(env, "DYLD_LIBRARY_PATH")
|
||||
return _dyld_env(env, 'DYLD_LIBRARY_PATH')
|
||||
|
||||
|
||||
def dyld_fallback_framework_path(env=None):
|
||||
return _dyld_env(env, "DYLD_FALLBACK_FRAMEWORK_PATH")
|
||||
return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
|
||||
|
||||
|
||||
def dyld_fallback_library_path(env=None):
|
||||
return _dyld_env(env, "DYLD_FALLBACK_LIBRARY_PATH")
|
||||
return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
|
||||
|
||||
|
||||
def dyld_image_suffix_search(iterator, env=None):
|
||||
@@ -113,8 +83,8 @@ def dyld_image_suffix_search(iterator, env=None):
|
||||
|
||||
def _inject(iterator=iterator, suffix=suffix):
|
||||
for path in iterator:
|
||||
if path.endswith(".dylib"):
|
||||
yield path[: -len(".dylib")] + suffix + ".dylib"
|
||||
if path.endswith('.dylib'):
|
||||
yield path[:-len('.dylib')] + suffix + '.dylib'
|
||||
else:
|
||||
yield path + suffix
|
||||
yield path
|
||||
@@ -132,7 +102,7 @@ def dyld_override_search(name, env=None):
|
||||
|
||||
if framework is not None:
|
||||
for path in dyld_framework_path(env):
|
||||
yield os.path.join(path, framework["name"])
|
||||
yield os.path.join(path, framework['name'])
|
||||
|
||||
# If DYLD_LIBRARY_PATH is set then use the first file that exists
|
||||
# in the path. If none use the original name.
|
||||
@@ -144,18 +114,16 @@ def dyld_executable_path_search(name, executable_path=None):
|
||||
# If we haven't done any searching and found a library and the
|
||||
# dylib_name starts with "@executable_path/" then construct the
|
||||
# library name.
|
||||
if name.startswith("@executable_path/") and executable_path is not None:
|
||||
yield os.path.join(
|
||||
executable_path, name[len("@executable_path/") :] # noqa: E203
|
||||
)
|
||||
if name.startswith('@executable_path/') and executable_path is not None:
|
||||
yield os.path.join(executable_path, name[len('@executable_path/'):])
|
||||
|
||||
|
||||
def dyld_loader_search(name, loader_path=None):
|
||||
# If we haven't done any searching and found a library and the
|
||||
# dylib_name starts with "@loader_path/" then construct the
|
||||
# library name.
|
||||
if name.startswith("@loader_path/") and loader_path is not None:
|
||||
yield os.path.join(loader_path, name[len("@loader_path/") :]) # noqa: E203
|
||||
if name.startswith('@loader_path/') and loader_path is not None:
|
||||
yield os.path.join(loader_path, name[len('@loader_path/'):])
|
||||
|
||||
|
||||
def dyld_default_search(name, env=None):
|
||||
@@ -168,11 +136,11 @@ def dyld_default_search(name, env=None):
|
||||
|
||||
if fallback_framework_path:
|
||||
for path in fallback_framework_path:
|
||||
yield os.path.join(path, framework["name"])
|
||||
yield os.path.join(path, framework['name'])
|
||||
|
||||
else:
|
||||
for path in _DEFAULT_FRAMEWORK_FALLBACK:
|
||||
yield os.path.join(path, framework["name"])
|
||||
yield os.path.join(path, framework['name'])
|
||||
|
||||
fallback_library_path = dyld_fallback_library_path(env)
|
||||
if fallback_library_path:
|
||||
@@ -190,20 +158,12 @@ def dyld_find(name, executable_path=None, env=None, loader_path=None):
|
||||
"""
|
||||
name = _ensure_utf8(name)
|
||||
executable_path = _ensure_utf8(executable_path)
|
||||
for path in dyld_image_suffix_search(
|
||||
chain(
|
||||
dyld_override_search(name, env),
|
||||
dyld_executable_path_search(name, executable_path),
|
||||
dyld_loader_search(name, loader_path),
|
||||
dyld_default_search(name, env),
|
||||
),
|
||||
env,
|
||||
):
|
||||
if (
|
||||
_dyld_shared_cache_contains_path is not None
|
||||
and _dyld_shared_cache_contains_path(path)
|
||||
):
|
||||
return path
|
||||
for path in dyld_image_suffix_search(chain(
|
||||
dyld_override_search(name, env),
|
||||
dyld_executable_path_search(name, executable_path),
|
||||
dyld_loader_search(name, loader_path),
|
||||
dyld_default_search(name, env),
|
||||
), env):
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
raise ValueError("dylib %s could not be found" % (name,))
|
||||
@@ -222,9 +182,9 @@ def framework_find(fn, executable_path=None, env=None):
|
||||
return dyld_find(fn, executable_path=executable_path, env=env)
|
||||
except ValueError:
|
||||
pass
|
||||
fmwk_index = fn.rfind(".framework")
|
||||
fmwk_index = fn.rfind('.framework')
|
||||
if fmwk_index == -1:
|
||||
fmwk_index = len(fn)
|
||||
fn += ".framework"
|
||||
fn += '.framework'
|
||||
fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
|
||||
return dyld_find(fn, executable_path=executable_path, env=env)
|
||||
|
||||
8 lib/spack/external/macholib/dylib.py vendored
@@ -4,10 +4,9 @@

import re

__all__ = ["dylib_info"]
__all__ = ['dylib_info']

_DYLIB_RE = re.compile(
r"""(?x)
_DYLIB_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
(?P<shortname>\w+?)
@@ -15,8 +14,7 @@
(?:_(?P<suffix>[^._]+))?
\.dylib$
)
"""
)
""")

def dylib_info(filename):

Some files were not shown because too many files have changed in this diff