Compare commits
2 commits: v0.19.1 ... bugfix/bin

Commits in this range (author and date columns carried no recoverable data):
- d82b537339
- 3d8d1fe924
.flake8 (43 changed lines)

@@ -1,25 +1,43 @@
# -*- conf -*-
# flake8 settings for Spack.
# flake8 settings for Spack core files.
#
# These exceptions are for Spack core files. We're slightly more lenient
# with packages. See .flake8_packages for that.
#
# This is the only flake8 rule Spack violates somewhat flagrantly
# E1: Indentation
# - E129: visually indented line with same indent as next logical line
#
# E2: Whitespace
# - E221: multiple spaces before operator
# - E241: multiple spaces after ','
# - E272: multiple spaces before keyword
#
# E7: Statement
# - E731: do not assign a lambda expression, use a def
#
# This is the only flake8 exception needed when using Black.
# - E203: white space around slice operators can be required, ignore : warn
# W5: Line break warning
# - W503: line break before binary operator
# - W504: line break after binary operator
#
# We still allow these in packages (Would like to get rid of them or rely on mypy
# in the future)
# - F403: from/import * used; unable to detect undefined names
# These are required to get the package.py files to test clean:
# - F999: syntax error in doctest
#
# N8: PEP8-naming
# - N801: class names should use CapWords convention
# - N813: camelcase imported as lowercase
# - N814: camelcase imported as constant
#
# F4: pyflakes import checks, these are now checked by mypy more precisely
# - F403: from module import *
# - F405: undefined name or from *
# - F821: undefined name (needed with from/import *)
#
# Black ignores, these are incompatible with black style and do not follow PEP-8
# - E203: white space around slice operators can be required, ignore : warn
# - W503: see above, already ignored for line-breaks
#
[flake8]
#ignore = E129,,W503,W504,F999,N801,N813,N814,F403,F405,E203
extend-ignore = E731,E203
max-line-length = 99
ignore = E129,E221,E241,E272,E731,W503,W504,F999,N801,N813,N814,F403,F405
max-line-length = 88

# F4: Import
# - F405: `name` may be undefined, or undefined from star imports: `module`

@@ -28,8 +46,7 @@ max-line-length = 99
# - F821: undefined name `name`
#
per-file-ignores =
    var/spack/repos/*/package.py:F403,F405,F821
    *-ci-package.py:F403,F405,F821
    var/spack/repos/*/package.py:F405,F821

# exclude things we usually do not want linting for.
# These still get linted when passed explicitly, as when spack flake8 passes

.git-blame-ignore-revs (deleted)

@@ -1,3 +0,0 @@
# .git-blame-ignore-revs
# Formatted entire codebase with black
f52f6e99dbf1131886a80112b8c79dfc414afb7c
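For reference, flake8's `ignore` option replaces the default ignore list outright, while `extend-ignore` appends to it, which is why the newer configuration above can shrink to two codes and still keep flake8's defaults. A minimal sketch of that pattern (codes and globs are the ones from the hunk; comments are ours):

```ini
[flake8]
# E731 (lambda assignment) and E203 (whitespace before ':') conflict with
# Black's formatting, so append them to flake8's default ignore list.
extend-ignore = E731,E203
max-line-length = 99

# Star-import diagnostics are relaxed only for package recipes.
per-file-ignores =
    var/spack/repos/*/package.py:F403,F405,F821
```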
.github/ISSUE_TEMPLATE/test_error.yml (vendored, 62 changed lines)

@@ -1,62 +0,0 @@
name: "\U0001F4A5 Tests error"
description: Some package in Spack had stand-alone tests that didn't pass
title: "Testing issue: "
labels: [test-error]
body:
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce the failure(s) or link(s) to test output(s)
      description: |
        Fill in the test output from the exact spec that is having stand-alone test failures. Links to test outputs (e.g., CDash) can also be provided.
      value: |
        ```console
        $ spack spec -I <spec>
        ...
        ```
  - type: textarea
    id: error
    attributes:
      label: Error message
      description: |
        Please post the error message from spack inside the `<details>` tag below:
      value: |
        <details><summary>Error message</summary><pre>
        ...
        </pre></details>
    validations:
      required: true
  - type: textarea
    id: information
    attributes:
      label: Information on your system or the test runner
      description: Please include the output of `spack debug report` for your system.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: |
        Please upload test logs or any additional information about the problem.
  - type: markdown
    attributes:
      value: |
        Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and **@mention** them here if they exist.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have reported the version of Spack/Python/Platform/Runner
          required: true
        - label: I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
          required: true
        - label: I have uploaded any available logs
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
.github/workflows/audit.yaml (vendored, 44 changed lines)

@@ -1,44 +0,0 @@
name: audit

on:
  workflow_call:
    inputs:
      with_coverage:
        required: true
        type: string
      python_version:
        required: true
        type: string

concurrency:
  group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

jobs:
  # Run audits on all the packages in the built-in repository
  package-audits:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools pytest codecov coverage[toml]
      - name: Package audits (with coverage)
        if: ${{ inputs.with_coverage == 'true' }}
        run: |
          . share/spack/setup-env.sh
          coverage run $(which spack) audit packages
          coverage combine
          coverage xml
      - name: Package audits (without coverage)
        if: ${{ inputs.with_coverage == 'false' }}
        run: |
          . share/spack/setup-env.sh
          $(which spack) audit packages
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,audits
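Because audit.yaml declares `workflow_call` with typed inputs, it is a reusable workflow rather than a standalone one. A minimal caller sketch, mirroring how the ci.yaml further down invokes it (inputs arrive as strings, hence the quoting):

```yaml
jobs:
  audit-ancient-python:
    # Reusable workflows are referenced by repository-relative path.
    uses: ./.github/workflows/audit.yaml
    with:
      with_coverage: 'true'
      python_version: '2.7'
```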
.github/workflows/bootstrap-test.sh (vendored, 7 changed lines)

@@ -1,7 +0,0 @@
#!/bin/bash
set -ex
source share/spack/setup-env.sh
$PYTHON bin/spack bootstrap disable spack-install
$PYTHON bin/spack -d solve zlib
tree $BOOTSTRAP/store
exit 0
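The script assumes `PYTHON` and `BOOTSTRAP` are provided by its caller; in the bootstrap workflow below it runs under `spack-tmpconfig`, which is assumed to supply a throwaway Spack configuration (and the temporary bootstrap store path). A hedged invocation sketch, with an illustrative interpreter choice:

```bash
# Illustrative: pick an interpreter, then run the test through the wrapper,
# exactly as the matrix loops in bootstrap.yml do below.
export PYTHON=python3
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
```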
.github/workflows/bootstrap.yml (vendored, 150 changed lines)

@@ -3,19 +3,33 @@ name: Bootstrapping
on:
  # This Workflow can be triggered manually
  workflow_dispatch:
  workflow_call:
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the
      # built-in repository or documentation
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
      - '!var/spack/repos/builtin/packages/clingo/**'
      - '!var/spack/repos/builtin/packages/python/**'
      - '!var/spack/repos/builtin/packages/re2c/**'
      - 'lib/spack/docs/**'
  schedule:
    # nightly at 2:16 AM
    - cron: '16 2 * * *'

concurrency:
  group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:

  fedora-clingo-sources:
    runs-on: ubuntu-latest
    container: "fedora:latest"
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        run: |
@@ -24,9 +38,7 @@ jobs:
            make patch unzip which xz python3 python3-devel tree \
            cmake bison bison-devel libstdc++-static
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Setup non-root user
        run: |
          # See [1] below
@@ -37,13 +49,13 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.3
          spack bootstrap untrust github-actions-v0.2
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -51,6 +63,7 @@ jobs:
  ubuntu-clingo-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        env:
@@ -62,9 +75,7 @@ jobs:
            make patch unzip xz-utils python3 python3-dev tree \
            cmake bison
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Setup non-root user
        run: |
          # See [1] below
@@ -75,13 +86,13 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.3
          spack bootstrap untrust github-actions-v0.2
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -89,6 +100,7 @@ jobs:
  ubuntu-clingo-binaries-and-patchelf:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        env:
@@ -99,9 +111,7 @@ jobs:
            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
            make patch unzip xz-utils python3 python3-dev tree
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Setup non-root user
        run: |
          # See [1] below
@@ -112,6 +122,7 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
@@ -123,6 +134,7 @@ jobs:
  opensuse-clingo-sources:
    runs-on: ubuntu-latest
    container: "opensuse/leap:latest"
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        run: |
@@ -133,38 +145,36 @@ jobs:
            make patch unzip which xz python3 python3-devel tree \
            cmake bison
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Setup repo
        run: |
          # See [1] below
          git config --global --add safe.directory /__w/spack/spack
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.3
          spack bootstrap untrust github-actions-v0.2
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-clingo-sources:
    runs-on: macos-latest
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        run: |
          brew install cmake bison@2.7 tree
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.3
          spack bootstrap untrust github-actions-v0.2
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -173,70 +183,53 @@ jobs:
    runs-on: ${{ matrix.macos-version }}
    strategy:
      matrix:
        macos-version: ['macos-11', 'macos-12']
        python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        run: |
          brew install tree
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: ${{ matrix.python-version }}
      - name: Bootstrap clingo
        run: |
          set -ex
          for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
            if [[ -d "$ver_dir" ]] ; then
              if $ver_dir/python --version ; then
                export PYTHON="$ver_dir/python"
                not_found=0
                old_path="$PATH"
                export PATH="$ver_dir:$PATH"
                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
                export PATH="$old_path"
              fi
            fi
            # NOTE: test all pythons that exist, not all do on 12
          done
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-clingo-binaries:
    runs-on: ubuntu-20.04
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          fetch-depth: 0
          python-version: ${{ matrix.python-version }}
      - name: Setup repo
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          set -ex
          for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
            if [[ -d "$ver_dir" ]] ; then
              if $ver_dir/python --version ; then
                export PYTHON="$ver_dir/python"
                not_found=0
                old_path="$PATH"
                export PATH="$ver_dir:$PATH"
                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
                export PATH="$old_path"
              fi
            fi
            if (($not_found)) ; then
              echo Required python version $ver not found in runner!
              exit 1
            fi
          done
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-gnupg-binaries:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        env:
@@ -247,9 +240,7 @@ jobs:
            bzip2 curl file g++ gcc patchelf gfortran git gzip \
            make patch unzip xz-utils python3 python3-dev tree
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Setup non-root user
        run: |
          # See [1] below
@@ -260,18 +251,20 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap GnuPG
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable spack-install
          spack bootstrap untrust spack-install
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  ubuntu-gnupg-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        env:
@@ -283,9 +276,7 @@ jobs:
            make patch unzip xz-utils python3 python3-dev tree \
            gawk
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Setup non-root user
        run: |
          # See [1] below
@@ -296,19 +287,20 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap GnuPG
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.3
          spack bootstrap untrust github-actions-v0.2
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  macos-gnupg-binaries:
    runs-on: macos-latest
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        run: |
@@ -316,16 +308,17 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable spack-install
          spack bootstrap untrust spack-install
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  macos-gnupg-sources:
    runs-on: macos-latest
    if: github.repository == 'spack/spack'
    steps:
      - name: Install dependencies
        run: |
@@ -333,13 +326,12 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable github-actions-v0.3
          spack bootstrap untrust github-actions-v0.2
          spack -d gpg list
          tree ~/.spack/bootstrap/store/
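A recurring edit in this and the following workflows is the concurrency group key: the hand-written per-workflow prefix (`bootstrap-...`) is traded for the builtin `github.workflow` context, which yields distinct groups per workflow without repeating its name. The resulting pattern, as it appears in the hunks:

```yaml
concurrency:
  # One group per workflow and per PR (or per push), so a newer run
  # cancels any in-flight run for the same change.
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true
```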
.github/workflows/build-containers.yml (vendored, 18 changed lines)

@@ -13,14 +13,14 @@ on:
    paths:
      - '.github/workflows/build-containers.yml'
      - 'share/spack/docker/*'
      - 'share/spack/templates/container/*'
      - 'share/templates/container/*'
      - 'lib/spack/spack/container/*'
  # Let's also build & tag Spack containers on releases.
  release:
    types: [published]

concurrency:
  group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:
@@ -50,7 +50,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
@@ -80,19 +80,19 @@ jobs:
          fi

      - name: Upload Dockerfile
        uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: dockerfiles
          path: dockerfiles

      - name: Set up QEMU
        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
        uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1
        uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -100,13 +100,13 @@ jobs:

      - name: Log in to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
        uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml (vendored, 92 changed lines)

@@ -1,92 +0,0 @@
name: ci

on:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**

concurrency:
  group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

jobs:
  prechecks:
    needs: [ changes ]
    uses: ./.github/workflows/valid-style.yml
    with:
      with_coverage: ${{ needs.changes.outputs.core }}
  audit-ancient-python:
    uses: ./.github/workflows/audit.yaml
    needs: [ changes ]
    with:
      with_coverage: ${{ needs.changes.outputs.core }}
      python_version: 2.7
  all-prechecks:
    needs: [ prechecks ]
    runs-on: ubuntu-latest
    steps:
      - name: Success
        run: "true"
  # Check which files have been updated by the PR
  changes:
    runs-on: ubuntu-latest
    # Set job outputs to values from filter step
    outputs:
      bootstrap: ${{ steps.filter.outputs.bootstrap }}
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
      # For pull requests it's not necessary to checkout the code
      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
        id: filter
        with:
          # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
          # Don't run if we only modified packages in the
          # built-in repository or documentation
          filters: |
            bootstrap:
              - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
              - 'var/spack/repos/builtin/packages/clingo/**'
              - 'var/spack/repos/builtin/packages/python/**'
              - 'var/spack/repos/builtin/packages/re2c/**'
              - 'lib/spack/**'
              - 'share/spack/**'
              - '.github/workflows/bootstrap.yml'
              - '.github/workflows/ci.yaml'
            core:
              - './!(var/**)/**'
            packages:
              - 'var/**'
  # Some links for easier reference:
  #
  # "github" context: https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
  # job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
  # setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
  #
  bootstrap:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/bootstrap.yml
  unit-tests:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/unit_tests.yaml
  windows:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks ]
    uses: ./.github/workflows/windows_python.yml
  all:
    needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
    runs-on: ubuntu-latest
    steps:
      - name: Success
        run: "true"
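The `changes` job above is the hub of this workflow: dorny/paths-filter turns changed paths into boolean job outputs, and downstream jobs gate on them. A reduced sketch of the wiring, using names taken from the hunk above:

```yaml
jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      core: ${{ steps.filter.outputs.core }}
    steps:
      - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
        id: filter
        with:
          filters: |
            core:
              - './!(var/**)/**'   # anything outside var/
  unit-tests:
    needs: [ changes ]
    # Job outputs are strings, so compare against the literal 'true'.
    if: ${{ needs.changes.outputs.core == 'true' }}
    uses: ./.github/workflows/unit_tests.yaml
```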
.github/workflows/macos_python.yml (vendored, new file, 71 changed lines)

@@ -0,0 +1,71 @@
# These are nightly package tests for macOS
# focus areas:
# - initial user experience
# - scientific python stack
name: macOS builds nightly

on:
  schedule:
    # nightly at 1 AM
    - cron: '0 1 * * *'
  pull_request:
    branches:
      - develop
    paths:
      # Run if we modify this yaml file
      - '.github/workflows/macos_python.yml'
      # TODO: run if we touch any of the recipes involved in this

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

# GitHub Action Limits
# https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions

jobs:
  install_gcc:
    name: gcc with clang
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: 3.9
      - name: spack install
        run: |
          . .github/workflows/install_spack.sh
          # 9.2.0 is the latest version on which we apply homebrew patch
          spack install -v --fail-fast gcc@11.2.0 %apple-clang

  install_jupyter_clang:
    name: jupyter
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    timeout-minutes: 700
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: 3.9
      - name: spack install
        run: |
          . .github/workflows/install_spack.sh
          spack install -v --fail-fast py-jupyterlab %apple-clang

  install_scipy_clang:
    name: scipy, mpl, pd
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: 3.9
      - name: spack install
        run: |
          . .github/workflows/install_spack.sh
          spack install -v --fail-fast py-scipy %apple-clang
          spack install -v --fail-fast py-matplotlib %apple-clang
          spack install -v --fail-fast py-pandas %apple-clang
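In the install lines above, `@` pins a version and `%` pins a compiler in Spack's spec syntax. For reference (package and version are just the ones from this workflow):

```bash
# name@version %compiler: build GCC 11.2.0 with the system Apple Clang
spack install -v --fail-fast gcc@11.2.0 %apple-clang
# Preview the concretized dependency tree for the same spec first.
spack spec -I gcc@11.2.0 %apple-clang
```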
.github/workflows/setup_git.ps1 (vendored, 4 changed lines)

@@ -6,10 +6,6 @@ git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git config --global core.longpaths true

# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
# This is needed to let some fixture in our unit-test suite run
git config --global protocol.file.allow always

if ($(git branch --show-current) -ne "develop")
{
    git branch develop origin/develop
.github/workflows/setup_git.sh (vendored, 4 changed lines)

@@ -2,10 +2,6 @@
git config --global user.email "spack@example.com"
git config --global user.name "Test User"

# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
# This is needed to let some fixture in our unit-test suite run
git config --global protocol.file.allow always

# create a local pr base branch
if [[ -n $GITHUB_BASE_REF ]]; then
    git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
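Both setup scripts add the same `protocol.file.allow` knob: git releases patched for CVE-2022-39253 stop following `file://` submodule URLs by default, which breaks repository fixtures in the test suite. A hedged sketch of what the setting re-enables (the path is illustrative):

```bash
# Re-enable the file transport, which patched git restricts by default.
git config --global protocol.file.allow always
# After which fixtures that wire in local repos as submodules work again:
git submodule add file:///tmp/fixture-repo.git
```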
.github/workflows/unit_tests.yaml (vendored, 266 changed lines)

@@ -1,49 +1,120 @@
name: unit tests
name: linux tests

on:
  workflow_dispatch:
  workflow_call:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**

concurrency:
  group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:
  # Validate that the code can be run on all the Python versions
  # supported by Spack
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install Python Packages
        run: |
          pip install --upgrade pip
          pip install --upgrade vermin
      - name: vermin (Spack's Core)
        run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.5- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
      - name: vermin (Repositories)
        run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.5- -vvv var/spack/repos
  # Run style checks on the files that have been changed
  style:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools types-six
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Run style tests
        run: |
          share/spack/qa/run-style-tests
  # Check which files have been updated by the PR
  changes:
    runs-on: ubuntu-latest
    # Set job outputs to values from filter step
    outputs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
      with_coverage: ${{ steps.coverage.outputs.with_coverage }}
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
      # For pull requests it's not necessary to checkout the code
      - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
        id: filter
        with:
          # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
          filters: |
            core:
              - './!(var/**)/**'
            packages:
              - 'var/**'
      # Some links for easier reference:
      #
      # "github" context: https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
      # job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
      # setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
      #
      - id: coverage
        # Run the subsequent jobs with coverage if core has been modified,
        # regardless of whether this is a pull request or a push to a branch
        run: |
          echo Core changes: ${{ steps.filter.outputs.core }}
          echo Event name: ${{ github.event_name }}
          if [ "${{ steps.filter.outputs.core }}" == "true" ]
          then
            echo "::set-output name=with_coverage::true"
          else
            echo "::set-output name=with_coverage::false"
          fi

  # Run unit tests with different configurations on linux
  ubuntu:
    runs-on: ubuntu-20.04
  unittests:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
        python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
        concretizer: ['clingo']
        on_develop:
          - ${{ github.ref == 'refs/heads/develop' }}
        include:
          - python-version: 2.7
            concretizer: original
            on_develop: ${{ github.ref == 'refs/heads/develop' }}
          - python-version: '3.11'
          - python-version: 3.6
            concretizer: original
          - python-version: 3.9
            concretizer: original
            on_develop: ${{ github.ref == 'refs/heads/develop' }}
        exclude:
          - python-version: '3.7'
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.8'
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.9'
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.10'
            concretizer: 'clingo'
            on_develop: false

    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -55,21 +126,16 @@ jobs:
            patchelf cmake bison libbison-dev kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
          # Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
          # to bugs on an unmaintained version of the package when used with xdist.
          if [[ ${{ matrix.python-version }} != "2.7" ]]; then
            pip install --upgrade pytest-cov
          fi
          pip install --upgrade pip six setuptools pytest codecov "coverage[toml]<=6.2"
          # ensure style checks are not skipped in unit tests for python >= 3.6
          # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
          if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
            pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0"
            pip install --upgrade flake8 isort>=4.3.5 mypy>=0.900 black
          fi
      - name: Pin pathlib for Python 2.7
        if: ${{ matrix.python-version == 2.7 }}
        run: |
          pip install -U pathlib2==2.3.6 toml
          pip install -U pathlib2==2.3.6
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -81,30 +147,41 @@ jobs:
          SPACK_PYTHON: python
        run: |
          . share/spack/setup-env.sh
          spack bootstrap disable spack-install
          spack bootstrap untrust spack-install
          spack -v solve zlib
      - name: Run unit tests
      - name: Run unit tests (full suite with coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          SPACK_PYTHON: python
          SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
          SPACK_TEST_PARALLEL: 2
          COVERAGE: true
          UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
          SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
          coverage combine
          coverage xml
      - name: Run unit tests (reduced suite without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        env:
          SPACK_PYTHON: python
          ONLY_PACKAGES: true
          SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.11'
          python-version: '3.10'
      - name: Install System packages
        run: |
          sudo apt-get -y update
@@ -112,25 +189,33 @@ jobs:
          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist
          pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Run shell tests
      - name: Run shell tests (without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        run: |
          share/spack/qa/run-shell-tests
      - name: Run shell tests (with coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: shelltests,linux

  # Test RHEL8 UBI with platform Python. This job is run
  # only on PRs modifying core Spack
  rhel8-platform-python:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    if: ${{ needs.changes.outputs.with_coverage == 'true' }}
    container: registry.access.redhat.com/ubi8/ubi
    steps:
      - name: Install dependencies
@@ -138,7 +223,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -154,14 +239,15 @@ jobs:
          spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.11'
          python-version: '3.10'
      - name: Install System packages
        run: |
          sudo apt-get -y update
@@ -171,53 +257,105 @@ jobs:
            patchelf kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
          pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 clingo
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Run unit tests (full suite with coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          COVERAGE: true
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
          coverage combine
          coverage xml
      - name: Run unit tests (reduced suite without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        env:
          ONLY_PACKAGES: true
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,clingo
  # Run unit tests on MacOS
  macos:
  build:
    needs: [ validate, style, changes ]
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: ["3.10"]
        python-version: [3.8]
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools
          pip install --upgrade pytest codecov coverage[toml] pytest-xdist pytest-cov
          pip install --upgrade pytest codecov coverage[toml]==6.2
      - name: Setup Homebrew packages
        run: |
          brew install dash fish gcc gnupg2 kcov
      - name: Run unit tests
        env:
          SPACK_TEST_SOLVER: clingo
          SPACK_TEST_PARALLEL: 4
        run: |
          git --version
          . .github/workflows/setup_git.sh
          . share/spack/setup-env.sh
          $(which spack) bootstrap disable spack-install
          $(which spack) bootstrap untrust spack-install
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
          if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
          then
            coverage run $(which spack) unit-test -x
            coverage combine
            coverage xml
            # Delete the symlink going from ./lib/spack/docs/_spack_root back to
            # the initial directory, since it causes ELOOP errors with codecov/actions@2
            rm lib/spack/docs/_spack_root
          else
            echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
            $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
          fi
      - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          files: ./coverage.xml
          flags: unittests,macos

  # Run audits on all the packages in the built-in repository
  package-audits:
    needs: [ validate, style, changes ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
      - name: Package audits (with coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        run: |
          . share/spack/setup-env.sh
          coverage run $(which spack) audit packages
          coverage combine
          coverage xml
      - name: Package audits (without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        run: |
          . share/spack/setup-env.sh
          $(which spack) audit packages
      - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,audits
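The `on_develop` axis in the `unittests` matrix above is worth calling out: the axis holds a single expression-valued entry, so every matrix row carries the same boolean, and `exclude` then prunes the rows whose value is `false`. A reduced sketch of the trick (versions shortened for illustration):

```yaml
strategy:
  matrix:
    python-version: ['2.7', '3.8', '3.10']
    # A one-element axis: true only for pushes to develop.
    on_develop:
      - ${{ github.ref == 'refs/heads/develop' }}
    exclude:
      # Off develop, this row matches (on_develop is false) and is dropped.
      - python-version: '3.8'
        on_develop: false
```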
.github/workflows/valid-style.yml (vendored, 60 changed lines)

@@ -1,60 +0,0 @@
name: style

on:
  workflow_call:
    inputs:
      with_coverage:
        required: true
        type: string

concurrency:
  group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true


jobs:
  # Validate that the code can be run on all the Python versions
  # supported by Spack
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python Packages
        run: |
          pip install --upgrade pip
          pip install --upgrade vermin
      - name: vermin (Spack's Core)
        run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
      - name: vermin (Repositories)
        run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
  # Run style checks on the files that have been changed
  style:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python packages
        run: |
          python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Run style tests
        run: |
          share/spack/qa/run-style-tests
  audit:
    uses: ./.github/workflows/audit.yaml
    with:
      with_coverage: ${{ inputs.with_coverage }}
      python_version: '3.11'
.github/workflows/windows_python.yml (vendored, 115 changed lines)

@@ -1,10 +1,17 @@
name: windows
name: windows tests

on:
  workflow_call:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**

concurrency:
  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

defaults:
@@ -12,64 +19,91 @@ defaults:
  shell:
    powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
jobs:
  unit-tests:
  validate:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python Packages
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade vermin
      - name: vermin (Spack's Core)
        run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/lib/spack/spack/ spack/lib/spack/llnl/ spack/bin/
      - name: vermin (Repositories)
        run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/var/spack/repos
  # Run style checks on the files that have been changed
  style:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
          python -m pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
      - name: Create local develop
        run: |
          .\spack\.github\workflows\setup_git.ps1
      - name: Run style tests
        run: |
          spack style
      - name: Verify license headers
        run: |
          python spack\bin\spack license verify
  unittest:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
      - name: Create local develop
        run: |
          .\spack\.github\workflows\setup_git.ps1
      - name: Unit Test
        run: |
          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
          cd spack
          dir
          spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
          coverage combine -a
          coverage xml
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
        with:
          flags: unittests,windows
  unit-tests-cmd:
          spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
  unittest-cmd:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
      - name: Create local develop
        run: |
          .\spack\.github\workflows\setup_git.ps1
      - name: Command Unit Test
        run: |
          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
          cd spack
          spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
          coverage combine -a
          coverage xml
      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
        with:
          flags: unittests,windows
  build-abseil:
          spack unit-test lib/spack/spack/test/cmd --verbose
  buildtest:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -81,8 +115,9 @@ jobs:
          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
          spack external find cmake
          spack external find ninja
          spack -d install abseil-cpp
  make-installer:
          spack install abseil-cpp
  generate-installer-test:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
      - name: Disable Windows Symlinks
@@ -90,15 +125,15 @@ jobs:
          git config --global core.symlinks false
        shell:
          powershell
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools
          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
      - name: Add Light and Candle to Path
        run: |
          $env:WIX >> $GITHUB_PATH
@@ -109,27 +144,27 @@ jobs:
          echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
        env:
          ProgressPreference: SilentlyContinue
      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer Bundle
          path: ${{ env.installer_root }}\pkg\Spack.exe
      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer
          path: ${{ env.installer_root}}\pkg\Spack.msi
  execute-installer:
    needs: make-installer
    needs: generate-installer-test
    runs-on: windows-latest
    defaults:
      run:
        shell: pwsh
    steps:
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools
          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
      - name: Setup installer directory
        run: |
          mkdir -p spack_installer
@@ -155,4 +190,4 @@ jobs:
          $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
          $handle = $proc.Handle # cache proc.Handle
          $proc.WaitForExit();
          $LASTEXITCODE
          $LASTEXITCODE
320 CHANGELOG.md
@@ -1,314 +1,3 @@
# v0.19.1 (2023-02-07)

### Spack Bugfixes

* `buildcache create`: make "file exists" less verbose (#35019)
* `spack mirror create`: don't change paths to urls (#34992)
* Improve error message for requirements (#33988)
* uninstall: fix accidental cubic complexity (#34005)
* scons: fix signature for `install_args` (#34481)
* Fix `combine_phase_logs` text encoding issues (#34657)
* Use a module-like object to propagate changes in the MRO, when setting build env (#34059)
* PackageBase should not define builder legacy attributes (#33942)
* Forward lookup of the "run_tests" attribute (#34531)
* Bugfix for timers (#33917, #33900)
* Fix path handling in prefix inspections (#35318)
* Fix libtool filter for Fujitsu compilers (#34916)
* Bug fix for duplicate rpath errors on macOS when creating build caches (#34375)
* FileCache: delete the new cache file on exception (#34623)
* Propagate exceptions from Spack python console (#34547)
* Tests: Fix a bug/typo in a `config_values.py` fixture (#33886)
* Various CI fixes (#33953, #34560, #34560, #34828)
* Docs: remove monitors and analyzers, typos (#34358, #33926)
* bump release version for tutorial command (#33859)


# v0.19.0 (2022-11-11)

`v0.19.0` is a major feature release.

## Major features in this release

1. **Package requirements**

   Spack's traditional [package preferences](
   https://spack.readthedocs.io/en/latest/build_settings.html#package-preferences)
   are soft, but we've added hard requirements to `packages.yaml` and `spack.yaml`
   (#32528, #32369). Package requirements use the same syntax as specs:

   ```yaml
   packages:
     libfabric:
       require: "@1.13.2"
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]
   ```

   More details in [the docs](
   https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements).

2. **Environment UI Improvements**

   * Fewer surprising modifications to `spack.yaml` (#33711):

     * `spack install` in an environment will no longer add to the `specs:` list; you'll
       need to either use `spack add <spec>` or `spack install --add <spec>`.

     * Similarly, `spack uninstall` will not remove from your environment's `specs:`
       list; you'll need to use `spack remove` or `spack uninstall --remove`.

     This will make it easier to manage an environment, as there is clear separation
     between the stack to be installed (`spack.yaml`/`spack.lock`) and which parts of
     it should be installed (`spack install` / `spack uninstall`); a short console
     sketch closes this item.

   * `concretizer:unify:true` is now the default mode for new environments (#31787)

     We see more users creating `unify:true` environments now. Users who need
     `unify:false` can add it to their environment to get the old behavior. This will
     concretize every spec in the environment independently.
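
   A short console sketch of the add/remove workflow described above (the package
   name is illustrative):

   ```console
   $ spack -e . install hdf5              # builds hdf5; the specs: list is unchanged
   $ spack -e . install --add hdf5        # builds hdf5 and adds it to specs:
   $ spack -e . uninstall --remove hdf5   # uninstalls it and drops it from specs:
   ```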

3. **Include environment configuration from URLs** (#29026, [docs](
   https://spack.readthedocs.io/en/latest/environments.html#included-configurations))

   You can now include configuration in your environment directly from a URL:

   ```yaml
   spack:
     include:
     - https://github.com/path/to/raw/config/compilers.yaml
   ```

4. **Multiple Build Systems**

   An increasing number of packages in the ecosystem need the ability to support
   multiple build systems (#30738, [docs](
   https://spack.readthedocs.io/en/latest/packaging_guide.html#multiple-build-systems)),
   either across versions, across platforms, or within the same version of the software.
   This has been hard to support through multiple inheritance, as methods from different
   build system superclasses would conflict. `package.py` files can now define separate
   builder classes with installation logic for different build systems, e.g.:

   ```python
   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )


   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           pass


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           pass
   ```

5. **Compiler and variant propagation**

   Currently, compiler flags and variants are inconsistent: compiler flags set for a
   package are inherited by its dependencies, while variants are not. We should have
   these be consistent by allowing for inheritance to be enabled or disabled for both
   variants and compiler flags.

   Example syntax:

   - `package ++variant`:
     enabled variant that will be propagated to dependencies
   - `package +variant`:
     enabled variant that will NOT be propagated to dependencies
   - `package ~~variant`:
     disabled variant that will be propagated to dependencies
   - `package ~variant`:
     disabled variant that will NOT be propagated to dependencies
   - `package cflags==-g`:
     `cflags` will be propagated to dependencies
   - `package cflags=-g`:
     `cflags` will NOT be propagated to dependencies

   Syntax for non-boolean variants is similar to compiler flags. More in the docs for
   [variants](
   https://spack.readthedocs.io/en/latest/basic_usage.html#variants) and [compiler flags](
   https://spack.readthedocs.io/en/latest/basic_usage.html#compiler-flags).
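
   For instance, combining both kinds of propagation in a single spec (a sketch;
   the package name is illustrative):

   ```console
   # propagate the debug variant and the -g flag to every dependency
   $ spack install mpileaks ++debug cflags=="-g"
   ```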

6. **Enhancements to git version specifiers**

   * `v0.18.0` added the ability to use git commits as versions. You can now use the
     `git.` prefix to specify git tags or branches as versions. All of these are valid git
     versions in `v0.19` (#31200):

     ```console
     foo@abcdef1234abcdef1234abcdef1234abcdef1234      # raw commit
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234  # commit with git prefix
     foo@git.develop                                   # the develop branch
     foo@git.0.19                                      # use the 0.19 tag
     ```

   * `v0.19` also gives you more control over how Spack interprets git versions, in case
     Spack cannot detect the version from the git repository. You can suffix a git
     version with `=<version>` to force Spack to concretize it as a particular version
     (#30998, #31914, #32257):

     ```console
     # use mybranch, but treat it as version 3.2 for version comparison
     foo@git.mybranch=3.2

     # use the given commit, but treat it as develop for version comparison
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop
     ```

     More in [the docs](
     https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).

7. **Changes to Cray EX Support**

   Cray machines have historically had their own "platform" within Spack, because we
   needed to go through the module system to leverage compilers and MPI installations on
   these machines. The Cray EX programming environment now provides standalone `craycc`
   executables and proper `mpicc` wrappers, so Spack can treat EX machines like Linux
   with extra packages (#29392).

   We expect this to greatly reduce bugs, as external packages and compilers can now be
   used by prefix instead of through modules. We will also no longer be subject to
   reproducibility issues when modules change from Cray PE release to release and from
   site to site. This also simplifies dealing with the underlying Linux OS on Cray
   systems, as Spack will properly model the machine's OS as either SuSE or RHEL.

8. **Improvements to tests and testing in CI**

   * `spack ci generate --tests` will generate a `.gitlab-ci.yml` file that not only does
     builds but also runs tests for built packages (#27877). Public GitHub pipelines now
     also run tests in CI.

   * `spack test run --explicit` will only run tests for packages that are explicitly
     installed, instead of all packages.
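
   For example (a sketch; the spec is illustrative):

   ```console
   $ spack install --test=root hdf5   # build hdf5 and run its build-time tests
   $ spack test run --explicit        # stand-alone tests, explicit installs only
   ```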

9. **Experimental binding link model**

   You can add a new option to `config.yaml` to make Spack embed absolute paths to
   needed shared libraries in ELF executables and shared libraries on Linux (#31948, [docs](
   https://spack.readthedocs.io/en/latest/config_yaml.html#shared-linking-bind)):

   ```yaml
   config:
     shared_linking:
       type: rpath
       bind: true
   ```

   This can improve launch time at scale for parallel applications, and it can make
   installations less susceptible to environment variables like `LD_LIBRARY_PATH`,
   especially when dealing with external libraries that use `RUNPATH`. You can think of
   this as a faster, even higher-precedence version of `RPATH`.
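
   One way to observe the effect is to inspect a binary's `DT_NEEDED` entries, which
   carry absolute paths when `bind: true` is set (a sketch; the path is illustrative):

   ```console
   $ readelf -d ./my-spack-binary | grep NEEDED
    0x0000000000000001 (NEEDED)  Shared library: [/spack/opt/spack/.../lib/libz.so.1]
   ```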

## Other new features of note

* `spack spec` prints dependencies more legibly. Dependencies in the output now appear
  at the *earliest* level of indentation possible (#33406)
* You can override `package.py` attributes like `url` directly in `packages.yaml`
  (#33275, [docs](
  https://spack.readthedocs.io/en/latest/build_settings.html#assigning-package-attributes));
  see the sketch after this list
* There are a number of new architecture-related format strings you can use in Spack
  configuration files to specify paths (#29810, [docs](
  https://spack.readthedocs.io/en/latest/configuration.html#config-file-variables))
* Spack now supports bootstrapping Clingo on Windows (#33400)
* There is now support for an `RPATH`-like library model on Windows (#31930)
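
A minimal sketch of the `packages.yaml` override mentioned above (the package and
URL are illustrative):

```yaml
packages:
  openmpi:
    package_attributes:
      url: https://example.com/mirror/openmpi-4.1.4.tar.gz
```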

## Performance Improvements

* Major performance improvements for installation from binary caches (#27610, #33628,
  #33636, #33608, #33590, #33496)
* Test suite can now be parallelized using `xdist` (used in GitHub Actions) (#32361)
* Reduce lock contention for parallel builds in environments (#31643)

## New binary caches and stacks

* We now build nearly all of E4S with `oneapi` in our buildcache (#31781, #31804,
  #31804, #31803, #31840, #31991, #32117, #32107, #32239)
* Added 3 new machine learning-centric stacks to binary cache: `x86_64_v3`, CUDA, ROCm
  (#31592, #33463)

## Removals and Deprecations

* Support for Python 3.5 is dropped (#31908). Only Python 2.7 and 3.6+ are officially
  supported.

* This is the last Spack release that will support Python 2 (#32615). Spack `v0.19`
  will emit a deprecation warning if you run it with Python 2, and Python 2 support will
  soon be removed from the `develop` branch.

* `LD_LIBRARY_PATH` is no longer set by default by `spack load` or module loads.

  Setting `LD_LIBRARY_PATH` in Spack environments/modules can cause binaries from
  outside of Spack to crash, and Spack's own builds use `RPATH` and do not need
  `LD_LIBRARY_PATH` set in order to run. If you still want the old behavior, you
  can run these commands to configure Spack to set `LD_LIBRARY_PATH`:

  ```console
  spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]
  spack config add modules:prefix_inspections:lib:[LD_LIBRARY_PATH]
  ```

* The `spack:concretization:[together|separately]` option has been removed after being
  deprecated in `v0.18`. Use `concretizer:unify:[true|false]`.
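
  For example, in an environment's `spack.yaml` (a sketch):

  ```yaml
  spack:
    concretizer:
      unify: true
  ```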
* `config:module_roots` is no longer supported after being deprecated in `v0.18`. Use
  configuration in module sets instead (#28659, [docs](
  https://spack.readthedocs.io/en/latest/module_file_support.html)).
* `spack activate` and `spack deactivate` are no longer supported, having been
  deprecated in `v0.18`. Use an environment with a view instead of
  activating/deactivating ([docs](
  https://spack.readthedocs.io/en/latest/environments.html#configuration-in-spack-yaml)).
* The old YAML format for buildcaches is now deprecated (#33707). If you are using an
  old buildcache with YAML metadata you will need to regenerate it with JSON metadata.
* `spack bootstrap trust` and `spack bootstrap untrust` are deprecated in favor of
  `spack bootstrap enable` and `spack bootstrap disable` and will be removed in `v0.20`.
  (#33600)
* The `graviton2` architecture has been renamed to `neoverse_n1`, and `graviton3`
  is now `neoverse_v1`. Buildcaches using the old architecture names will need to be rebuilt.
* The terms `blacklist` and `whitelist` have been replaced with `include` and `exclude`
  in all configuration files (#31569). You can use `spack config update` to
  automatically fix your configuration files.

## Notable Bugfixes

* Permission setting on installation now handles effective uid properly (#19980)
* `buildable:true` for an MPI implementation now overrides `buildable:false` for `mpi` (#18269)
* Improved error messages when attempting to use an unconfigured compiler (#32084)
* Do not punish explicitly requested compiler mismatches in the solver (#30074)
* `spack stage`: add missing --fresh and --reuse (#31626)
* Fixes for adding build system executables like `cmake` to package scope (#31739)
* Bugfix for binary relocation with aliased strings produced by newer `binutils` (#32253)

## Spack community stats

* 6,751 total packages, 335 new since `v0.18.0`
    * 141 new Python packages
    * 89 new R packages
* 303 people contributed to this release
    * 287 committers to packages
    * 57 committers to core


# v0.18.1 (2022-07-19)

### Spack Bugfixes
* Fix several bugs related to bootstrapping (#30834,#31042,#31180)
* Fix a regression that was causing spec hashes to differ between
  Python 2 and Python 3 (#31092)
* Fixed compiler flags for oneAPI and DPC++ (#30856)
* Fixed several issues related to concretization (#31142,#31153,#31170,#31226)
* Improved support for Cray manifest file and `spack external find` (#31144,#31201,#31173,#31186)
* Assign a version to openSUSE Tumbleweed according to the GLIBC version
  in the system (#19895)
* Improved Dockerfile generation for `spack containerize` (#29741,#31321)
* Fixed a few bugs related to concurrent execution of commands (#31509,#31493,#31477)

### Package updates
* WarpX: add v22.06, fixed libs property (#30866,#31102)
* openPMD: add v0.14.5, update recipe for @develop (#29484,#31023)

# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

@@ -510,15 +199,6 @@
* 337 committers to packages
* 85 committers to core

# v0.17.3 (2022-07-14)

### Spack bugfixes

* Fix missing chgrp on symlinks in package installations (#30743)
* Allow having non-existing upstreams (#30744, #30746)
* Fix `spack stage` with custom paths (#30448)
* Fix failing call for `spack buildcache save-specfile` (#30637)
* Fix globbing in compiler wrapper (#30699)

# v0.17.2 (2022-04-13)
README.md
@@ -2,10 +2,10 @@

[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
[](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
[](https://codecov.io/gh/spack/spack)
[](https://github.com/spack/spack/actions/workflows/build-containers.yml)
[](https://spack.readthedocs.io)
[](https://github.com/psf/black)
[](https://slack.spack.io)

Spack is a multi-platform package manager that builds and installs

@@ -62,7 +62,6 @@ Resources:

* **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
  To get an invitation, visit [slack.spack.io](https://slack.spack.io).
* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
  `@mention` us!

SECURITY.md
@@ -10,8 +10,8 @@ For more on Spack's release structure, see

| Version | Supported          |
| ------- | ------------------ |
| develop | :white_check_mark: |
| 0.19.x  | :white_check_mark: |
| 0.18.x  | :white_check_mark: |
| 0.17.x  | :white_check_mark: |
| 0.16.x  | :white_check_mark: |

## Reporting a Vulnerability

@@ -8,11 +8,13 @@

def getpywin():
    try:
        import win32con  # noqa: F401
        import win32con  # noqa
    except ImportError:
        subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
        subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "pywin32"])
        subprocess.check_call(
            [sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
        subprocess.check_call(
            [sys.executable, "-m", "pip", "-q", "install", "pywin32"])


if __name__ == "__main__":
if __name__ == '__main__':
    getpywin()
46 bin/spack
@@ -49,8 +49,50 @@ spack_prefix = os.path.dirname(os.path.dirname(spack_file))
spack_lib_path = os.path.join(spack_prefix, "lib", "spack")
sys.path.insert(0, spack_lib_path)

from spack_installable.main import main  # noqa: E402
# Add external libs
spack_external_libs = os.path.join(spack_lib_path, "external")

if sys.version_info[:2] <= (2, 7):
    sys.path.insert(0, os.path.join(spack_external_libs, "py2"))

sys.path.insert(0, spack_external_libs)

# Here we delete ruamel.yaml in case it has been already imported from site
# (see #9206 for a broader description of the issue).
#
# Briefly: ruamel.yaml produces a .pth file when installed with pip that
# makes the site installed package the preferred one, even though sys.path
# is modified to point to another version of ruamel.yaml.
if "ruamel.yaml" in sys.modules:
    del sys.modules["ruamel.yaml"]

if "ruamel" in sys.modules:
    del sys.modules["ruamel"]

# The following code is here to avoid failures when updating
# the develop version, due to spurious argparse.pyc files remaining
# in the libs/spack/external directory, see:
# https://github.com/spack/spack/pull/25376
# TODO: Remove in v0.18.0 or later
try:
    import argparse
except ImportError:
    argparse_pyc = os.path.join(spack_external_libs, 'argparse.pyc')
    if not os.path.exists(argparse_pyc):
        raise
    try:
        os.remove(argparse_pyc)
        import argparse  # noqa
    except Exception:
        msg = ('The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. '
               'Either delete it manually or ask some administrator to '
               'delete it for you.')
        print(msg.format(argparse_pyc))
        sys.exit(1)


import spack.main  # noqa

# Once we've set up the system path, run the spack main method
if __name__ == "__main__":
    sys.exit(main())
    sys.exit(spack.main.main())
@@ -1,95 +0,0 @@
#!/bin/bash
set -euo pipefail
[[ -n "${TMPCONFIG_DEBUG:=}" ]] && set -x
DIR="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
mkdir -p "${XDG_RUNTIME_DIR:=/tmp}/spack-tests"
export TMPDIR="${XDG_RUNTIME_DIR}"
export TMP_DIR="$(mktemp -d -t spack-test-XXXXX)"
clean_up() {
    [[ -n "$TMPCONFIG_DEBUG" ]] && printf "cleaning up: $TMP_DIR\n"
    rm -rf "$TMP_DIR"
}
trap clean_up EXIT
trap clean_up ERR

[[ -n "$TMPCONFIG_DEBUG" ]] && printf "Redirecting TMP_DIR and spack directories to $TMP_DIR\n"

export BOOTSTRAP="${SPACK_USER_CACHE_PATH:=$HOME/.spack}/bootstrap"
export SPACK_USER_CACHE_PATH="$TMP_DIR/user_cache"
mkdir -p "$SPACK_USER_CACHE_PATH"

private_bootstrap="$SPACK_USER_CACHE_PATH/bootstrap"
use_spack=''
use_bwrap=''
# argument handling
while (($# >= 1)) ; do
    case "$1" in
        -b) # privatize bootstrap too, useful for CI but not always cheap
            shift
            export BOOTSTRAP="$private_bootstrap"
            ;;
        -B) # use specified bootstrap dir
            export BOOTSTRAP="$2"
            shift 2
            ;;
        -s) # run spack directly with remaining args
            shift
            use_spack=1
            ;;
        --contain=bwrap)
            if bwrap --help 2>&1 > /dev/null ; then
                use_bwrap=1
            else
                echo Bubblewrap containment requested, but no bwrap command found
                exit 1
            fi
            shift
            ;;
        --)
            shift
            break
            ;;
        *)
            break
            ;;
    esac
done
typeset -a CMD
if [[ -n "$use_spack" ]] ; then
    CMD=("$DIR/spack" "$@")
else
    CMD=("$@")
fi

mkdir -p "$BOOTSTRAP"

export SPACK_SYSTEM_CONFIG_PATH="$TMP_DIR/sys_conf"
export SPACK_USER_CONFIG_PATH="$TMP_DIR/user_conf"
mkdir -p "$SPACK_USER_CONFIG_PATH"
cat >"$SPACK_USER_CONFIG_PATH/config.yaml" <<EOF
config:
  install_tree:
    root: $TMP_DIR/install
  misc_cache: $$user_cache_path/cache
  source_cache: $$user_cache_path/source
EOF
cat >"$SPACK_USER_CONFIG_PATH/bootstrap.yaml" <<EOF
bootstrap:
  root: $BOOTSTRAP
EOF

if [[ -n "$use_bwrap" ]] ; then
    CMD=(
        bwrap
        --dev-bind / /
        --ro-bind "$DIR/.." "$DIR/.."  # do not touch spack root
        --ro-bind $HOME/.spack $HOME/.spack  # do not touch user config/cache dir
        --bind "$TMP_DIR" "$TMP_DIR"
        --bind "$BOOTSTRAP" "$BOOTSTRAP"
        --die-with-parent
        "${CMD[@]}"
    )
fi

(( ${TMPCONFIG_DEBUG:=0} > 1)) && echo "Running: ${CMD[@]}"
"${CMD[@]}"
@@ -9,15 +9,14 @@ bootstrap:
  # may not be able to bootstrap all the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions-v0.4'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
  - name: 'github-actions-v0.3'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.3
  - name: 'github-actions-v0.2'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
  - name: 'github-actions-v0.1'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
  - name: 'spack-install'
    metadata: $spack/share/spack/bootstrap/spack-install
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
    github-actions-v0.4: true
    github-actions-v0.3: true
    github-actions-v0.2: true
    spack-install: true

@@ -33,4 +33,4 @@ concretizer:
  # environments can always be activated. When "false" perform concretization separately
  # on each root spec, allowing different versions and variants of the same package in
  # an environment.
  unify: true
  unify: false
@@ -187,20 +187,10 @@ config:
  package_lock_timeout: null


  # Control how shared libraries are located at runtime on Linux. See
  # the Spack documentation for details.
  shared_linking:
    # Spack automatically embeds runtime search paths in ELF binaries for their
    # dependencies. Their type can either be "rpath" or "runpath". For glibc, rpath is
    # inherited and has precedence over LD_LIBRARY_PATH; runpath is not inherited
    # and of lower precedence. DO NOT MIX these within the same install tree.
    type: rpath

    # (Experimental) Embed absolute paths of dependent libraries directly in ELF
    # binaries to avoid runtime search. This can improve startup time of
    # executables with many dependencies, in particular on slow filesystems.
    bind: false
  # Control whether Spack embeds RPATH or RUNPATH attributes in ELF binaries.
  # Has no effect on macOS. DO NOT MIX these within the same install tree.
  # See the Spack documentation for details.
  shared_linking: 'rpath'


  # Set to 'false' to allow installation on filesystems that don't allow setgid bit
@@ -211,7 +201,3 @@ config:
  # building and installing packages. This gives information about Spack's
  # current progress as well as the current and total number of packages.
  terminal_title: false

  # Number of seconds a buildcache's index.json is cached locally before probing
  # for updates, within a single Spack invocation. Defaults to 10 minutes.
  binary_index_ttl: 600
@@ -13,4 +13,9 @@
# Per-user settings (overrides default and site settings):
#   ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules: {}
modules:
  prefix_inspections:
    lib:
    - LD_LIBRARY_PATH
    lib64:
    - LD_LIBRARY_PATH

@@ -15,7 +15,7 @@
# -------------------------------------------------------------------------
modules:
  prefix_inspections:
    ./lib:
    lib:
    - DYLD_FALLBACK_LIBRARY_PATH
    ./lib64:
    lib64:
    - DYLD_FALLBACK_LIBRARY_PATH

@@ -13,4 +13,9 @@
# Per-user settings (overrides default and site settings):
#   ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules: {}
modules:
  prefix_inspections:
    lib:
    - LD_LIBRARY_PATH
    lib64:
    - LD_LIBRARY_PATH
@@ -14,24 +14,23 @@
#   ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules:
  # This maps paths in the package install prefix to environment variables
  # they should be added to. For example, <prefix>/bin should be in PATH.
  # Paths to check when creating modules for all module sets
  prefix_inspections:
    ./bin:
    bin:
    - PATH
    ./man:
    man:
    - MANPATH
    ./share/man:
    share/man:
    - MANPATH
    ./share/aclocal:
    share/aclocal:
    - ACLOCAL_PATH
    ./lib/pkgconfig:
    lib/pkgconfig:
    - PKG_CONFIG_PATH
    ./lib64/pkgconfig:
    lib64/pkgconfig:
    - PKG_CONFIG_PATH
    ./share/pkgconfig:
    share/pkgconfig:
    - PKG_CONFIG_PATH
    ./:
    '':
    - CMAKE_PREFIX_PATH

  # These are configurations for the module set named "default"
@@ -25,18 +25,16 @@ packages:
    fftw-api: [fftw, amdfftw]
    flame: [libflame, amdlibflame]
    fuse: [libfuse]
    gl: [glx, osmesa]
    gl: [mesa+opengl, mesa18, opengl]
    glu: [mesa-glu, openglu]
    golang: [go, gcc]
    go-external-or-gccgo-bootstrap: [go-bootstrap, gcc]
    glx: [mesa+glx, mesa18+glx, opengl]
    golang: [gcc]
    iconv: [libiconv]
    ipp: [intel-ipp]
    java: [openjdk, jdk, ibm-java]
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
    libglx: [mesa+glx, mesa18+glx]
    libllvm: [llvm]
    libosmesa: [mesa+osmesa, mesa18+osmesa]
    libllvm: [llvm, llvm-amdgpu]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]
    mariadb-client: [mariadb-c-client, mariadb]
@@ -46,6 +44,7 @@ packages:
    mysql-client: [mysql, mariadb-c-client]
    opencl: [pocl]
    onedal: [intel-oneapi-dal]
    osmesa: [mesa+osmesa, mesa18+osmesa]
    pbs: [openpbs, torque]
    pil: [py-pillow]
    pkgconfig: [pkgconf, pkg-config]
@@ -1,5 +1,5 @@
config:
  locks: false
  concretizer: clingo
  concretizer: original
  build_stage::
  - '$spack/.staging'
1 lib/spack/docs/_spack_root (Symbolic link)
@@ -0,0 +1 @@
../../..

162 lib/spack/docs/analyze.rst (Normal file)
@@ -0,0 +1,162 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _analyze:

=======
Analyze
=======


The analyze command is a front-end to various tools that let us analyze
package installations. Each analyzer is a module for a different kind
of analysis that can be done on a package installation, including (but not
limited to) binary, log, or text analysis. Thus, the analyze command group
allows you to take an existing package install, choose an analyzer,
and extract some output for the package using it.


-----------------
Analyzer Metadata
-----------------

For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the
value that you specify in your spack config at ``config:analyzers_dir``.
For example, here we see the results of running an analysis on zlib:

.. code-block:: console

    $ tree ~/.spack/analyzers/
    └── linux-ubuntu20.04-skylake
        └── gcc-9.3.0
            └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
                ├── environment_variables
                │   └── spack-analyzer-environment-variables.json
                ├── install_files
                │   └── spack-analyzer-install-files.json
                └── libabigail
                    └── spack-analyzer-libabigail-libz.so.1.2.11.xml

This means that you can always find analyzer output in this folder, and it
is organized with the same logic as the package install it was run for.
If you want to customize this top level folder, simply provide the ``--path``
argument to ``spack analyze run``. The nested organization will be maintained
within your custom root.
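
For example, to relocate that root via the config setting named above (a sketch;
the path is illustrative):

.. code-block:: yaml

    config:
      analyzers_dir: /scratch/my-user/spack-analyzers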

-----------------
Listing Analyzers
-----------------

If you aren't familiar with Spack's analyzers, you can quickly list those that
are available:

.. code-block:: console

    $ spack analyze list-analyzers
    install_files            : install file listing read from install_manifest.json
    environment_variables    : environment variables parsed from spack-build-env.txt
    config_args              : config args loaded from spack-configure-args.txt
    libabigail               : Application Binary Interface (ABI) features for objects

In the above, the first three are fairly simple, each parsing a metadata file
from a package install directory and saving the result; the last uses libabigail
to extract binary (ABI) features.

-------------------
Analyzing a Package
-------------------

The analyze command, akin to install, will accept a package spec to perform
an analysis for. The package must be installed. Let's walk through an example
with zlib. We first ask to analyze it. However, since we have more than one
install, we are asked to disambiguate:

.. code-block:: console

    $ spack analyze run zlib
    ==> Error: zlib matches multiple packages.
      Matching packages:
        fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake
        sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
      Use a more specific spec.

We can then specify the spec version that we want to analyze:

.. code-block:: console

    $ spack analyze run zlib/fz2bs56

If you don't provide any specific analyzer names, by default all analyzers
(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not
have any result, it will be skipped. For example, here is a result running for
zlib:

.. code-block:: console

    $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/
    spack-analyzer-environment-variables.json
    spack-analyzer-install-files.json
    spack-analyzer-libabigail-libz.so.1.2.11.xml

If you want to run a specific analyzer, ask for it with ``--analyzer``. Here we run
spack analyze on libabigail (already installed) *using* libabigail:

.. code-block:: console

    $ spack analyze run --analyzer abigail libabigail
.. _analyze_monitoring:

----------------------
Monitoring An Analysis
----------------------

For any kind of analysis, you can
use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
as a server to upload the same run metadata to. You can
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

You should first export your spack monitor token and username to the environment:

.. code-block:: console

    $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
    $ export SPACKMON_USER=spacky

By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

    $ spack analyze run --monitor wget

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

    $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget

If your server doesn't have authentication, you can skip it:

.. code-block:: console

    $ spack analyze run --monitor --monitor-disable-auth wget

Regardless of your choice, when you run analyze on an installed package (whether
it was installed with ``--monitor`` or not), you'll see the results generating as they did
before, and a message that the monitor server was pinged:

.. code-block:: console

    $ spack analyze --monitor wget
    ...
    ==> Sending result for wget bin/wget to monitor.
@@ -85,7 +85,7 @@ All packages whose names or descriptions contain documentation:

To get more information on a particular package from `spack list`, use
`spack info`. Just supply the name of a package:

.. command-output:: spack info --all mpich
.. command-output:: spack info mpich

Most of the information is self-explanatory. The *safe versions* are
versions that Spack knows the checksum for, and it will use the
@@ -896,8 +896,8 @@ your path:

    $ which mpicc
    ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc

These commands will add appropriate directories to your ``PATH``
and ``MANPATH`` according to the
These commands will add appropriate directories to your ``PATH``,
``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH`` according to the
:ref:`prefix inspections <customize-env-modifications>` defined in your
modules configuration.
When you no longer want to use a package, you can type unload or
@@ -998,15 +998,11 @@ More formally, a spec consists of the following pieces:

* ``%`` Optional compiler specifier, with an optional compiler version
  (``gcc`` or ``gcc@4.7.3``)
* ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
  ``-qt``, or ``~qt``) for boolean variants. Use ``++`` or ``--`` or
  ``~~`` to propagate variants through the dependencies (``++debug``,
  ``--qt``, or ``~~qt``).
  ``-qt``, or ``~qt``) for boolean variants
* ``name=<value>`` Optional variant specifiers that are not restricted to
  boolean variants. Use ``name==<value>`` to propagate the variant through the
  dependencies.
  boolean variants
* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
  ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
  Use ``name==<value>`` to propagate compiler flags through the dependencies.
* ``target=<value> os=<value>`` Optional architecture specifier
  (``target=haswell os=CNL10``)
* ``^`` Dependency specs (``^callpath@1.1``)
@@ -1097,8 +1093,6 @@ could depend on ``mpich@1.2:`` if it can only build with version

Below are more details about the specifiers that you can add to specs.

.. _version-specifier:

^^^^^^^^^^^^^^^^^
Version specifier
^^^^^^^^^^^^^^^^^
@@ -1114,37 +1108,6 @@ set of arbitrary versions, such as ``@1.0,1.5,1.7`` (``1.0``, ``1.5``,
or ``1.7``). When you supply such a specifier to ``spack install``,
it constrains the set of versions that Spack will install.

For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version, i.e. branches, tags,
and commits. Spack will stage and build based off the ``git``
reference provided. Acceptable syntaxes for this are:

.. code-block:: sh

    # branches and tags
    foo@git.develop                                   # use the develop branch
    foo@git.0.19                                      # use the 0.19 tag

    # commit hashes
    foo@abcdef1234abcdef1234abcdef1234abcdef1234      # 40 character hashes are automatically treated as git commits
    foo@git.abcdef1234abcdef1234abcdef1234abcdef1234

Spack versions from git reference either have an associated version supplied by the user,
or infer a relationship to known versions from the structure of the git repository. If an
associated version is supplied by the user, Spack treats the git version as equivalent to that
version for all version comparisons in the package logic (e.g. ``depends_on('foo', when='@1.5')``).

The associated version can be assigned with ``[git ref]=[version]`` syntax, with the caveat
that the specified version is known to Spack from either the package definition, or in the
configuration preferences (i.e. ``packages.yaml``).

.. code-block:: sh

    foo@git.my_ref=3.2                                        # use the my_ref tag or branch, but treat it as version 3.2 for version comparisons
    foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop  # use the given commit, but treat it as develop for version comparisons

If an associated version is not supplied then the tags in the git repo are used to determine
the most recent previous version known to Spack. Details about how versions are compared
and how Spack determines if one version is less than another are discussed in the developer guide.

If the version spec is not provided, then Spack will choose one
according to policies set for the particular spack installation. If
the spec is ambiguous, i.e. it could match multiple versions, Spack
@@ -1230,23 +1193,6 @@ variants using the backwards compatibility syntax and uses only ``~``
for disabled boolean variants. The ``-`` and spaces on the command
line are provided for convenience and legibility.

Spack allows variants to propagate their value to the package's
dependencies by using ``++``, ``--``, and ``~~`` for boolean variants.
For example, for a ``debug`` variant:

.. code-block:: sh

    mpileaks ++debug   # enabled debug will be propagated to dependencies
    mpileaks +debug    # only mpileaks will have debug enabled

To propagate the value of non-boolean variants Spack uses ``name==value``.
For example, for the ``stackstart`` variant:

.. code-block:: sh

    mpileaks stackstart==4   # variant will be propagated to dependencies
    mpileaks stackstart=4    # only mpileaks will have this variant value

^^^^^^^^^^^^^^
Compiler Flags
^^^^^^^^^^^^^^
@@ -1254,15 +1200,10 @@ Compiler Flags

Compiler flags are specified using the same syntax as non-boolean variants,
but fulfill a different purpose. While the function of a variant is set by
the package, compiler flags are used by the compiler wrappers to inject
flags into the compile line of the build. Additionally, compiler flags can
be inherited by dependencies by using ``==``.
``spack install libdwarf cppflags=="-g"`` will install both libdwarf and
libelf with the ``-g`` flag injected into their compile line.
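
A console sketch contrasting the two forms (packages as in the example above):

.. code-block:: console

    $ spack install libdwarf cppflags=="-g"   # -g reaches libelf as well
    $ spack install libdwarf cppflags="-g"    # -g applies to libdwarf only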

.. note::

    Versions of Spack prior to 0.19.0 will propagate compiler flags using
    the ``=`` syntax.

flags into the compile line of the build. Additionally, compiler flags are
inherited by dependencies. ``spack install libdwarf cppflags="-g"`` will
install both libdwarf and libelf with the ``-g`` flag injected into their
compile line.

Notice that the value of the compiler flags must be quoted if it
contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``,
@@ -1464,7 +1405,7 @@ built.

You can see what virtual packages a particular package provides by
getting info on it:

.. command-output:: spack info --virtuals mpich
.. command-output:: spack info mpich

Spack is unique in that its virtual packages can be versioned, just
like regular packages. A particular version of a package may provide
@@ -1672,13 +1613,9 @@ own install prefix. However, certain packages are typically installed
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.

In Spack, installation prefixes are immutable, so this type of installation
is not directly supported. However, it is possible to create views that
allow you to merge install prefixes of multiple packages into a single new prefix.
Views are a convenient way to get a more traditional filesystem structure.
Using *extensions*, you can ensure that Python packages always share the
same prefix in the view as Python itself. Suppose you have
Python installed like so:
Spack has support for this type of installation as well. In Spack,
a package that can live inside the prefix of another package is called
an *extension*. Suppose you have Python installed like so:

.. code-block:: console

@@ -1716,6 +1653,8 @@ You can find extensions for your Python installation like this:
    py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
    py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0

    ==> None activated.

The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:
@@ -1743,72 +1682,32 @@ directly when you run ``python``:
    ImportError: No module named numpy
    >>>

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Extensions in Environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^
Using Extensions
^^^^^^^^^^^^^^^^

The recommended way of working with extensions such as ``py-numpy``
above is through :ref:`Environments <environments>`. For example,
the following creates an environment in the current working directory
with a filesystem view in the ``./view`` directory:

.. code-block:: console

    $ spack env create --with-view view --dir .
    $ spack -e . add py-numpy
    $ spack -e . concretize
    $ spack -e . install

We recommend environments for two reasons. Firstly, environments
can be activated (requires :ref:`shell-support`):

.. code-block:: console

    $ spack env activate .

which sets all the right environment variables such as ``PATH`` and
``PYTHONPATH``. This ensures that

.. code-block:: console

    $ python
    >>> import numpy

works. Secondly, even without shell support, the view ensures
that Python can locate its extensions:

.. code-block:: console

    $ ./view/bin/python
    >>> import numpy

See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.

^^^^^^^^^^^^^^^^^^^^
Using ``spack load``
^^^^^^^^^^^^^^^^^^^^

A more traditional way of using Spack and extensions is ``spack load``
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
in your current shell, and Python itself will be available in the ``PATH``:
There are four ways to get ``numpy`` working in Python. The first is
to use :ref:`shell-support`. You can simply ``load`` the extension,
and it will be added to the ``PYTHONPATH`` in your current shell:

.. code-block:: console

    $ spack load python
    $ spack load py-numpy
    $ python
    >>> import numpy

Now ``import numpy`` will succeed for as long as you keep your current
session open.
The loaded packages can be checked using ``spack find --loaded``.
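
For example (the output shown is illustrative):

.. code-block:: console

    $ spack find --loaded
    ==> 2 loaded packages
    -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    python@2.7.8  py-numpy@1.9.1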

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Apart from ``spack env activate`` and ``spack load``, you can load numpy
through your environment modules (using ``environment-modules`` or
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
your current shell.
Instead of using Spack's environment modification capabilities through
the ``spack load`` command, you can load numpy through your
environment modules (using ``environment-modules`` or ``lmod``). This
will also add the extension to the ``PYTHONPATH`` in your current
shell.

.. code-block:: console

@@ -1818,6 +1717,130 @@ If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions in a View
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Another way to use extensions is to create a view, which merges the
python installation along with the extensions into a single prefix.
See :ref:`configuring_environment_views` for a more in-depth description
of views.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions Globally
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

As an alternative to creating a merged prefix with Python and its extensions,
and prior to support for views, Spack has provided a means to install the
extension into the Spack installation prefix for the extendee. This has
been useful because extendable packages typically search their own
installation path for addons by default.

Global activations are performed with the ``spack activate`` command:

.. _cmd-spack-activate:

^^^^^^^^^^^^^^^^^^
``spack activate``
^^^^^^^^^^^^^^^^^^

.. code-block:: console

    $ spack activate py-numpy
    ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
    ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
    ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.

Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
with. Spack knows that ``py-numpy`` depends on ``py-nose`` and
``py-setuptools``, so it activated those packages first. Finally,
once all dependencies were activated in the ``python`` installation,
``py-numpy`` was activated as well.

If we run ``spack extensions`` again, we now see the three new
packages listed as activated:

.. code-block:: console

    $ spack extensions python
    ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
    ==> 36 extensions:
    geos          py-ipython     py-pexpect    py-pyside            py-sip
    py-basemap    py-libxml2     py-pil        py-pytz              py-six
    py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
    py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
    py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
    py-epydoc     py-mx          py-pylint     py-scipy
    py-gnuplot    py-nose        py-pyparsing  py-setuptools
    py-h5py       py-numpy       py-pyqt       py-shiboken

    ==> 12 installed:
    -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
    py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
    py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
    py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0

    ==> 3 currently activated:
    -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    py-nose@1.3.4  py-numpy@1.9.1  py-setuptools@11.3.1

Now, when a user runs python, ``numpy`` will be available for import
*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.

Spack accomplishes this by symbolically linking the *entire* prefix of
the ``py-numpy`` package into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.

The only limitation of global activation is that you can only have a *single*
version of an extension activated at a time. This is because multiple
versions of the same extension would conflict if symbolically linked
into the same prefix. Users who want a different version of a package
can still get it by using environment modules or views, but they will have to
explicitly load their preferred version.

^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack activate --force``
^^^^^^^^^^^^^^^^^^^^^^^^^^

If, for some reason, you want to activate a package *without* its
dependencies, you can use ``spack activate --force``:

.. code-block:: console

    $ spack activate --force py-numpy
    ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.

.. _cmd-spack-deactivate:

^^^^^^^^^^^^^^^^^^^^
``spack deactivate``
^^^^^^^^^^^^^^^^^^^^

We've seen how activating an extension can be used to set up a default
version of a Python module. Obviously, you may want to change that at
some point. ``spack deactivate`` is the command for this. There are
several variants:

* ``spack deactivate <extension>`` will deactivate a single
  extension. If another activated extension depends on this one,
  Spack will warn you and exit with an error.
* ``spack deactivate --force <extension>`` deactivates an extension
  regardless of packages that depend on it.
* ``spack deactivate --all <extension>`` deactivates an extension and
  all of its dependencies. Use ``--force`` to disregard dependents.
* ``spack deactivate --all <extendee>`` deactivates *all* activated
  extensions of a package. For example, to deactivate *all* python
  extensions, use:

  .. code-block:: console

      $ spack deactivate --all python

-----------------------
Filesystem requirements
-----------------------
@@ -15,13 +15,15 @@ is an entire command dedicated to the management of every aspect of bootstrappin

.. command-output:: spack bootstrap --help

Spack is configured to bootstrap its dependencies lazily by default; i.e. the first time they are needed and
can't be found. You can readily check if any prerequisite for using Spack is missing by running:
The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

    % spack bootstrap status
    Spack v0.19.0 - python@3.8
    Spack v0.17.1 - python@3.8

    [FAIL] Core Functionalities
      [B] MISSING "clingo": required to concretize specs
@@ -46,21 +48,6 @@ they can be bootstrapped. Running a command that concretize a spec, like:

triggers the bootstrapping of clingo from pre-built binaries as expected.

Users can also bootstrap all the dependencies needed by Spack in a single command, which
might be useful to set up containers or other similar environments:

.. code-block:: console

    $ spack bootstrap now
    ==> Bootstrapping clingo from pre-built binaries
    ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spec.json
    ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spack
    ==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache
    ==> Bootstrapping patchelf from pre-built binaries
    ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spec.json
    ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.15.0/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spack
    ==> Installing "patchelf@0.15.0%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" arch=linux-centos7-x86_64" from a buildcache

-----------------------
The Bootstrapping store
-----------------------

@@ -120,19 +107,19 @@ If need be, you can disable bootstrapping altogether by running:

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by disabling
them specifically:
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap disable github-actions
   ==> "github-actions" is now disabled and will not be used for bootstrapping
   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap enable github-actions
   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

@@ -49,8 +49,9 @@ packages rather than building its own packages. This may be desirable

if machines ship with system packages, such as a customized MPI
that should be used instead of Spack building its own MPI.

External packages are configured through the ``packages.yaml`` file.
Here's an example of an external configuration:
External packages are configured through the ``packages.yaml`` file found
in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/``
directory. Here's an example of an external configuration:

.. code-block:: yaml
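
   # The example body is elided from this diff. A minimal sketch of such a
   # configuration (the spec and prefix here are illustrative, not from the
   # diff; older Spack versions expressed the same thing as a "paths:" mapping):
   packages:
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3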

@@ -96,14 +97,11 @@ Each package version and compiler listed in an external should

have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
but it does not prevent Spack from building packages from sources. In the above example,
Spack might choose for many valid reasons to start building and linking with the
latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.
The packages configuration can tell Spack to use an external location
for certain package versions, but it does not restrict Spack to using
external packages. In the above example, since newer versions of OpenMPI
are available, Spack will choose to start building and linking with the
latest version rather than continue using the pre-installed OpenMPI versions.

To prevent this, the ``packages.yaml`` configuration also allows packages
to be flagged as non-buildable. The previous example could be modified to

@@ -123,15 +121,9 @@ be:

      buildable: False

The addition of the ``buildable`` flag tells Spack that it should never build
its own version of OpenMPI from sources, and it will instead always rely on a pre-built
OpenMPI.

.. note::

   If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
   pre-built specs include specs already available from a local store, an upstream store, a registered
   buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
   external specs in ``packages.yaml`` are included in the list of pre-built specs.
its own version of OpenMPI, and it will instead always rely on a pre-built
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
a package name.

If an external module is specified as not buildable, then Spack will load the
external module into the build environment which can be used for linking.

@@ -140,10 +132,6 @@ The ``buildable`` does not need to be paired with external packages.

It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Non-buildable virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer

@@ -165,37 +153,21 @@ but more conveniently:

     - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
       prefix: /opt/openmpi-1.6.5-intel

Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.

In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
(available via stores or buildcaches) are not wanted, Spack can be configured to require
specs matching only the available externals:
Implementations can also be listed immediately under the virtual they provide:

.. code-block:: yaml

   packages:
     mpi:
       buildable: False
       require:
       - one_of: [
           "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
           "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
           "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         ]
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel
       openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
       openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
       openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
       mpich@3.3 %clang@9.0.0 arch=linux-debian7-x86_64: /opt/mpich-3.3-intel

This configuration prevents any spec that uses MPI and originates from stores or buildcaches
from being reused, unless it matches the requirements under ``packages:mpi:require``. For more
information on requirements see :ref:`package-requirements`.
Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.

.. _cmd-spack-external-find:

@@ -222,6 +194,11 @@ Specific limitations include:

* Packages are not discoverable by default: For a package to be
  discoverable with ``spack external find``, it needs to add special
  logic. See :ref:`here <make-package-findable>` for more details.
* The current implementation only collects and examines executable files,
  so it is typically only useful for build/run dependencies (in some cases
  if a library package also provides an executable, it may be possible to
  extract a meaningful Spec by running the executable - for example the
  compiler wrappers in MPI implementations).
* The logic does not search through module files, it can only detect
  packages with executables defined in ``PATH``; you can help Spack locate
  externals which use module files by loading any associated modules for

@@ -302,143 +279,17 @@ microarchitectures considered during the solve are constrained to be compatible

host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.

.. _package-requirements:

--------------------
Package Requirements
--------------------

Spack can be configured to always use certain compilers, package
versions, and variants during concretization through package
requirements.

Package requirements are useful when you find yourself repeatedly
specifying the same constraints on the command line, and wish that
Spack respects these constraints whether you mention them explicitly
or not. Another use case is specifying constraints that should apply
to all root specs in an environment, without having to repeat the
constraint everywhere.

Apart from that, requirements config is more flexible than constraints
on the command line, because it can specify constraints on packages
*when they occur* as a dependency. In contrast, on the command line it
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.

The package requirements configuration is specified in ``packages.yaml``,
keyed by package name:

.. code-block:: yaml

   packages:
     libfabric:
       require: "@1.13.2"
     openmpi:
       require:
       - any_of: ["~cuda", "%gcc"]
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]

Requirements are expressed using Spec syntax (the same as what is provided
to ``spack install``). In the simplest case, you can specify attributes
that you always want the package to have by providing a single spec to
``require``; in the above example, ``libfabric`` will always build
with version 1.13.2.

You can provide a more-relaxed constraint and allow the concretizer to
choose between a set of options using ``any_of`` or ``one_of``:

* ``any_of`` is a list of specs. One of those specs must be satisfied
  and it is also allowed for the concretized spec to match more than one.
  In the above example, that means you could build ``openmpi+cuda%gcc``,
  ``openmpi~cuda%clang`` or ``openmpi~cuda%gcc`` (in the last case,
  note that both specs in the ``any_of`` for ``openmpi`` are
  satisfied).
* ``one_of`` is also a list of specs, and the final concretized spec
  must match exactly one of them. In the above example, that means
  you could build ``mpich+cuda`` or ``mpich+rocm`` but not
  ``mpich+cuda+rocm`` (note that the current package definition for
  ``mpich`` already includes a conflict, so this is redundant but
  still demonstrates the concept).

.. note::

   For ``any_of`` and ``one_of``, the order of specs indicates a
   preference: items that appear earlier in the list are preferred
   (note that these preferences can be ignored in favor of others).

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can also set default requirements for all packages under ``all``
like this:

.. code-block:: yaml

   packages:
     all:
       require: '%clang'

which means every spec will be required to use ``clang`` as a compiler.

Note that in this case ``all`` represents a *default set of requirements* -
if there are specific package requirements, then the default requirements
under ``all`` are disregarded. For example, with a configuration like this:

.. code-block:: yaml

   packages:
     all:
       require: '%clang'
     cmake:
       require: '%gcc'

Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
dependencies) to use ``clang``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting requirements on virtual specs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A requirement on a virtual spec applies whenever that virtual is present in the DAG.
This can be useful for fixing which virtual provider you want to use:

.. code-block:: yaml

   packages:
     mpi:
       require: 'mvapich2 %gcc'

With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.

Requirements on the virtual spec and on the specific provider are both applied, if
present. For instance, with a configuration like:

.. code-block:: yaml

   packages:
     mpi:
       require: 'mvapich2 %gcc'
     mvapich2:
       require: '~cuda'

you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

.. _package-preferences:

-------------------
Package Preferences
-------------------

In some cases package requirements can be too strong, and package
preferences are the better option. Package preferences do not impose
constraints on packages for particular versions or variant values,
they rather only set defaults -- the concretizer is free to change
them if it must due to other constraints. Also note that package
preferences are of lower priority than reuse of already installed
packages.
Spack can be configured to prefer certain compilers, package
versions, dependencies, and variants during concretization.
The preferred configuration can be controlled via the
``~/.spack/packages.yaml`` file for user configurations, or the
``etc/spack/packages.yaml`` site configuration.

Here's an example ``packages.yaml`` file that sets preferred packages:

@@ -456,7 +307,7 @@ Here's an example ``packages.yaml`` file that sets preferred packages:

       providers:
         mpi: [mvapich2, mpich, openmpi]

At a high level, this example is specifying how packages are preferably
At a high level, this example is specifying how packages should be
concretized. The opencv package should prefer using GCC 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich2 for

@@ -464,11 +315,13 @@ its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC

These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.

Package preferences accept the following keys or components under
the specific package (or ``all``) section: ``compiler``, ``variants``,
``version``, ``providers``, and ``target``. Each component has an
ordered list of spec ``constraints``, with earlier entries in the
list being preferred over later entries.
Each ``packages.yaml`` file begins with the string ``packages:`` and
package names are specified on the next level. The special string ``all``
applies settings to *all* packages. Underneath each package name is one
or more components: ``compiler``, ``variants``, ``version``,
``providers``, and ``target``. Each component has an ordered list of
spec ``constraints``, with earlier entries in the list being preferred
over later entries.

Sometimes a package installation may have constraints that forbid
the first concretization rule, in which case Spack will use the first

@@ -483,9 +336,10 @@ gcc to pgi will thus be preferred over the xlc compiler.

The syntax for the ``provider`` section differs slightly from other
concretization rules. A provider lists a value that packages may
``depends_on`` (e.g., MPI) and a list of rules for fulfilling that
``depend_on`` (e.g., MPI) and a list of rules for fulfilling that
dependency.

.. _package_permissions:

-------------------

@@ -534,25 +388,3 @@ directories inside the install prefix. This will ensure that even

manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.

----------------------------
Assigning Package Attributes
----------------------------

You can assign class-level attributes in the configuration:

.. code-block:: yaml

   packages:
     mpileaks:
       # Override existing attributes
       url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
       # ... or add new ones
       x: 1

Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
attributes may be any value parseable by yaml.

These can only be applied to specific packages, not "all" or
virtual packages.
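
As a quick sketch of how such attributes surface in a recipe (the names
mirror the YAML above; the rest of the class body is illustrative):

.. code-block:: python

   class Mpileaks(AutotoolsPackage):
       def install(self, spec, prefix):
           # "url" and "x" were injected from packages.yaml and behave
           # like ordinary class-level attributes
           print(self.url)  # http://www.somewhereelse.com/mpileaks-1.0.tar.gz
           print(self.x)    # 1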

@@ -62,11 +62,11 @@ on these ideas for each distinct build system that Spack supports:

   build_systems/bundlepackage
   build_systems/cudapackage
   build_systems/custompackage
   build_systems/inteloneapipackage
   build_systems/intelpackage
   build_systems/rocmpackage
   build_systems/sourceforgepackage
   build_systems/custompackage
   build_systems/multiplepackage

For reference, the :py:mod:`Build System API docs <spack.build_systems>`
provide a list of build systems and methods/attributes that can be

@@ -5,9 +5,9 @@

.. _autotoolspackage:

---------
Autotools
---------
----------------
AutotoolsPackage
----------------

Autotools is a GNU build system that provides a build-script generator.
By running the platform-independent ``./configure`` script that comes

@@ -17,7 +17,7 @@ with the package, you can generate a platform-dependent Makefile.

Phases
^^^^^^

The ``AutotoolsBuilder`` and ``AutotoolsPackage`` base classes come with the following phases:
The ``AutotoolsPackage`` base class comes with the following phases:

#. ``autoreconf`` - generate the configure script
#. ``configure`` - generate the Makefiles

@@ -5,9 +5,9 @@

.. _bundlepackage:

------
Bundle
------
-------------
BundlePackage
-------------

``BundlePackage`` represents a set of packages that are expected to work well
together, such as a collection of commonly used software libraries. The

@@ -5,9 +5,9 @@

.. _cmakepackage:

-----
CMake
-----
------------
CMakePackage
------------

Like Autotools, CMake is a widely-used build-script generator. Designed
by Kitware, CMake is the most popular build system for new C, C++, and

@@ -21,7 +21,7 @@ whereas Autotools is Unix-only.

Phases
^^^^^^

The ``CMakeBuilder`` and ``CMakePackage`` base classes come with the following phases:
The ``CMakePackage`` base class comes with the following phases:

#. ``cmake`` - generate the Makefile
#. ``build`` - build the package

@@ -130,8 +130,8 @@ Adding flags to cmake

To add additional flags to the ``cmake`` call, simply override the
``cmake_args`` function. The following example defines values for the flags
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:

.. code-block:: python
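
   # The example body is elided from this diff; a sketch of such an override,
   # using the flag names from the prose above (the "hdf5" and "threads"
   # variant names are illustrative):
   def cmake_args(self):
       args = [
           # with the helper functions:
           self.define("WHATEVER", "somevalue"),
           self.define_from_variant("DETECT_HDF5", "hdf5"),
           self.define_from_variant("THREADS", "threads"),
           # and without them, spelled out by hand:
           "-DENABLE_BROKEN_FEATURE:BOOL=OFF",
       ]
       return args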

@@ -32,7 +32,7 @@ oneAPI packages or use::

For more information on a specific package, do::

   spack info --all <package-name>
   spack info <package-name>

Intel no longer releases new versions of Parallel Studio, which can be
used in Spack via the :ref:`intelpackage`. All of its components can

@@ -5,11 +5,11 @@

.. _luapackage:

---
Lua
---
------------
LuaPackage
------------

The ``Lua`` build-system is a helper for the common case of Lua packages that provide
LuaPackage is a helper for the common case of Lua packages that provide
a rockspec file. This is not meant to take a rock archive, but to build
a source archive or repository that provides a rockspec, which should cover
most lua packages. In the case a Lua package builds by Make rather than

@@ -19,7 +19,7 @@ luarocks, prefer MakefilePackage.

Phases
^^^^^^

The ``LuaBuilder`` and ``LuaPackage`` base classes come with the following phases:
The ``LuaPackage`` base class comes with the following phases:

#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
#. ``preprocess`` - adjust sources or rockspec to fix build

@@ -5,9 +5,9 @@

.. _makefilepackage:

--------
Makefile
--------
---------------
MakefilePackage
---------------

The most primitive build system a package can use is a plain Makefile.
Makefiles are simple to write for small projects, but they usually

@@ -18,7 +18,7 @@ variables.

Phases
^^^^^^

The ``MakefileBuilder`` and ``MakefilePackage`` base classes come with 3 phases:
The ``MakefilePackage`` base class comes with 3 phases:

#. ``edit`` - edit the Makefile
#. ``build`` - build the project

@@ -5,9 +5,9 @@

.. _mavenpackage:

-----
Maven
-----
------------
MavenPackage
------------

Apache Maven is a general-purpose build system that does not rely
on Makefiles to build software. It is designed for building and

@@ -17,7 +17,7 @@ managing any Java-based project.

Phases
^^^^^^

The ``MavenBuilder`` and ``MavenPackage`` base classes come with the following phases:
The ``MavenPackage`` base class comes with the following phases:

#. ``build`` - compile code and package into a JAR file
#. ``install`` - copy to installation prefix

@@ -5,9 +5,9 @@

.. _mesonpackage:

-----
Meson
-----
------------
MesonPackage
------------

Much like Autotools and CMake, Meson is a build system. But it is
meant to be both fast and as user friendly as possible. GNOME's goal

@@ -17,7 +17,7 @@ is to port modules to use the Meson build system.

Phases
^^^^^^

The ``MesonBuilder`` and ``MesonPackage`` base classes come with the following phases:
The ``MesonPackage`` base class comes with the following phases:

#. ``meson`` - generate ninja files
#. ``build`` - build the project

350
lib/spack/docs/build_systems/multiplepackage.rst
Normal file
@@ -0,0 +1,350 @@

.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _multiplepackage:

----------------------
Multiple Build Systems
----------------------

Quite frequently, a package will change build systems from one version to the
next. For example, a small project that once used a single Makefile to build
may now require Autotools to handle the increased number of files that need to
be compiled. Or, a package that once used Autotools may switch to CMake for
Windows support. In this case, it becomes a bit more challenging to write a
single build recipe for this package in Spack.

There are several ways that this can be handled in Spack:

#. Subclass the new build system, and override phases as needed (preferred)
#. Subclass ``Package`` and implement ``install`` as needed
#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system
#. Rename the old package to ``*-legacy`` and create a new package
#. Move the old package to a ``legacy`` repository and create a new package
#. Drop older versions that only support the older build system

Of these options, 1 is preferred, and will be demonstrated in this
documentation. Options 3-5 have issues with concretization, so they shouldn't
be used. Options 4-5 also don't support more than two build systems. Option 6
only works if the old versions are no longer needed. Option 1 is preferred over
2 because it makes it easier to drop the old build system entirely.

The exact syntax of the package depends on which build systems you need to
support. Below are a couple of common examples.

^^^^^^^^^^^^^^^^^^^^^
Makefile -> Autotools
^^^^^^^^^^^^^^^^^^^^^

Let's say we have the following package:

.. code-block:: python

   class Foo(MakefilePackage):
       version("1.2.0", sha256="...")

       def edit(self, spec, prefix):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       def install(self, spec, prefix):
           install_tree(".", prefix)


The package subclasses from :ref:`makefilepackage`, which has three phases:

#. ``edit`` (does nothing by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

In this case, the ``install`` phase needed to be overridden because the
Makefile did not have an install target. We also modify the Makefile to use
Spack's compiler wrappers. The default ``build`` phase is not changed.

Starting with version 1.3.0, we want to use Autotools to build instead.
:ref:`autotoolspackage` has four phases:

#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

If the only version we need to support is 1.3.0, the package would look as
simple as:

.. code-block:: python

   class Foo(AutotoolsPackage):
       version("1.3.0", sha256="...")

       def configure_args(self):
           return ["--enable-shared"]


In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.

If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:

.. code-block:: python

   class Foo(AutotoolsPackage):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...", deprecated=True)

       def configure_args(self):
           return ["--enable-shared"]

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def autoreconf(self, spec, prefix):
           pass

       @when("@:1.2")
       def configure(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)


There are a few interesting things to note here:

* We added ``deprecated=True`` to version 1.2.0. This signifies that version
  1.2.0 is deprecated and shouldn't be used. However, if a user still relies
  on version 1.2.0, it's still there and builds just fine.
* We moved the contents of the ``edit`` phase to the ``patch`` function. Since
  ``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this
  step to be executed is to move it to the ``patch`` function, which always
  gets run.
* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old
  Makefile-based build system doesn't use these, we ignore these phases when
  building ``foo@1.2.0``.
* The ``@when`` decorator is used to override these phases only for older
  versions. The default methods are used for ``foo@1.3:``.

Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.

^^^^^^^^^^^^^^^^^^
Autotools -> CMake
^^^^^^^^^^^^^^^^^^

Let's say we have the following package:

.. code-block:: python

   class Bar(AutotoolsPackage):
       version("1.2.0", sha256="...")

       def configure_args(self):
           return ["--enable-shared"]


The package subclasses from :ref:`autotoolspackage`, which has four phases:

#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.

Starting with version 1.3.0, we want to use CMake to build instead.
:ref:`cmakepackage` has three phases:

#. ``cmake`` (runs ``cmake ...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

If the only version we need to support is 1.3.0, the package would look as
simple as:

.. code-block:: python

   class Bar(CMakePackage):
       version("1.3.0", sha256="...")

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]


In this case, we use the default methods for each phase and only override
``cmake_args`` to specify additional flags to pass to ``cmake``.

If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:

.. code-block:: python

   class Bar(CMakePackage):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...", deprecated=True)

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.2.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@:1.2")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())


There are a few interesting things to note here:

* We added ``deprecated=True`` to version 1.2.0. This signifies that version
  1.2.0 is deprecated and shouldn't be used. However, if a user still relies
  on version 1.2.0, it's still there and builds just fine.
* Since CMake and Autotools are so similar, we only need to override the
  ``cmake`` phase; we can use the default ``build`` and ``install`` phases.
* We override ``cmake`` to run ``./configure`` for older versions.
  ``configure_args`` remains the same.
* The ``@when`` decorator is used to override these phases only for older
  versions. The default methods are used for ``bar@1.3:``.

Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems for the same version
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

During the transition from one build system to another, developers often
support multiple build systems at the same time. Spack can only use a single
build system for a single version. To decide which build system to use for a
particular version, take the following things into account:

1. If the developers explicitly state that one build system is preferred over
   another, use that one.
2. If one build system is considered "experimental" while another is considered
   "stable", use the stable build system.
3. Otherwise, use the newer build system.

The developer preference for which build system to use can change over time as
a newer build system becomes stable/recommended.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dropping support for old build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

When older versions of a package don't support a newer build system, it can be
tempting to simply delete them from a package. This significantly reduces
package complexity and makes the build recipe much easier to maintain. However,
other packages or Spack users may rely on these older versions. The recommended
approach is to first support both build systems (as demonstrated above),
:ref:`deprecate <deprecate>` versions that rely on the old build system, and
remove those versions and any phases that needed to be overridden in the next
Spack release.

^^^^^^^^^^^^^^^^^^^^^^^^^^^
Three or more build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^

In rare cases, a package may change build systems multiple times. For example,
a package may start with Makefiles, then switch to Autotools, then switch to
CMake. The same logic used above can be extended to any number of build systems.
For example:

.. code-block:: python

   class Baz(CMakePackage):
       version("1.4.0", sha256="...")  # CMake
       version("1.3.0", sha256="...")  # Autotools
       version("1.2.0", sha256="...")  # Makefile

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.3.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@1.3")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def cmake(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)


^^^^^^^^^^^^^^^^^^^
Additional examples
^^^^^^^^^^^^^^^^^^^

When writing new packages, it often helps to see examples of existing packages.
Here is an incomplete list of existing Spack packages that have changed build
systems before:

================ ===================== ================
Package          Previous Build System New Build System
================ ===================== ================
amber            custom                CMake
arpack-ng        Autotools             CMake
atk              Autotools             Meson
blast            None                  Autotools
dyninst          Autotools             CMake
evtgen           Autotools             CMake
fish             Autotools             CMake
gdk-pixbuf       Autotools             Meson
glib             Autotools             Meson
glog             Autotools             CMake
gmt              Autotools             CMake
gtkplus          Autotools             Meson
hpl              Makefile              Autotools
interproscan     Perl                  Maven
jasper           Autotools             CMake
kahip            SCons                 CMake
kokkos           Makefile              CMake
kokkos-kernels   Makefile              CMake
leveldb          Makefile              CMake
libdrm           Autotools             Meson
libjpeg-turbo    Autotools             CMake
mesa             Autotools             Meson
metis            None                  CMake
mpifileutils     Autotools             CMake
muparser         Autotools             CMake
mxnet            Makefile              CMake
nest             Autotools             CMake
neuron           Autotools             CMake
nsimd            CMake                 nsconfig
opennurbs        Makefile              CMake
optional-lite    None                  CMake
plasma           Makefile              CMake
preseq           Makefile              Autotools
protobuf         Autotools             CMake
py-pygobject     Autotools             Python
singularity      Autotools             Makefile
span-lite        None                  CMake
ssht             Makefile              CMake
string-view-lite None                  CMake
superlu          Makefile              CMake
superlu-dist     Makefile              CMake
uncrustify       Autotools             CMake
================ ===================== ================

Packages that support multiple build systems can be a bit confusing to write.
Don't hesitate to open an issue or draft pull request and ask for advice from
other Spack developers!

@@ -5,9 +5,9 @@

.. _octavepackage:

------
Octave
------
-------------
OctavePackage
-------------

Octave has its own build system for installing packages.

@@ -15,7 +15,7 @@ Octave has its own build system for installing packages.

Phases
^^^^^^

The ``OctaveBuilder`` and ``OctavePackage`` base classes have a single phase:
The ``OctavePackage`` base class has a single phase:

#. ``install`` - install the package

@@ -5,9 +5,9 @@

.. _perlpackage:

----
Perl
----
-----------
PerlPackage
-----------

Much like Octave, Perl has its own language-specific
build system.

@@ -16,7 +16,7 @@ build system.

Phases
^^^^^^

The ``PerlBuilder`` and ``PerlPackage`` base classes come with 3 phases that can be overridden:
The ``PerlPackage`` base class comes with 3 phases that can be overridden:

#. ``configure`` - configure the package
#. ``build`` - build the package

@@ -48,11 +48,8 @@ important to understand.

**build backend**
   Libraries used to define how to build a wheel. Examples
   include `setuptools <https://setuptools.pypa.io/>`__,
   `flit <https://flit.pypa.io/>`_,
   `poetry <https://python-poetry.org/>`_,
   `hatchling <https://hatch.pypa.io/latest/>`_,
   `meson <https://meson-python.readthedocs.io/>`_, and
   `pdm <https://pdm.fming.dev/latest/>`_.
   `flit <https://flit.readthedocs.io/>`_, and
   `poetry <https://python-poetry.org/>`_.

^^^^^^^^^^^
Downloading

@@ -176,9 +173,9 @@ package. The "Project description" tab may also contain a longer

description of the package. Either of these can be used to populate
the package docstring.

^^^^^^^^^^^^
Dependencies
^^^^^^^^^^^^
^^^^^^^^^^^^^
Build backend
^^^^^^^^^^^^^

Once you've determined the basic metadata for a package, the next
step is to determine the build backend. ``PythonPackage`` uses

@@ -216,33 +213,12 @@ Note that ``py-wheel`` is already listed as a build dependency in the

need to specify a specific version requirement or change the
dependency type.

See `PEP 517 <https://www.python.org/dev/peps/pep-0517/>`__ and
See `PEP 517 <https://www.python.org/dev/peps/pep-0517/>`_ and
`PEP 518 <https://www.python.org/dev/peps/pep-0518/>`_ for more
information on the design of ``pyproject.toml``.

Depending on which build backend a project uses, there are various
places that run-time dependencies can be listed. Most modern build
backends support listing dependencies directly in ``pyproject.toml``.
Look for dependencies under the following keys:

* ``requires-python`` under ``[project]``

  This specifies the version of Python that is required

* ``dependencies`` under ``[project]``

  These packages are required for building and installation. You can
  add them with ``type=('build', 'run')``.

* ``[project.optional-dependencies]``

  This section includes keys with lists of optional dependencies
  needed to enable those features. You should add a variant that
  optionally adds these dependencies. This variant should be ``False``
  by default.

Some build backends may have additional locations where dependencies
can be found.
places that run-time dependencies can be listed.
"""""""""
|
||||
distutils
|
||||
@@ -268,9 +244,9 @@ If the ``pyproject.toml`` lists ``setuptools.build_meta`` as a
|
||||
``build-backend``, or if the package has a ``setup.py`` that imports
|
||||
``setuptools``, or if the package has a ``setup.cfg`` file, then it
|
||||
uses setuptools to build. Setuptools is a replacement for the
|
||||
distutils library, and has almost the exact same API. In addition to
|
||||
``pyproject.toml``, dependencies can be listed in the ``setup.py`` or
|
||||
``setup.cfg`` file. Look for the following arguments:
|
||||
distutils library, and has almost the exact same API. Dependencies
|
||||
can be listed in the ``setup.py`` or ``setup.cfg`` file. Look for the
|
||||
following arguments:
|
||||
|
||||
* ``python_requires``
|
||||
|
||||

@@ -315,22 +291,25 @@ listed directly in the ``pyproject.toml`` file. Older versions of

flit used to store this info in a ``flit.ini`` file, so check for
this too.

In addition to the default ``pyproject.toml`` keys listed above,
older versions of flit may use the following keys:
Either of these files may contain keys like:

* ``requires`` under ``[tool.flit.metadata]``
* ``requires-python``

  This specifies the version of Python that is required

* ``dependencies`` or ``requires``

  These packages are required for building and installation. You can
  add them with ``type=('build', 'run')``.

* ``[tool.flit.metadata.requires-extra]``
* ``project.optional-dependencies`` or ``requires-extra``

  This section includes keys with lists of optional dependencies
  needed to enable those features. You should add a variant that
  optionally adds these dependencies. This variant should be False
  by default.

See https://flit.pypa.io/en/latest/pyproject_toml.html for
See https://flit.readthedocs.io/en/latest/pyproject_toml.html for
more information.

""""""

@@ -347,38 +326,6 @@ for specifying the version requirements. Note that ``~=`` works

differently in poetry than in setuptools and flit for versions that
start with a zero.

"""""""""
hatchling
"""""""""

If the ``pyproject.toml`` lists ``hatchling.build`` as the
``build-backend``, it uses the hatchling build system. Hatchling
uses the default ``pyproject.toml`` keys to list dependencies.

See https://hatch.pypa.io/latest/config/dependency/ for more
information.

"""""
meson
"""""

If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``,
it uses the meson build system. Meson uses the default
``pyproject.toml`` keys to list dependencies.

See https://meson-python.readthedocs.io/en/latest/usage/start.html
for more information.

"""
pdm
"""

If the ``pyproject.toml`` lists ``pdm.pep517.api`` as the ``build-backend``,
it uses the PDM build system. PDM uses the default ``pyproject.toml``
keys to list dependencies.

See https://pdm.fming.dev/latest/ for more information.

""""""
wheels
""""""

@@ -423,34 +370,6 @@ packages. However, the installation instructions for a package may

suggest passing certain flags to the ``setup.py`` call. The
``PythonPackage`` class has two techniques for doing this.

"""""""""""""""
Config settings
"""""""""""""""

These settings are passed to
`PEP 517 <https://peps.python.org/pep-0517/>`__ build backends.
For example, the ``py-scipy`` package allows you to specify the name of
the BLAS/LAPACK library you want pkg-config to search for:

.. code-block:: python

   depends_on('py-pip@22.1:', type='build')

   def config_settings(self, spec, prefix):
       return {
           'blas': spec['blas'].libs.names[0],
           'lapack': spec['lapack'].libs.names[0],
       }


.. note::

   This flag only works for packages that define a ``build-backend``
   in ``pyproject.toml``. Also, it is only supported by pip 22.1+,
   which requires Python 3.7+. For packages that still support Python
   3.6 and older, ``install_options`` should be used instead.
""""""""""""""
|
||||
Global options
|
||||
""""""""""""""
|
||||
@@ -470,16 +389,6 @@ has an optional dependency on ``libyaml`` that can be enabled like so:
|
||||
return options
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
Direct invocation of ``setup.py`` is
|
||||
`deprecated <https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html>`_.
|
||||
This flag forces pip to use a deprecated installation procedure.
|
||||
It should only be used in packages that don't define a
|
||||
``build-backend`` in ``pyproject.toml`` or packages that still
|
||||
support Python 3.6 and older.
|
||||
|
||||
|
||||
"""""""""""""""
|
||||
Install options
|
||||
"""""""""""""""
|
||||
@@ -500,16 +409,6 @@ allows you to specify the directories to search for ``libyaml``:
|
||||
return options
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
Direct invocation of ``setup.py`` is
|
||||
`deprecated <https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html>`_.
|
||||
This flag forces pip to use a deprecated installation procedure.
|
||||
It should only be used in packages that don't define a
|
||||
``build-backend`` in ``pyproject.toml`` or packages that still
|
||||
support Python 3.6 and older.
|
||||
|
||||
|
||||
^^^^^^^
|
||||
Testing
|
||||
^^^^^^^
|
||||

@@ -582,19 +481,6 @@ libraries. Make sure not to add modules/packages containing the word

"test", as these likely won't end up in the installation directory,
or may require test dependencies like pytest to be installed.

Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g.,
in case the ``plotting`` modules should be excluded from the
automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
'nilearn.plotting', 'nilearn.plotting.data']`` set:

.. code-block:: python

   skip_modules = ['nilearn.plotting']

This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``.

Import tests can be run during the installation using ``spack install
--test=root`` or at any time after the installation using
``spack test run``.

@@ -724,9 +610,10 @@ extends vs. depends_on

This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
``extends`` and ``depends_on`` are very similar, but ``extends`` adds
the ability to *activate* the package. Activation involves symlinking
everything in the installation prefix of the package to the installation
prefix of Python. This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

When deciding between ``extends`` and ``depends_on``, the best rule of

@@ -734,7 +621,7 @@ thumb is to check the installation prefix. If Python libraries are

installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
don't use ``extends``.
don't use ``extends``, as symlinking the package wouldn't be useful.
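
A minimal sketch of the two choices (package names illustrative):

.. code-block:: python

   class PyExample(PythonPackage):
       # installs into site-packages, so share python's prefix
       extends("python")

   class ExampleTool(Package):
       # only ships binaries in <prefix>/bin; a plain dependency suffices
       depends_on("python", type=("build", "run"))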

^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack

@@ -777,8 +664,5 @@ For more information on build and installation frontend tools, see:

For more information on build backend tools, see:

* setuptools: https://setuptools.pypa.io/
* flit: https://flit.pypa.io/
* flit: https://flit.readthedocs.io/
* poetry: https://python-poetry.org/
* hatchling: https://hatch.pypa.io/latest/
* meson: https://meson-python.readthedocs.io/
* pdm: https://pdm.fming.dev/latest/

@@ -5,9 +5,9 @@

.. _qmakepackage:

-----
QMake
-----
------------
QMakePackage
------------

Much like Autotools and CMake, QMake is a build-script generator
designed by the developers of Qt. In its simplest form, Spack's

@@ -29,7 +29,7 @@ variables or edit ``*.pro`` files to get things working properly.

Phases
^^^^^^

The ``QMakeBuilder`` and ``QMakePackage`` base classes come with the following phases:
The ``QMakePackage`` base class comes with the following phases:

#. ``qmake`` - generate Makefiles
#. ``build`` - build the project

@@ -5,9 +5,9 @@

.. _racketpackage:

------
Racket
------
-------------
RacketPackage
-------------

Much like Python, Racket packages and modules have their own special build system.
To learn more about the specifics of Racket package system, please refer to the

@@ -17,7 +17,7 @@ To learn more about the specifics of Racket package system, please refer to the

Phases
^^^^^^

The ``RacketBuilder`` and ``RacketPackage`` base classes provide an ``install`` phase that
The ``RacketPackage`` base class provides an ``install`` phase that
can be overridden, corresponding to the use of:

.. code-block:: console

@@ -19,7 +19,7 @@ new Spack packages for.

Phases
^^^^^^

The ``RBuilder`` and ``RPackage`` base classes have a single phase:
The ``RPackage`` base class has a single phase:

#. ``install`` - install the package

@@ -193,10 +193,10 @@ Build system dependencies

As an extension of the R ecosystem, your package will obviously depend
on R to build and run. Normally, we would use ``depends_on`` to express
this, but for R packages, we use ``extends``. This implies a special
dependency on R, which is used to set environment variables such as
``R_LIBS`` uniformly. Since every R package needs this, the ``RPackage``
base class contains:
this, but for R packages, we use ``extends``. ``extends`` is similar to
``depends_on``, but adds an additional feature: the ability to "activate"
the package by symlinking it to the R installation directory. Since
every R package needs this, the ``RPackage`` base class contains:

.. code-block:: python
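
   # elided in this diff; the declaration the prose refers to is the
   # class-level directive:
   extends("r")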

@@ -5,9 +5,9 @@

.. _rubypackage:

----
Ruby
----
-----------
RubyPackage
-----------

Like Perl, Python, and R, Ruby has its own build system for
installing Ruby gems.

@@ -16,7 +16,7 @@ installing Ruby gems.

Phases
^^^^^^

The ``RubyBuilder`` and ``RubyPackage`` base classes provide the following phases that
The ``RubyPackage`` base class provides the following phases that
can be overridden:

#. ``build`` - build everything needed to install

@@ -5,9 +5,9 @@

.. _sconspackage:

-----
SCons
-----
------------
SConsPackage
------------

SCons is a general-purpose build system that does not rely on
Makefiles to build software. SCons is written in Python, and handles

@@ -42,7 +42,7 @@ As previously mentioned, SCons allows developers to add subcommands like

   $ scons install


To facilitate this, the ``SConsBuilder`` and ``SConsPackage`` base classes provide the
To facilitate this, the ``SConsPackage`` base class provides the
following phases:

#. ``build`` - build the package

@@ -5,9 +5,9 @@

.. _sippackage:

---
SIP
---
----------
SIPPackage
----------

SIP is a tool that makes it very easy to create Python bindings for C and C++
libraries. It was originally developed to create PyQt, the Python bindings for

@@ -22,7 +22,7 @@ provides support functions to the automatically generated code.

Phases
^^^^^^

The ``SIPBuilder`` and ``SIPPackage`` base classes come with the following phases:
The ``SIPPackage`` base class comes with the following phases:

#. ``configure`` - configure the package
#. ``build`` - build the package

@@ -1,55 +0,0 @@

.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _sourceforgepackage:

------------------
SourceforgePackage
------------------

``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.


^^^^^^^
Methods
^^^^^^^

This package provides a method for populating mirror URLs.

**urls**

   This method returns a list of possible URLs for package source.
   It is decorated with `property` so its results are treated as
   a package attribute.

Refer to
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
for information on how Spack uses the `urls` attribute during
fetching.

^^^^^
Usage
^^^^^

This helper package can be added to your package by adding it as a base
class of your package and defining the relative location of an archive
file for one version of your software.

.. code-block:: python
   :emphasize-lines: 1,3

   class MyPackage(AutotoolsPackage, SourceforgePackage):
       ...
       sourceforge_mirror_path = "my-package/mypackage.1.0.0.tar.gz"
       ...

Over 40 packages are using the ``SourceforgePackage`` mix-in as of
July 2022, so there are multiple packages to choose from if you want
to see a real example.
@@ -5,9 +5,9 @@

.. _wafpackage:

---
Waf
---
----------
WafPackage
----------

Like SCons, Waf is a general-purpose build system that does not rely
on Makefiles to build software.

@@ -16,7 +16,7 @@ on Makefiles to build software.
Phases
^^^^^^

The ``WafBuilder`` and ``WafPackage`` base classes come with the following phases:
The ``WafPackage`` base class comes with the following phases:

#. ``configure`` - configure the project
#. ``build`` - build the project
@@ -32,42 +32,37 @@
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
link_name = os.path.abspath("_spack_root")
if not os.path.exists(link_name):
    os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
sys.path.insert(0, os.path.abspath('_spack_root/lib/spack/external'))
sys.path.insert(0, os.path.abspath('_spack_root/lib/spack/external/pytest-fallback'))

if sys.version_info[0] < 3:
    sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
    sys.path.insert(
        0, os.path.abspath('_spack_root/lib/spack/external/yaml/lib'))
else:
    sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
    sys.path.insert(
        0, os.path.abspath('_spack_root/lib/spack/external/yaml/lib3'))

sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
sys.path.append(os.path.abspath('_spack_root/lib/spack/'))

# Add the Spack bin directory to the path so that we can use its output in docs.
os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root")
os.environ["PATH"] += "%s%s" % (os.pathsep, os.path.abspath("_spack_root/bin"))
os.environ['SPACK_ROOT'] = os.path.abspath('_spack_root')
os.environ['PATH'] += "%s%s" % (os.pathsep, os.path.abspath('_spack_root/bin'))

# Set an environment variable so that colify will print output like it would to
# a terminal.
os.environ["COLIFY_SIZE"] = "25x120"
os.environ["COLUMNS"] = "120"
os.environ['COLIFY_SIZE'] = '25x120'
os.environ['COLUMNS'] = '120'

# Generate full package list if needed
subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
subprocess.call([
    'spack', 'list', '--format=html', '--update=package_list.html'])

# Generate a command index if an update is needed
subprocess.call(
    [
        "spack",
        "commands",
        "--format=rst",
        "--header=command_index.in",
        "--update=command_index.rst",
    ]
    + glob("*rst")
)
subprocess.call([
    'spack', 'commands',
    '--format=rst',
    '--header=command_index.in',
    '--update=command_index.rst'] + glob('*rst'))

#
# Run sphinx-apidoc

@@ -77,12 +72,12 @@
# Without this, the API Docs will never actually update
#
apidoc_args = [
    "--force",  # Overwrite existing files
    "--no-toc",  # Don't create a table of contents file
    "--output-dir=.",  # Directory to place all output
    '--force',        # Overwrite existing files
    '--no-toc',       # Don't create a table of contents file
    '--output-dir=.', # Directory to place all output
]
sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"])
sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])
sphinx_apidoc(apidoc_args + ['_spack_root/lib/spack/spack'])
sphinx_apidoc(apidoc_args + ['_spack_root/lib/spack/llnl'])

# Enable todo items
todo_include_todos = True

@@ -92,12 +87,10 @@
#
class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if "refspecific" in node:
            del node["refspecific"]
        if 'refspecific' in node:
            del node['refspecific']
        return super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode
        )

            env, fromdocname, builder, typ, target, node, contnode)

#
# Disable tabs to space expansion in code blocks
@@ -110,58 +103,51 @@ def parse(self, inputstring, document):
        inputstring = StringList(lines, document.current_source)
        super().parse(inputstring, document)


def setup(sphinx):
    sphinx.add_domain(PatchedPythonDomain, override=True)
    sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)


# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "3.4"
needs_sphinx = '3.4'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.graphviz",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",
    "sphinx.ext.todo",
    "sphinx.ext.viewcode",
    "sphinx_design",
    "sphinxcontrib.programoutput",
    'sphinx.ext.autodoc',
    'sphinx.ext.graphviz',
    'sphinx.ext.intersphinx',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinxcontrib.programoutput',
]

# Set default graphviz options
graphviz_dot_args = [
    "-Grankdir=LR",
    "-Gbgcolor=transparent",
    "-Nshape=box",
    "-Nfontname=monaco",
    "-Nfontsize=10",
]
    '-Grankdir=LR', '-Gbgcolor=transparent',
    '-Nshape=box', '-Nfontname=monaco', '-Nfontsize=10']

# Get nice vector graphics
graphviz_output_format = "svg"


# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = ".rst"
source_suffix = '.rst'

# The encoding of source files.
source_encoding = "utf-8-sig"
source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = "index"
master_doc = 'index'

# General information about the project.
project = u"Spack"
copyright = u"2013-2021, Lawrence Livermore National Laboratory."
project = u'Spack'
copyright = u'2013-2021, Lawrence Livermore National Laboratory.'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -170,16 +156,16 @@ def setup(sphinx):
# The short X.Y version.
import spack

version = ".".join(str(s) for s in spack.spack_version_info[:2])
version = '.'.join(str(s) for s in spack.spack_version_info[:2])
# The full version, including alpha/beta/rc tags.
release = spack.spack_version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
#language = None

# Places to look for .po/.mo files for doc translations
# locale_dirs = []
#locale_dirs = []

# Sphinx gettext settings
gettext_compact = True

@@ -187,46 +173,41 @@ def setup(sphinx):

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
#today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build", "_spack_root", ".spack-env"]
exclude_patterns = ['_build', '_spack_root', '.spack-env']

nitpicky = True
nitpick_ignore = [
    # Python classes that intersphinx is unable to resolve
    ("py:class", "argparse.HelpFormatter"),
    ("py:class", "contextlib.contextmanager"),
    ("py:class", "module"),
    ("py:class", "_io.BufferedReader"),
    ("py:class", "unittest.case.TestCase"),
    ("py:class", "_frozen_importlib_external.SourceFileLoader"),
    ("py:class", "clingo.Control"),
    ("py:class", "six.moves.urllib.parse.ParseResult"),
    ('py:class', 'argparse.HelpFormatter'),
    ('py:class', 'contextlib.contextmanager'),
    ('py:class', 'module'),
    ('py:class', '_io.BufferedReader'),
    ('py:class', 'unittest.case.TestCase'),
    ('py:class', '_frozen_importlib_external.SourceFileLoader'),
    # Spack classes that are private and we don't want to expose
    ("py:class", "spack.provider_index._IndexBase"),
    ("py:class", "spack.repo._PrependFileLoader"),
    ("py:class", "spack.build_systems._checks.BaseBuilder"),
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.VersionBase"),
    ('py:class', 'spack.provider_index._IndexBase'),
    ('py:class', 'spack.repo._PrependFileLoader'),
]

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
@@ -237,151 +218,156 @@ def setup(sphinx):

class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"


import pkg_resources

dist = pkg_resources.Distribution(__file__)
sys.path.append(".")  # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
sys.path.append('.')  # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse('spack = conf:SpackStyle', dist=dist)
dist._ep_map = {'pygments.styles': {'plugin1': ep}}
pkg_resources.working_set.add(dist)

pygments_style = "spack"
pygments_style = 'spack'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
#modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {"logo_only": True}
html_theme_options = { 'logo_only' : True }

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ["_themes"]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "_spack_root/share/spack/logo/spack-logo-white-text.svg"
html_logo = '_spack_root/share/spack/logo/spack-logo-white-text.svg'

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_spack_root/share/spack/logo/favicon.ico"
html_favicon = '_spack_root/share/spack/logo/favicon.ico'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = "%b %d, %Y"
html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
#html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True
#html_domain_indices = True

# If false, no index is generated.
# html_use_index = True
#html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False
#html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = False
#html_show_sphinx = False

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = "Spackdoc"
htmlhelp_basename = 'Spackdoc'


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
    ('index', 'Spack.tex', u'Spack Documentation',
     u'Todd Gamblin', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
#latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []
#latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True
#latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
man_pages = [
    ('index', 'spack', u'Spack Documentation',
     [u'Todd Gamblin'], 1)
]

# If true, show URL addresses after external links.
# man_show_urls = False
#man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------
@@ -390,25 +376,19 @@ class SpackStyle(DefaultStyle):
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (
        "index",
        "Spack",
        u"Spack Documentation",
        u"Todd Gamblin",
        "Spack",
        "One line description of project.",
        "Miscellaneous",
    ),
    ('index', 'Spack', u'Spack Documentation',
     u'Todd Gamblin', 'Spack', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
#texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
#texinfo_show_urls = 'footnote'


# -- Extension configuration -------------------------------------------------
@@ -19,9 +19,9 @@ see the default settings by looking at
These settings can be overridden in ``etc/spack/config.yaml`` or
``~/.spack/config.yaml``. See :ref:`configuration-scopes` for details.

---------------------
``install_tree:root``
---------------------
--------------------
``install_tree``
--------------------

The location where Spack will install packages and their dependencies.
Default is ``$spack/opt/spack``.

@@ -224,9 +224,9 @@ them). Please note that we currently disable ccache's ``hash_dir``
feature to avoid an issue with the stage directory (see
https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).

-----------------------
``shared_linking:type``
-----------------------
------------------
``shared_linking``
------------------

Control whether Spack embeds ``RPATH`` or ``RUNPATH`` attributes in ELF binaries
so that they can find their dependencies. Has no effect on macOS.

@@ -245,52 +245,6 @@ the loading object.

DO NOT MIX the two options within the same install tree.

-----------------------
``shared_linking:bind``
-----------------------

This is an *experimental option* that controls whether Spack embeds absolute paths
to needed shared libraries in ELF executables and shared libraries on Linux. Setting
this option to ``true`` has two advantages:

1. **Improved startup time**: when running an executable, the dynamic loader does not
   have to perform a search for needed libraries, they are loaded directly.
2. **Reliability**: libraries loaded at runtime are those that were linked to. This
   minimizes the risk of accidentally picking up system libraries.

In the current implementation, Spack sets the soname (shared object name) of
libraries to their install path upon installation. This has two implications:

1. binding does not apply to libraries installed *before* the option was enabled;
2. toggling the option off does *not* prevent binding of libraries installed when
   the option was still enabled.

It is also worth noting that:

1. Applications relying on ``dlopen(3)`` will continue to work, even when they open
   a library by name. This is because ``RPATH``\s are retained in binaries also
   when ``bind`` is enabled.
2. ``LD_PRELOAD`` continues to work for the typical use case of overriding
   symbols, such as preloading a library with a more efficient ``malloc``.
   However, the preloaded library will be loaded *additionally to*, instead of
   *in place of* another library with the same name --- this can be problematic
   in very rare cases where libraries rely on a particular ``init`` or ``fini``
   order.

.. note::

   In some cases packages provide *stub libraries* that only contain an interface
   for linking, but lack an implementation for runtime. An example of this is
   ``libcuda.so``, provided by the CUDA toolkit; it can be used to link against,
   but the library needed at runtime is the one installed with the CUDA driver.
   To avoid binding those libraries, they can be marked as non-bindable using
   a property in the package:

   .. code-block:: python

      class Example(Package):
          non_bindable_shared_objects = ["libinterface.so"]

----------------------
``terminal_title``
----------------------

@@ -405,17 +405,6 @@ Spack understands several special variables. These are:
* ``$user``: name of the current user
* ``$user_cache_path``: user cache directory (``~/.spack`` unless
  :ref:`overridden <local-config-overrides>`)
* ``$architecture``: the architecture triple of the current host, as
  detected by Spack.
* ``$arch``: alias for ``$architecture``.
* ``$platform``: the platform of the current host, as detected by Spack.
* ``$operating_system``: the operating system of the current host, as
  detected by the ``distro`` python module.
* ``$os``: alias for ``$operating_system``.
* ``$target``: the ISA target for the current host, as detected by
  ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
* ``$target_family``. The target family for the current host, as
  detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.

Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:
@@ -560,7 +549,7 @@ down the problem:

You can see above that the ``build_jobs`` and ``debug`` settings are
built in and are not overridden by a configuration file. The
``verify_ssl`` setting comes from the ``--insecure`` option on the
``verify_ssl`` setting comes from the ``--insceure`` option on the
command line. ``dirty`` and ``install_tree`` come from the custom
scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration
options come from the default configuration files that ship with Spack.
@@ -59,7 +59,7 @@ other techniques to minimize the size of the final image:
    && echo "  specs:" \
    && echo "  - gromacs+mpi" \
    && echo "  - mpich" \
    && echo "  concretizer:" \
    && echo "  concretizer: together" \
    && echo "    unify: true" \
    && echo "  config:" \
    && echo "    install_tree: /opt/software" \
@@ -71,7 +71,7 @@ locally to speed up the review process.
new release that is causing problems. If this is the case, please file an issue.


We currently test against Python 2.7 and 3.6-3.10 on both macOS and Linux and
We currently test against Python 2.7 and 3.5-3.9 on both macOS and Linux and
perform 3 types of tests:

.. _cmd-spack-unit-test:
@@ -253,6 +253,27 @@ to update them.
multiple runs of ``spack style`` just to re-compute line numbers and
makes it much easier to fix errors directly off of the CI output.

.. warning::

   Flake8 and ``pep8-naming`` require a number of dependencies in order
   to run. If you installed ``py-flake8`` and ``py-pep8-naming``, the
   easiest way to ensure the right packages are on your ``PYTHONPATH`` is
   to run::

      spack activate py-flake8
      spack activate pep8-naming

   so that all of the dependencies are symlinked to a central
   location. If you see an error message like:

   .. code-block:: console

      Traceback (most recent call last):
        File: "/usr/bin/flake8", line 5, in <module>
          from pkg_resources import load_entry_point
      ImportError: No module named pkg_resources

   that means Flake8 couldn't find setuptools in your ``PYTHONPATH``.

^^^^^^^^^^^^^^^^^^^
Documentation Tests
@@ -288,9 +309,13 @@ All of these can be installed with Spack, e.g.

.. code-block:: console

   $ spack load py-sphinx py-sphinx-rtd-theme py-sphinxcontrib-programoutput
   $ spack activate py-sphinx
   $ spack activate py-sphinx-rtd-theme
   $ spack activate py-sphinxcontrib-programoutput

so that all of the dependencies are added to PYTHONPATH. If you see an error message
so that all of the dependencies are symlinked into that Python's
tree. Alternatively, you could arrange for their library
directories to be added to PYTHONPATH. If you see an error message
like:

.. code-block:: console
@@ -107,6 +107,7 @@ with a high level view of Spack's directory structure:
         llnl/                  <- some general-use libraries

         spack/                 <- spack module; contains Python code
            analyzers/          <- modules to run analysis on installed packages
            build_systems/      <- modules for different build systems
            cmd/                <- each file in here is a spack subcommand
            compilers/          <- compiler description files
@@ -149,9 +150,11 @@ grouped by functionality.
Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^

:mod:`spack.package_base`
   Contains the :class:`~spack.package_base.PackageBase` class, which
   is the superclass for all packages in Spack.
:mod:`spack.package`
   Contains the :class:`~spack.package_base.Package` class, which
   is the superclass for all packages in Spack. Methods on ``Package``
   implement all phases of the :ref:`package lifecycle
   <package-lifecycle>` and manage the build process.

:mod:`spack.util.naming`
   Contains functions for mapping between Spack package names,
@@ -239,6 +242,22 @@ Unit tests
   Implements Spack's test suite. Add a module and put its name in
   the test suite in ``__init__.py`` to add more unit tests.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Research and Monitoring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

:mod:`spack.monitor`
   Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
   the ``spack install`` and ``spack analyze`` commands to send build and
   package metadata up to a `Spack Monitor
   <https://github.com/spack/spack-monitor>`_ server.


:mod:`spack.analyzers`
   A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
   that provides base functions to run, save, and (optionally) upload analysis
   results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.


^^^^^^^^^^^^^
Other Modules
@@ -282,6 +301,240 @@ Most spack commands look something like this:
The information in Package files is used at all stages in this
process.

Conceptually, packages are overloaded. They contain:

-------------
Stage objects
-------------


.. _writing-analyzers:

-----------------
Writing analyzers
-----------------

To write an analyzer, you should add a new python file to the
analyzers module directory at ``lib/spack/spack/analyzers``.
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
to add an analyzer class ``Myanalyzer`` you would write it to
``spack/analyzers/myanalyzer.py`` and import and
use the base as follows:

.. code-block:: python

   from .analyzer_base import AnalyzerBase

   class Myanalyzer(AnalyzerBase):


Note that the class name is your module file name, all lowercase
except for the first capital letter. You can look at other analyzers in
that analyzer directory for examples. The guide here will tell you about the basic functions needed.

^^^^^^^^^^^^^^^^^^^^^^^^^
Analyzer Output Directory
^^^^^^^^^^^^^^^^^^^^^^^^^

By default, when you run ``spack analyze run``, an analyzer output directory will
be created in your Spack user directory in your ``$HOME``. The reason we output here
is because the install directory might not always be writable.

.. code-block:: console

   ~/.spack/
       analyzers

Result files will be written here, organized in subfolders in the same structure
as the package, with each analyzer owning its own subfolder. For example:


.. code-block:: console

   $ tree ~/.spack/analyzers/
   /home/spackuser/.spack/analyzers/
   └── linux-ubuntu20.04-skylake
       └── gcc-9.3.0
           └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
               ├── environment_variables
               │   └── spack-analyzer-environment-variables.json
               ├── install_files
               │   └── spack-analyzer-install-files.json
               └── libabigail
                   └── lib
                       └── spack-analyzer-libabigail-libz.so.1.2.11.xml


Notice that for the libabigail analyzer, since results are generated per object,
we honor the object's folder in case there are equivalently named files in
different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.
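
As illustrated by the tree above, a result file path is simply the analyzer
output root joined with the spec's layout and the analyzer name. The sketch
below shows that composition; the attribute names and paths are illustrative
assumptions, not Spack's exact internals:

.. code-block:: python

   import os

   # Assumed output root, mirroring the default location described above.
   output_root = os.path.expanduser("~/.spack/analyzers")

   # <platform-os-target>/<compiler>/<name>-<version>-<hash>/<analyzer>/
   result_dir = os.path.join(
       output_root,
       "linux-ubuntu20.04-skylake",
       "gcc-9.3.0",
       "zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2",
       "install_files",
   )
   outfile = os.path.join(result_dir, "spack-analyzer-install-files.json")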

^^^^^^^^^^^^^^^^^
Analyzer Metadata
^^^^^^^^^^^^^^^^^

Your analyzer is required to have the class attributes ``name``, ``outfile``,
and ``description``. These are printed to the user when they use the subcommand
``spack analyze list-analyzers``. Here is an example.
As we mentioned above, note that this analyzer would live in a module named
``libabigail.py`` in the analyzers folder so that the class can be discovered.


.. code-block:: python

   class Libabigail(AnalyzerBase):

       name = "libabigail"
       outfile = "spack-analyzer-libabigail.json"
       description = "Application Binary Interface (ABI) features for objects"


This means that the name and output file should be unique for your analyzer.
Note that "all" cannot be the name of an analyzer, as this key is used to indicate
that the user wants to run all analyzers.

.. _analyzer_run_function:


^^^^^^^^^^^^^^^^^^^^^^^^
An analyzer run Function
^^^^^^^^^^^^^^^^^^^^^^^^

The core of an analyzer is its ``run()`` function, which should accept no
arguments. You can assume your analyzer has the package spec of interest at ``self.spec``
and it's up to the run function to generate whatever analysis data you need,
and then return the object with a key as the analyzer name. The result data
should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
and one of ``value`` or ``binary_value``. The install file should be a relative
path, not an absolute path. For example, let's say we extract a metric called
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
We might have data that looks like this:

.. code-block:: python

   result = {"name": "metric", "analyzer_name": "thebest-analyzer", "value": "1", "install_file": "bin/wget"}


We'd then return it as follows - note that the key is the analyzer name at ``self.name``.

.. code-block:: python

   return {self.name: result}


This will save the complete result to the analyzer metadata folder, as described
previously. If you want support for adding a different kind of metadata (e.g.,
not associated with an install file) then the monitor server would need to be updated
to support this first.
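
Putting these pieces together, a complete ``run()`` might walk the install
prefix and build one result entry per file. This is only a hedged sketch under
the result format above; the "file-size" metric and the walking logic are
illustrative, not an actual Spack analyzer:

.. code-block:: python

   import os

   from .analyzer_base import AnalyzerBase


   class Myanalyzer(AnalyzerBase):

       name = "myanalyzer"
       outfile = "spack-analyzer-myanalyzer.json"
       description = "illustrative analyzer that records file sizes"

       def run(self):
           results = []
           prefix = self.spec.prefix
           for root, _, files in os.walk(prefix):
               for fname in files:
                   path = os.path.join(root, fname)
                   results.append({
                       "name": "file-size",
                       "analyzer_name": self.name,
                       # install_file must be relative to the prefix
                       "install_file": os.path.relpath(path, prefix),
                       "value": str(os.path.getsize(path)),
                   })
           return {self.name: results}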

^^^^^^^^^^^^^^^^^^^^^^^^^
An analyzer init Function
^^^^^^^^^^^^^^^^^^^^^^^^^

If you don't need any extra dependencies or checks, you can skip defining an analyzer
init function, as the base class will handle it. Typically, it will accept
a spec, and an optional output directory (if the user does not want the default
metadata folder for analyzer results). The analyzer init function should call
its parent init, and then do any extra checks or validation that are required to
work. For example:

.. code-block:: python

   def __init__(self, spec, dirname=None):
       super(Myanalyzer, self).__init__(spec, dirname)

       # install extra dependencies, do extra preparation and checks here


At the end of the init, you will have available to you:

- **self.spec**: the spec object
- **self.dirname**: an optional directory name the user has provided at init to save
- **self.output_dir**: the analyzer metadata directory, where we save by default
- **self.meta_dir**: the path to the package metadata directory (.spack) if you need it

And can proceed to write your analyzer.
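
For instance, an init that validates an external tool before any analysis runs
might look like the following sketch; the use of ``which`` from
``spack.util.executable`` and the ``nm`` requirement are assumptions made for
illustration:

.. code-block:: python

   from spack.util.executable import which

   from .analyzer_base import AnalyzerBase


   class Myanalyzer(AnalyzerBase):

       def __init__(self, spec, dirname=None):
           super(Myanalyzer, self).__init__(spec, dirname)

           # Fail early if an assumed helper binary is not on PATH.
           if which("nm") is None:
               raise RuntimeError("Myanalyzer requires 'nm' on your PATH")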

^^^^^^^^^^^^^^^^^^^^^^^
Saving Analyzer Results
^^^^^^^^^^^^^^^^^^^^^^^

The analyzer will have ``save_result`` called, with the result object generated
to save it to the filesystem, and if the user has added the ``--monitor`` flag
to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join
with your output file relative path of choice:

.. code-block:: python

   outfile = os.path.join(self.output_dir, "my-output-file.txt")


The directory will be provided by the ``output_dir`` property but it won't exist,
so you should create it:


.. code-block:: python

   # Create the output directory
   if not os.path.exists(self._output_dir):
       os.makedirs(self._output_dir)


If you are generating results that match to specific files in the package
install directory, you should try to maintain those paths in the case that
there are equivalently named files in different directories that would
overwrite one another. As an example of an analyzer with a custom save,
the Libabigail analyzer saves ``*.xml`` files to the analyzer metadata
folder in ``run()``, as they are either binaries, or as xml (text) would
usually be too big to pass in one request. For this reason, the files
are saved during ``run()`` and the filenames added to the result object,
and then when the result object is passed back into ``save_result()``,
we skip saving to the filesystem, and instead read the file and send
each one (separately) to the monitor:


.. code-block:: python

   def save_result(self, result, monitor=None, overwrite=False):
       """ABI results are saved to individual files, so each one needs to be
       read and uploaded. Result here should be the lookup generated in run(),
       the key is the analyzer name, and each value is the result file.
       We currently upload the entire xml as text because libabigail can't
       easily read gzipped xml, but this will be updated when it can.
       """
       if not monitor:
           return

       name = self.spec.package.name

       for obj, filename in result.get(self.name, {}).items():

           # Don't include the prefix
           rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

           # We've already saved the results to file during run
           content = spack.monitor.read_file(filename)

           # A result needs an analyzer, value or binary_value, and name
           data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
           tty.info("Sending result for %s %s to monitor." % (name, rel_path))
           monitor.send_analyze_metadata(self.spec.package, {"libabigail": [data]})


Notice that this function, if you define it, requires a result object (generated by
``run()``), a monitor (if you want to send), and a boolean ``overwrite`` to be used
to check if a result exists first, and not write to it if the result exists and
overwrite is False. Also notice that since we already saved these files to the
analyzer metadata folder, we return early if a monitor isn't defined, because this
function serves to send results to the monitor. If you haven't saved anything to
the analyzer metadata folder yet, you might want to do that here. You should also
use ``tty.info`` to give
the user a message of "Writing result to $DIRNAME."
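
A minimal custom ``save_result`` that follows those conventions could look like
this sketch; the JSON serialization and filename handling here are assumptions,
not Spack's exact implementation:

.. code-block:: python

   import json
   import os

   import llnl.util.tty as tty

   def save_result(self, result, monitor=None, overwrite=False):
       outfile = os.path.join(self.output_dir, self.outfile)

       # Respect an existing result unless the caller asked to overwrite.
       if os.path.exists(outfile) and not overwrite:
           return

       if not os.path.exists(self.output_dir):
           os.makedirs(self.output_dir)

       tty.info("Writing result to %s" % self.output_dir)
       with open(outfile, "w") as fd:
           json.dump(result, fd)

       # Only upload when the user passed --monitor.
       if monitor:
           monitor.send_analyze_metadata(self.spec.package, result)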

.. _writing-commands:


@@ -446,6 +699,23 @@ with a hook, and this is the purpose of this particular hook. Akin to
``on_phase_success`` we require the same variables - the package that failed,
the name of the phase, and the log file where we might find errors.

"""""""""""""""""""""""""""""""""
``on_analyzer_save(pkg, result)``
"""""""""""""""""""""""""""""""""

After an analyzer has saved some result for a package, this hook is called,
and it provides the package that we just ran the analysis for, along with
the loaded result. Typically, a result is structured to have the name
of the analyzer as key, and the result object that is defined in detail in
:ref:`analyzer_run_function`.

.. code-block:: python

   def on_analyzer_save(pkg, result):
       """given a package and a result...
       """
       print('Do something extra with a package analysis result here')


^^^^^^^^^^^^^^^^^^^^^^
Adding a New Hook Type
@@ -963,13 +1233,8 @@ completed, the steps to make the point release are:

      $ git checkout releases/v0.15

#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
   in the project, create it. This pull request ought to be created as early as
   possible when working on a release project, so that we can build the release
   commits incrementally, and identify potential conflicts at an early stage.

#. Cherry-pick each pull request in the ``Done`` column of the release
   project board onto the ``Backports vX.Y.Z`` pull request.
   project board onto the release branch.

   This is **usually** fairly simple since we squash the commits from the
   vast majority of pull requests. That means there is only one commit

@@ -994,7 +1259,7 @@ completed, the steps to make the point release are:

   It is important to cherry-pick commits in the order they happened,
   otherwise you can get conflicts while cherry-picking. When
   cherry-picking look at the merge date,
   cherry-picking onto a point release, look at the merge date,
   **not** the number of the pull request or the date it was opened.

   Sometimes you may **still** get merge conflicts even if you have

@@ -1015,19 +1280,15 @@ completed, the steps to make the point release are:
   branch if neither of the above options makes sense, but this can
   require a lot of work. It's seldom the right choice.

#. When all the commits from the project board are cherry-picked into
   the ``Backports vX.Y.Z`` pull request, you can push a commit to:
#. Bump the version in ``lib/spack/spack/__init__.py``.

   1. Bump the version in ``lib/spack/spack/__init__.py``.
   2. Update ``CHANGELOG.md`` with a list of the changes.
#. Update ``CHANGELOG.md`` with a list of the changes.

   This is typically a summary of the commits you cherry-picked onto the
   release branch. See `the changelog from 0.14.1
   <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
   is needed to keep track in the release branch of all the commits that were
   cherry-picked.
#. Push the release branch to GitHub.

#. Make sure CI passes on the release branch, including:

@@ -1046,8 +1307,6 @@ completed, the steps to make the point release are:

#. Follow the steps in :ref:`announcing-releases`.

#. Submit a PR to update the CHANGELOG in the `develop` branch
   with the addition of this point release.

.. _publishing-releases:
@@ -233,8 +233,8 @@ packages will be listed as roots of the Environment.

All of the Spack commands that act on the list of installed specs are
Environment-sensitive in this way, including ``install``,
``uninstall``, ``find``, ``extensions``, and more. In the
:ref:`environment-configuration` section we will discuss
``uninstall``, ``activate``, ``deactivate``, ``find``, ``extensions``,
and more. In the :ref:`environment-configuration` section we will discuss
Environment-sensitive commands further.

^^^^^^^^^^^^^^^^^^^^^

@@ -376,30 +376,6 @@ from being added again. At the same time, a spec that already exists in the
environment, but only as a dependency, will be added to the environment as a
root spec without the ``--no-add`` option.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Developing Packages in a Spack Environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source. By
default, it will also clone the package to a subdirectory in the
environment. This package will have a special variant ``dev_path``
set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack ensures that all instances of a
developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.

For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by reinstalling the environment,
and eventually committed and pushed to the upstream git repo.

^^^^^^^
Loading
^^^^^^^
@@ -478,21 +454,14 @@ them to the Environment.
   spack:
     include:
     - relative/path/to/config.yaml
     - https://github.com/path/to/raw/config/compilers.yaml
     - /absolute/path/to/packages.yaml

Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.

^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence
^^^^^^^^^^^^^^^^^^^^^^^^

Inline configurations take precedence over included configurations, so
you don't have to change shared configuration files to make small changes
to an individual environment. Included configurations listed earlier will
have higher precedence, as the included configs are applied in reverse order.
Environments can include files with either relative or absolute
paths. Inline configurations take precedence over included
configurations, so you don't have to change shared configuration files
to make small changes to an individual Environment. Included configs
listed earlier will have higher precedence, as the included configs are
applied in reverse order.

-------------------------------
Manually Editing the Specs List
@@ -519,49 +488,8 @@ available from the yaml file.
^^^^^^^^^^^^^^^^^^^
Spec concretization
^^^^^^^^^^^^^^^^^^^

An environment can be concretized in three different modes and the behavior active under
any environment is determined by the ``concretizer:unify`` configuration option.

The *default* mode is to unify all specs:

.. code-block:: yaml

   spack:
     specs:
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: true

This means that any package in the environment corresponds to a single concrete spec. In
the above example, when ``hdf5`` depends down the line of ``zlib``, it is required to
take ``zlib@1.2.8`` instead of a newer version. This mode of concretization is
particularly useful when environment views are used: if every package occurs in
only one flavor, it is usually possible to merge all install directories into a view.

A downside of unified concretization is that it can be overly strict. For example, a
concretization error would happen when both ``hdf5+mpi`` and ``hdf5~mpi`` are specified
in an environment.

The second mode is to *unify when possible*: this makes concretization of root specs
more independent. Instead of requiring reuse of dependencies across different root
specs, it is only maximized:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: when_possible

This means that both ``hdf5`` installations will use ``zlib@1.2.8`` as a dependency even
if newer versions of that library are available.

The third mode of operation is to concretize root specs entirely independently by
disabling unified concretization:
An environment can be concretized in three different modes and the behavior active under any environment
is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:

.. code-block:: yaml
@@ -573,11 +501,45 @@ disabling unified concretization:
   concretizer:
     unify: false

In this example ``hdf5`` is concretized separately, and does not consider ``zlib@1.2.8``
as a constraint or preference. Instead, it will take the latest possible version.
This mode of operation permits deploying a full software stack where multiple configurations of the same package
need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
then it will be used for both ``hdf5`` installations.

The last two concretization options are typically useful for system administrators and
user support groups providing a large software stack for their HPC center.
If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
i.e. trying to maximize reuse of dependencies across different specs:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: when_possible

Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
Central installations done at HPC centers by system administrators or user support groups are a common case
that fits either of these two modes.

Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
ensure that each package in the environment comes with a single configuration:

.. code-block:: yaml

   spack:
     specs:
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: true

This mode of operation is usually what is required by software developers that want to deploy their development
environment and have a single view of it in the filesystem.

.. note::
@@ -588,10 +550,10 @@ user support groups providing a large software stack for their HPC center.

.. admonition:: Re-concretization of user specs

   When using *unified* concretization (when possible), the entire set of specs will be
   When concretizing specs *together* or *together where possible* the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent / minimal. When instead unified concretization is
   disabled, only the new specs will be concretized after any addition.
   the environment remains consistent / minimal. When instead the specs are concretized
   separately only the new specs will be re-concretized after any addition.

^^^^^^^^^^^^^
Spec Matrices
@@ -630,6 +592,31 @@ The following two Environment manifests are identical:
Spec matrices can be used to install swaths of software across various
toolchains.

The concretization logic for spec matrices differs slightly from the
rest of Spack. If a variant or dependency constraint from a matrix is
invalid, Spack will reject the constraint and try again without
it. For example, the following two Environment manifests will produce
the same specs:

.. code-block:: yaml

   spack:
     specs:
     - matrix:
       - [zlib, libelf, hdf5+mpi]
       - [^mvapich2@2.2, ^openmpi@3.1.0]

   spack:
     specs:
     - zlib
     - libelf
     - hdf5+mpi ^mvapich2@2.2
     - hdf5+mpi ^openmpi@3.1.0

This allows one to create toolchains out of combinations of
constraints and apply them somewhat indiscriminately to packages,
without regard for the applicability of the constraint.

^^^^^^^^^^^^^^^^^^^^
Spec List References
^^^^^^^^^^^^^^^^^^^^
@@ -961,6 +948,9 @@ Variable Paths
PATH                bin
MANPATH             man, share/man
ACLOCAL_PATH        share/aclocal
LD_LIBRARY_PATH     lib, lib64
LIBRARY_PATH        lib, lib64
CPATH               include
PKG_CONFIG_PATH     lib/pkgconfig, lib64/pkgconfig, share/pkgconfig
CMAKE_PREFIX_PATH   .
=================== =========
@@ -993,7 +983,7 @@ A typical workflow is as follows:
   spack env create -d .
   spack -e . add perl
   spack -e . concretize
   spack -e . env depfile -o Makefile
   spack -e . env depfile > Makefile
   make -j64

This generates a ``Makefile`` from a concretized environment in the

@@ -1006,6 +996,7 @@ load, even when packages are built in parallel.
By default the following phony convenience targets are available:

- ``make all``: installs the environment (default target);
- ``make fetch-all``: only fetch sources of all packages;
- ``make clean``: cleans files used by make, but does not uninstall packages.

.. tip::
@@ -1015,23 +1006,14 @@ By default the following phony convenience targets are available:
   printed orderly per package install. To get synchronized output with colors,
   use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Specifying dependencies on generated ``make`` targets
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

An interesting question is how to include generated ``Makefile``\s in your own
``Makefile``\s. This comes up when you want to install an environment that provides
executables required in a command for a make target of your own.

The example below shows how to accomplish this: the ``env`` target specifies
the generated ``spack/env`` target as a prerequisite, meaning that the environment
gets installed and is available for use in the ``env`` target.
The following advanced example shows how generated targets can be used in a
``Makefile``:

.. code:: Makefile

   SPACK ?= spack

   .PHONY: all clean env
   .PHONY: all clean fetch env

   all: env

@@ -1040,6 +1022,9 @@ gets installed and is available for use in the ``env`` target.

   env.mk: spack.lock
   	$(SPACK) -e . env depfile -o $@ --make-target-prefix spack

   fetch: spack/fetch
   	$(info Environment fetched!)

   env: spack/env
   	$(info Environment installed!)
@@ -1051,10 +1036,11 @@ gets installed and is available for use in the ``env`` target.
|
||||
include env.mk
|
||||
endif
|
||||
|
||||
This works as follows: when ``make`` is invoked, it first "remakes" the missing
include ``env.mk`` as there is a target for it. This triggers concretization of
the environment and makes spack output ``env.mk``. At that point the
generated target ``spack/env`` becomes available through ``include env.mk``.
When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
from its rule, which triggers concretization. When done, the generated targets
``spack/fetch`` and ``spack/env`` are available. In the above
example, the ``env`` target uses the latter as a prerequisite, meaning
that it can make use of the installed packages in its commands.

As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
the include is conditional.
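
The conditional include in the example above has roughly this shape; a
sketch, since the hunk shown here only includes its tail:

.. code:: Makefile

   ifeq (,$(filter clean,$(MAKECMDGOALS)))
   include env.mk
   endif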
@@ -1062,27 +1048,7 @@ the include is conditional.
.. note::

   When including generated ``Makefile``\s, it is important to use
   the ``--make-target-prefix`` flag and use the non-phony target
   ``<target-prefix>/env`` as prerequisite, instead of the phony target
   ``<target-prefix>/all``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Building a subset of the environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The generated ``Makefile``\s contain install targets for each spec. Given the hash
of a particular spec, you can use the ``.install/<hash>`` target to install the
spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code:: console

   $ spack env depfile -o Makefile --make-target-prefix my_env

   # Install dependencies in parallel, only show a log on error.
   $ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error

   # Install the root spec with verbose output.
   $ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose
   the ``--make-target-prefix`` flag and use the non-phony targets
   ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
   prerequisites, instead of the phony targets ``<target-prefix>/all``
   and ``<target-prefix>/fetch-all`` respectively.

@@ -98,42 +98,40 @@ For example, this command:

.. code-block:: console

   $ spack create https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz
   $ spack create http://www.mr511.de/software/libelf-0.8.13.tar.gz

creates a simple python file:

.. code-block:: python

   from spack.package import *
   from spack import *


   class Libelf(AutotoolsPackage):
   class Libelf(Package):
       """FIXME: Put a proper description of your package here."""

       # FIXME: Add a proper url for your package's homepage here.
       homepage = "https://www.example.com"
       url = "https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz"
       homepage = "http://www.example.com"
       url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"

       # FIXME: Add a list of GitHub accounts to
       # notify when the package is updated.
       # maintainers = ["github_user1", "github_user2"]

       version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")
       version('0.8.13', '4136d7b4c04df68b686570afa26988ac')

       # FIXME: Add dependencies if required.
       # depends_on("foo")
       # depends_on('foo')

       def configure_args(self):
           # FIXME: Add arguments other than --prefix
           # FIXME: If not needed delete this function
           args = []
           return args
       def install(self, spec, prefix):
           # FIXME: Modify the configure line to suit your build system here.
           configure('--prefix={0}'.format(prefix))

           # FIXME: Add logic to build and install here.
           make()
           make('install')

It doesn't take much python coding to get from there to a working
package:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
   :lines: 5-
   :lines: 6-

Spack also provides wrapper functions around common commands like
``configure``, ``make``, and ``cmake`` to make writing packages

@@ -23,36 +23,8 @@ be present on the machine where Spack is run:
These requirements can be easily installed on most modern Linux systems;
on macOS, XCode is required. Spack is designed to run on HPC
platforms like Cray. Not all packages should be expected
to work on all platforms.

A build matrix showing which packages are working on which systems is shown below.

.. tab-set::

   .. tab-item:: Debian/Ubuntu

      .. code-block:: console

         apt update
         apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip

   .. tab-item:: RHEL

      .. code-block:: console

         yum update -y
         yum install -y epel-release
         yum update -y
         yum --enablerepo epel groupinstall -y "Development Tools"
         yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute redhat-lsb-core make patch python3 python3-pip python3-setuptools unzip
         python3 -m pip install boto3

   .. tab-item:: macOS Brew

      .. code-block:: console

         brew update
         brew install curl gcc git gnupg zip
to work on all platforms. A build matrix showing which packages are
working on which systems is planned but not yet available.

------------
Installation
@@ -124,41 +96,88 @@ Spack provides two ways of bootstrapping ``clingo``: from pre-built binaries
(default), or from sources. The fastest way to get started is to bootstrap from
pre-built binaries.

The first time you concretize a spec, Spack will bootstrap automatically:
.. note::

   When bootstrapping from pre-built binaries, Spack currently requires
   ``patchelf`` on Linux and ``otool`` on macOS. If ``patchelf`` is not in the
   ``PATH``, Spack will build it from sources, and a C++ compiler is required.

The first time you concretize a spec, Spack will bootstrap in the background:

.. code-block:: console

   $ spack spec zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spec.json
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spack
   ==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache
   ==> Bootstrapping patchelf from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spec.json
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.16.1/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spack
   ==> Installing "patchelf@0.16.1%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" build_system=autotools arch=linux-centos7-x86_64" from a buildcache
   $ time spack spec zlib
   Input spec
   --------------------------------
   zlib

   Concretized
   --------------------------------
   zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake
   zlib@1.2.11%gcc@7.5.0+optimize+pic+shared arch=linux-ubuntu18.04-zen

If for security concerns you cannot bootstrap ``clingo`` from pre-built
binaries, you have to disable fetching the binaries we generated with Github Actions.
   real	0m20.023s
   user	0m18.351s
   sys	0m0.784s

After this command you'll see that ``clingo`` has been installed for Spack's own use:

.. code-block:: console

   $ spack bootstrap disable github-actions-v0.4
   ==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
   $ spack bootstrap disable github-actions-v0.3
   ==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping
   $ spack find -b
   ==> Showing internal bootstrap store at "/root/.spack/bootstrap/store"
   ==> 3 installed packages
   -- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
   clingo-bootstrap@spack  python@3.6

   -- linux-ubuntu18.04-zen / gcc@7.5.0 ----------------------------
   patchelf@0.13

Subsequent calls to the concretizer will then be much faster:

.. code-block:: console

   $ time spack spec zlib
   [ ... ]
   real	0m0.490s
   user	0m0.431s
   sys	0m0.041s


If for security concerns you cannot bootstrap ``clingo`` from pre-built
binaries, you have to mark this bootstrapping method as untrusted. This makes
Spack fall back to bootstrapping from sources:

.. code-block:: console

   $ spack bootstrap untrust github-actions-v0.2
   ==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping

You can verify that the new settings are effective with:

.. command-output:: spack bootstrap list
.. code-block:: console

   $ spack bootstrap list
   Name: github-actions-v0.2 UNTRUSTED

   Type: buildcache

   Info:
       url: https://mirror.spack.io/bootstrap/github-actions/v0.2
       homepage: https://github.com/spack/spack-bootstrap-mirrors
       releases: https://github.com/spack/spack-bootstrap-mirrors/releases

   Description:
       Buildcache generated from a public workflow using Github Actions.
       The sha256 checksum of binaries is checked before installation.

   [ ... ]

   Name: spack-install TRUSTED

   Type: install

   Description:
       Specs built from sources by Spack. May take a long time.

.. note::

@@ -188,7 +207,9 @@ under the ``${HOME}/.spack`` directory. The software installed there can be quer

.. code-block:: console

   $ spack -b find
   $ spack find --bootstrap
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 3 installed packages
   -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
   clingo-bootstrap@spack  python@3.6.9  re2c@1.2.1

@@ -197,7 +218,7 @@ In case it's needed the bootstrap store can also be cleaned with:
.. code-block:: console

   $ spack clean -b
   ==> Removing bootstrapped software and configuration in "/home/spack/.spack/bootstrap"
   ==> Removing software in "/home/spack/.spack/bootstrap/store"

^^^^^^^^^^^^^^^^^^
Check Installation

Binary file not shown. (Before: 658 KiB)
Binary file not shown. (Before: 449 KiB)
Binary file not shown. (Before: 128 KiB)
Binary file not shown. (Before: 126 KiB)
Binary file not shown. (Before: 35 KiB)
File diff suppressed because it is too large
@@ -67,6 +67,7 @@ or refer to the full manual below.
   build_settings
   environments
   containers
   monitoring
   mirrors
   module_file_support
   repositories
@@ -77,6 +78,12 @@ or refer to the full manual below.
   extensions
   pipelines

.. toctree::
   :maxdepth: 2
   :caption: Research

   analyze

.. toctree::
   :maxdepth: 2
   :caption: Contributing

@@ -77,7 +77,7 @@ installation of a package.

   Spack only generates modulefiles when a package is installed. If
   you attempt to install a package and it is already installed, Spack
   will not regenerate modulefiles for the package. This may lead to
   will not regenerate modulefiles for the package. This may to
   inconsistent modulefiles if the Spack module configuration has
   changed since the package was installed, either by editing a file
   or changing scopes or environments.
@@ -113,8 +113,6 @@ from language interpreters into their extensions. The latter two instead permit
fine tune the filesystem layout, content and creation of module files to meet
site specific conventions.

.. _overide-api-calls-in-package-py:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Override API calls in ``package.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -136,7 +134,7 @@ The second method:
       pass

can instead inject run-time environment modifications in the module files of packages
that depend on it. In both cases you need to fill ``env`` with the desired
that depend on it. In both cases you need to fill ``run_env`` with the desired
list of environment modifications.
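
A minimal sketch of such a callback in a package's ``package.py`` (the
method name and path are illustrative; the exact callback API depends on
the Spack version in use):

.. code-block:: python

   def setup_dependent_run_environment(self, env, dependent_spec):
       # Prepend this package's bin directory in the dependent's module file.
       env.prepend_path("PATH", self.prefix.bin)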

.. admonition:: The ``r`` package and callback APIs
@@ -310,7 +308,7 @@ the variable ``FOOBAR`` will be unset.
   spec constraints are instead evaluated top to bottom.

""""""""""""""""""""""""""""""""""""""""""""
Exclude or include specific module files
Blacklist or whitelist specific module files
""""""""""""""""""""""""""""""""""""""""""""

You can use anonymous specs also to prevent module files from being written or
@@ -324,8 +322,8 @@ your system. If you write a configuration file like:
   modules:
     default:
       tcl:
         include: ['gcc', 'llvm']    # include will have precedence over exclude
         exclude: ['%gcc@4.4.7']     # Assuming gcc@4.4.7 is the system compiler
         whitelist: ['gcc', 'llvm']  # Whitelist will have precedence over blacklist
         blacklist: ['%gcc@4.4.7']   # Assuming gcc@4.4.7 is the system compiler

you will prevent the generation of module files for any package that
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
@@ -492,7 +490,7 @@ satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.

.. warning::
.. warning::
   If Spack is configured to generate multiple default packages in the
   same directory, the last modulefile to be generated will be the
   default module.
@@ -520,33 +518,18 @@ inspections and customize them per-module-set.
   prefix_inspections:
     bin:
       - PATH
     man:
       - MANPATH
     lib:
       - LIBRARY_PATH
     '':
       - CMAKE_PREFIX_PATH

Prefix inspections are only applied if the relative path inside the
installation prefix exists. In this case, for a Spack package ``foo``
installed to ``/spack/prefix/foo``, if ``foo`` installs executables to
``bin`` but no manpages in ``man``, the generated module file for
``bin`` but no libraries in ``lib``, the generated module file for
``foo`` would update ``PATH`` to contain ``/spack/prefix/foo/bin`` and
``CMAKE_PREFIX_PATH`` to contain ``/spack/prefix/foo``, but would not
update ``MANPATH``.

The default list of environment variables in this config section
includes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH``
and ``CMAKE_PREFIX_PATH``, as well as ``DYLD_FALLBACK_LIBRARY_PATH``
on macOS. On Linux however, the corresponding ``LD_LIBRARY_PATH``
variable is *not* set, because it affects the behavior of
system executables too.

.. note::

   In general, the ``LD_LIBRARY_PATH`` variable is not required
   when using packages built with Spack, thanks to the use of RPATH.
   Some packages may still need the variable, which is best handled
   on a per-package basis instead of globally, as explained in
   :ref:`overide-api-calls-in-package-py`.
   update ``LIBRARY_PATH``.

There is a special case for prefix inspections relative to environment
views. If all of the following conditions hold for a module set
@@ -606,7 +589,7 @@ Filter out environment modifications
Modifications to certain environment variables in module files are there by
default, for instance because they are generated by prefix inspections.
If you want to prevent modifications to some environment variables, you can
do so by using the ``exclude_env_vars``:
do so by using the environment blacklist:

.. code-block:: yaml

@@ -616,7 +599,7 @@ do so by using the ``exclude_env_vars``:
   all:
     filter:
       # Exclude changes to any of these variables
       exclude_env_vars: ['CPATH', 'LIBRARY_PATH']
       environment_blacklist: ['CPATH', 'LIBRARY_PATH']

The configuration above will generate module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``.
265	lib/spack/docs/monitoring.rst	Normal file
@@ -0,0 +1,265 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _monitoring:

==========
Monitoring
==========

You can use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
server to store a database of your packages, builds, and associated metadata
for provenance, research, or some other kind of development. You should
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

-------------------
Analysis Monitoring
-------------------

To read about how to monitor an analysis (meaning you want to send analysis results
to a server) see :ref:`analyze_monitoring`.

---------------------
Monitoring An Install
---------------------

Since an install is typically when you build packages, we logically want
to tell spack to monitor during this step. Let's start with an example
where we want to monitor the install of hdf5. Unless you have disabled authentication
for the server, we first want to export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack install --monitor hdf5


If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5


As a precaution, we cut out early in the spack client if you have not provided
authentication credentials. For example, if you run the command above without
exporting your username or token, you'll see:

.. code-block:: console

   ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER

This extra check is to ensure that we don't start any builds,
and then discover that you forgot to export your token. However, if
your monitoring server has authentication disabled, you can tell this to
the client to skip this step:

.. code-block:: console

   $ spack install --monitor --monitor-disable-auth hdf5

If the service is not running, you'll cleanly exit early - the install will
not continue if you've asked it to monitor and there is no service.
For example, here is what you'll see if the monitoring service is not running:

.. code-block:: console

   [Errno 111] Connection refused


If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-keep-going hdf5

This could mean that if a request fails, you only have partial or no data
added to your monitoring database. This setting will not be applied to the
first request to check if the server is running, but to subsequent requests.
If you don't have a monitor server running and you want to build, simply
don't provide the ``--monitor`` flag! Finally, if you want to provide one or
more tags to your build, you can do:

.. code-block:: console

   # Add one tag, "pizza"
   $ spack install --monitor --monitor-tags pizza hdf5

   # Add two tags, "pizza" and "pasta"
   $ spack install --monitor --monitor-tags pizza,pasta hdf5


----------------------------
Monitoring with Containerize
----------------------------

The same argument group is available to add to a containerize command.

^^^^^^
Docker
^^^^^^

To add monitoring to a Docker container recipe generation using the defaults,
and assuming a monitor server running on localhost, you would
start with a spack.yaml in your present working directory:

.. code-block:: yaml

   spack:
     specs:
       - samtools

And then do:

.. code-block:: console

   # preview first
   spack containerize --monitor

   # and then write to a Dockerfile
   spack containerize --monitor > Dockerfile


The install command will be edited to include commands for enabling monitoring.
However, getting secrets into the container for your monitor server is something
that should be done carefully. Specifically you should:

- Never try to define secrets as ENV, ARG, or using ``--build-arg``
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)

Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
You'll need to again export environment variables for your spack monitor server:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

And then use buildkit along with your build and identifying the name of the secret:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

The secrets are expected to come from your environment, and then will be temporarily mounted and available
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
need to tell Docker to use the host network:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .
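
Inside the generated Dockerfile, a build step can then read the mounted
secrets before invoking the install; a minimal sketch using BuildKit's
``--mount=type=secret`` (the spec is illustrative, and ``spack
containerize`` generates the real install commands):

.. code-block:: docker

   # syntax=docker/dockerfile:1
   RUN --mount=type=secret,id=su --mount=type=secret,id=st \
       SPACKMON_USER="$(cat /run/secrets/su)" \
       SPACKMON_TOKEN="$(cat /run/secrets/st)" \
       spack install --monitor samtools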

^^^^^^^^^^^
Singularity
^^^^^^^^^^^

To add monitoring to a Singularity container build, the spack.yaml needs to
be modified slightly to specify wanting a different format:


.. code-block:: yaml

   spack:
     specs:
       - samtools
     container:
       format: singularity


Again, generate the recipe:


.. code-block:: console

   # preview first
   $ spack containerize --monitor

   # then write to a Singularity recipe
   $ spack containerize --monitor > Singularity


Singularity doesn't have a direct way to define secrets at build time, so we have
to do a bit of a manual command to add a file, source secrets in it, and remove it.
Since Singularity doesn't have layers like Docker, deleting a file will truly
remove it from the container and history. So let's say we have this file,
``secrets.sh``:

.. code-block:: console

   # secrets.sh
   export SPACKMON_USER=spack
   export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438


We would then generate the Singularity recipe, and add a files section,
a source of that file at the start of ``%post``, and **importantly**
a removal of the file at the end of that same section.

.. code-block::

   Bootstrap: docker
   From: spack/ubuntu-bionic:latest
   Stage: build

   %files
       secrets.sh /opt/secrets.sh

   %post
       . /opt/secrets.sh

       # spack install commands are here
       ...

       # Don't forget to remove here!
       rm /opt/secrets.sh


You can then build the container as you normally would.

.. code-block:: console

   $ sudo singularity build container.sif Singularity


------------------
Monitoring Offline
------------------

In the case that you want to save monitor results to your filesystem
and then upload them later (perhaps you are in an environment where you don't
have credentials or it isn't safe to use them) you can use the ``--monitor-save-local``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-save-local hdf5

This will save results in a subfolder, "monitor" in your designated spack
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
you are ready to upload them to a spack monitor server:


.. code-block:: console

   $ spack monitor upload ~/.spack/reports/monitor


You can choose the root directory of results as shown above, or a specific
subdirectory. The command accepts other arguments to specify configuration
for the monitor.
File diff suppressed because it is too large
@@ -5,9 +5,9 @@

.. _pipelines:

============
CI Pipelines
============
=========
Pipelines
=========

Spack provides commands that support generating and running automated build
pipelines designed for Gitlab CI. At the highest level it works like this:
@@ -168,7 +168,7 @@ which specs are up to date and which need to be rebuilt (it's a good idea for ot
reasons as well, but those are out of scope for this discussion). In this case we
have disabled it (using ``rebuild-index: False``) because the index would only be
generated in the artifacts mirror anyway, and consequently would not be available
during subsequent pipeline runs.
during subesequent pipeline runs.

.. note::
   With the addition of reproducible builds (#22887) a previously working
@@ -267,64 +267,24 @@ generated by jobs in the pipeline.
``spack ci rebuild``
^^^^^^^^^^^^^^^^^^^^^

The purpose of ``spack ci rebuild`` is straightforward: take its assigned
spec and ensure a binary of a successful build exists on the target mirror.
If the binary does not already exist, it is built from source and pushed
to the mirror. The associated stand-alone tests are optionally run against
the new build. Additionally, files for reproducing the build outside of the
CI environment are created to facilitate debugging.
The purpose of the ``spack ci rebuild`` is straightforward: take its assigned
spec job, check whether the target mirror already has a binary for that spec,
and if not, build the spec from source and push the binary to the mirror. To
accomplish this in a reproducible way, the sub-command prepares a ``spack install``
command line to build a single spec in the DAG, saves that command in a
shell script, ``install.sh``, in the current working directory, and then runs
it to install the spec. The shell script is also exported as an artifact to
aid in reproducing the build outside of the CI environment.

If a binary for the spec does not exist on the target mirror, an install
shell script, ``install.sh``, is created and saved in the current working
directory. The script is run in a job to install the spec from source. The
resulting binary package is pushed to the mirror. If ``cdash`` is configured
for the environment, then the build results will be uploaded to the site.
If it was necessary to install the spec from source, ``spack ci rebuild`` will
also subsequently create a binary package for the spec and try to push it to the
mirror.

Environment variables and values in the ``gitlab-ci`` section of the
``spack.yaml`` environment file provide inputs to this process. The
two main sources of environment variables are variables written into
``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime.
Several key CI pipeline variables are described in
:ref:`ci_environment_variables`.

If the ``--tests`` option is provided, stand-alone tests are performed but
only if the build was successful *and* the package does not appear in the
list of ``broken-tests-packages``. A shell script, ``test.sh``, is created
and run to perform the tests. On completion, test logs are exported as job
artifacts for review and to facilitate debugging. If `cdash` is configured,
test results are also uploaded to the site.

A snippet from an example ``spack.yaml`` file illustrating use of this
option *and* specification of a package with broken tests is given below.
The inclusion of a spec for building ``gptune`` is not shown here. Note
that ``--tests`` is passed to ``spack ci rebuild`` as part of the
``gitlab-ci`` script.

.. code-block:: yaml

   gitlab-ci:
     script:
       - . "./share/spack/setup-env.sh"
       - spack --version
       - cd ${SPACK_CONCRETE_ENV_DIR}
       - spack env activate --without-view .
       - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
       - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
       - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
       - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
       - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)

     broken-tests-packages:
       - gptune

In this case, even if ``gptune`` is successfully built from source, the
pipeline will *not* run its stand-alone tests since the package is listed
under ``broken-tests-packages``.

Spack's cloud pipelines provide actual, up-to-date examples of the CI/CD
configuration and environment files used by Spack. You can find them
under Spack's `stacks
<https://github.com/spack/spack/tree/develop/share/spack/gitlab/cloud_pipelines/stacks>`_ repository directory.
The ``spack ci rebuild`` sub-command mainly expects its "input" to come either
from environment variables or from the ``gitlab-ci`` section of the ``spack.yaml``
environment file. There are two main sources of the environment variables, some
are written into ``.gitlab-ci.yml`` by ``spack ci generate``, and some are
provided by the GitLab CI runtime.

.. _cmd-spack-ci-rebuild-index:

@@ -487,7 +447,7 @@ Note about "no-op" jobs
^^^^^^^^^^^^^^^^^^^^^^^

If no specs in an environment need to be rebuilt during a given pipeline run
(meaning all are already up to date on the mirror), a single successful job
(meaning all are already up to date on the mirror), a single succesful job
(a NO-OP) is still generated to avoid an empty pipeline (which GitLab
considers to be an error). An optional ``service-job-attributes`` section
can be added to your ``spack.yaml`` where you can provide ``tags`` and
@@ -765,7 +725,7 @@ above with ``git checkout ${SPACK_CHECKOUT_VERSION}``.
On the other hand, if you're pointing to a spack repository and branch under your
control, there may be no benefit in using the captured ``SPACK_CHECKOUT_VERSION``,
and you can instead just clone using the variables you define (``SPACK_REPO``
and ``SPACK_REF`` in the example above).
and ``SPACK_REF`` in the example aboves).

.. _custom_workflow:

@@ -1,12 +1,10 @@
# These dependencies should be installed using pip in order
# to build the documentation.

sphinx>=3.4,!=4.1.2,!=5.1.0
sphinx>=3.4,!=4.1.2
sphinxcontrib-programoutput
sphinx-design
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17
pygments <2.13

@@ -18,10 +18,7 @@ spack:
    - "py-sphinx@3.4:4.1.1,4.1.3:"
    - py-sphinxcontrib-programoutput
    - py-docutils@:0.16
    - py-sphinx-design
    - py-sphinx-rtd-theme
    - py-pygments@:2.12

    # VCS
    - git
    - mercurial

@@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 2.7/3.6-3.11, , Interpreter for Spack
Python, 2.7/3.5-3.10, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
@@ -11,7 +11,6 @@ bzip2, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
lsb-release, , , Linux: identify operating system version
gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories

8	lib/spack/env/cc	vendored
@@ -241,28 +241,28 @@ case "$command" in
        mode=cpp
        debug_flags="-g"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe)
        command="$SPACK_CC"
        language="C"
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang)
        command="$SPACK_FC"
        language="Fortran 90"
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        ;;
    f77|xlf|xlf_r|pgf77)
    f77|xlf|xlf_r|pgf77|amdflang)
        command="$SPACK_F77"
        language="Fortran 77"
        comp="F77"

1	lib/spack/env/cce/case-insensitive/crayCC	vendored
@@ -1 +0,0 @@
../../cc
1	lib/spack/env/cce/craycc	vendored
@@ -1 +0,0 @@
../cc
1	lib/spack/env/cce/crayftn	vendored
@@ -1 +0,0 @@
../cc
2	lib/spack/external/__init__.py	vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)
* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)

argparse
--------
34	lib/spack/external/archspec/cpu/detect.py	vendored
@@ -132,15 +132,9 @@ def sysctl(*args):
            "model name": sysctl("-n", "machdep.cpu.brand_string"),
        }
    else:
        model = "unknown"
        model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
        if "m2" in model_str:
            model = "m2"
        elif "m1" in model_str:
            model = "m1"
        elif "apple" in model_str:
            model = "m1"

        model = (
            "m1" if "Apple" in sysctl("-n", "machdep.cpu.brand_string") else "unknown"
        )
        info = {
            "vendor_id": "Apple",
            "flags": [],
@@ -328,26 +322,14 @@ def compatibility_check_for_aarch64(info, target):
    features = set(info.get("Features", "").split())
    vendor = info.get("CPU implementer", "generic")

    # At the moment it's not clear how to detect compatibility with
    # a specific version of the architecture
    if target.vendor == "generic" and target.name != "aarch64":
        return False

    arch_root = TARGETS[basename]
    arch_root_and_vendor = arch_root == target.family and target.vendor in (
        vendor,
        "generic",
    return (
        (target == arch_root or arch_root in target.ancestors)
        and target.vendor in (vendor, "generic")
        # On macOS it seems impossible to get all the CPU features with syctl info
        and (target.features.issubset(features) or platform.system() == "Darwin")
    )

    # On macOS it seems impossible to get all the CPU features
    # with syctl info, but for ARM we can get the exact model
    if platform.system() == "Darwin":
        model_key = info.get("model", basename)
        model = TARGETS[model_key]
        return arch_root_and_vendor and (target == model or target in model.ancestors)

    return arch_root_and_vendor and target.features.issubset(features)


@compatibility_check(architecture_family="riscv64")
def compatibility_check_for_riscv64(info, target):

@@ -106,7 +106,7 @@ def __eq__(self, other):
            self.name == other.name
            and self.vendor == other.vendor
            and self.features == other.features
            and self.parents == other.parents  # avoid ancestors here
            and self.ancestors == other.ancestors
            and self.compilers == other.compilers
            and self.generation == other.generation
        )

@@ -85,7 +85,7 @@
      "intel": [
        {
          "versions": ":",
          "name": "pentium4",
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic"
        }
      ],
@@ -1099,7 +1099,8 @@
        "avx512cd",
        "avx512vbmi",
        "avx512ifma",
        "sha"
        "sha",
        "umip"
      ],
      "compilers": {
        "gcc": [
@@ -1262,6 +1263,7 @@
        "avx512vbmi",
        "avx512ifma",
        "sha_ni",
        "umip",
        "clwb",
        "rdpid",
        "gfni",
@@ -2093,163 +2095,8 @@
      ]
    }
  },
  "armv8.1a": {
    "from": ["aarch64"],
    "vendor": "generic",
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "5:",
          "flags": "-march=armv8.1-a -mtune=generic"
        }
      ],
      "clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.1-a -mtune=generic"
        }
      ],
      "apple-clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.1-a -mtune=generic"
        }
      ],
      "arm": [
        {
          "versions": ":",
          "flags": "-march=armv8.1-a -mtune=generic"
        }
      ]
    }
  },
  "armv8.2a": {
    "from": ["armv8.1a"],
    "vendor": "generic",
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "6:",
          "flags": "-march=armv8.2-a -mtune=generic"
        }
      ],
      "clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.2-a -mtune=generic"
        }
      ],
      "apple-clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.2-a -mtune=generic"
        }
      ],
      "arm": [
        {
          "versions": ":",
          "flags": "-march=armv8.2-a -mtune=generic"
        }
      ]
    }
  },
  "armv8.3a": {
    "from": ["armv8.2a"],
    "vendor": "generic",
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "6:",
          "flags": "-march=armv8.3-a -mtune=generic"
        }
      ],
      "clang": [
        {
          "versions": "6:",
          "flags": "-march=armv8.3-a -mtune=generic"
        }
      ],
      "apple-clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.3-a -mtune=generic"
        }
      ],
      "arm": [
        {
          "versions": ":",
          "flags": "-march=armv8.3-a -mtune=generic"
        }
      ]
    }
  },
  "armv8.4a": {
    "from": ["armv8.3a"],
    "vendor": "generic",
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "8:",
          "flags": "-march=armv8.4-a -mtune=generic"
        }
      ],
      "clang": [
        {
          "versions": "8:",
          "flags": "-march=armv8.4-a -mtune=generic"
        }
      ],
      "apple-clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.4-a -mtune=generic"
        }
      ],
      "arm": [
        {
          "versions": ":",
          "flags": "-march=armv8.4-a -mtune=generic"
        }
      ]
    }
  },
  "armv8.5a": {
    "from": ["armv8.4a"],
    "vendor": "generic",
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "9:",
          "flags": "-march=armv8.5-a -mtune=generic"
        }
      ],
      "clang": [
        {
          "versions": "11:",
          "flags": "-march=armv8.5-a -mtune=generic"
        }
      ],
      "apple-clang": [
        {
          "versions": ":",
          "flags": "-march=armv8.5-a -mtune=generic"
        }
      ],
      "arm": [
        {
          "versions": ":",
          "flags": "-march=armv8.5-a -mtune=generic"
        }
      ]
    }
  },
  "thunderx2": {
    "from": ["armv8.1a"],
    "from": ["aarch64"],
    "vendor": "Cavium",
    "features": [
      "fp",
@@ -2296,7 +2143,7 @@
    }
  },
  "a64fx": {
    "from": ["armv8.2a"],
    "from": ["aarch64"],
    "vendor": "Fujitsu",
    "features": [
      "fp",
@@ -2364,7 +2211,7 @@
      ]
    }
  },
  "cortex_a72": {
  "graviton": {
    "from": ["aarch64"],
    "vendor": "ARM",
    "features": [
@@ -2390,19 +2237,19 @@
        },
        {
          "versions": "6:",
          "flags" : "-mcpu=cortex-a72"
          "flags" : "-march=armv8-a+crc+crypto -mtune=cortex-a72"
        }
      ],
      "clang" : [
        {
          "versions": "3.9:",
          "flags" : "-mcpu=cortex-a72"
          "flags" : "-march=armv8-a+crc+crypto"
        }
      ]
    }
  },
  "neoverse_n1": {
    "from": ["cortex_a72", "armv8.2a"],
  "graviton2": {
    "from": ["aarch64"],
    "vendor": "ARM",
    "features": [
      "fp",
@@ -2451,7 +2298,7 @@
        },
        {
          "versions": "9.0:",
          "flags" : "-mcpu=neoverse-n1"
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto -mtune=neoverse-n1"
        }
      ],
      "clang" : [
@@ -2462,10 +2309,6 @@
        {
          "versions": "5:",
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
        },
        {
          "versions": "10:",
          "flags" : "-mcpu=neoverse-n1"
        }
      ],
      "arm" : [
@@ -2476,113 +2319,8 @@
      ]
    }
  },
  "neoverse_v1": {
    "from": ["neoverse_n1", "armv8.4a"],
    "vendor": "ARM",
    "features": [
      "fp",
      "asimd",
      "evtstrm",
      "aes",
      "pmull",
      "sha1",
      "sha2",
      "crc32",
      "atomics",
      "fphp",
      "asimdhp",
      "cpuid",
      "asimdrdm",
      "jscvt",
      "fcma",
      "lrcpc",
      "dcpop",
      "sha3",
      "sm3",
      "sm4",
      "asimddp",
      "sha512",
      "sve",
      "asimdfhm",
      "dit",
      "uscat",
      "ilrcpc",
      "flagm",
      "ssbs",
      "paca",
      "pacg",
      "dcpodp",
      "svei8mm",
      "svebf16",
      "i8mm",
      "bf16",
      "dgh",
      "rng"
    ],
    "compilers" : {
      "gcc": [
        {
          "versions": "4.8:4.8.9",
          "flags": "-march=armv8-a"
        },
        {
          "versions": "4.9:5.9",
          "flags": "-march=armv8-a+crc+crypto"
        },
        {
          "versions": "6:6.9",
          "flags" : "-march=armv8.1-a"
        },
        {
          "versions": "7:7.9",
          "flags" : "-march=armv8.2-a+crypto+fp16 -mtune=cortex-a72"
        },
        {
          "versions": "8.0:8.9",
          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
        },
        {
          "versions": "9.0:9.9",
          "flags" : "-mcpu=neoverse-v1"
        },
        {
          "versions": "10.0:",
          "flags" : "-mcpu=neoverse-v1"
        }

      ],
      "clang" : [
        {
          "versions": "3.9:4.9",
          "flags" : "-march=armv8.2-a+fp16+crc+crypto"
        },
        {
          "versions": "5:10",
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
        },
        {
          "versions": "11:",
          "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
        },
        {
          "versions": "12:",
          "flags" : "-mcpu=neoverse-v1"
        }
      ],
      "arm" : [
        {
          "versions": "20:21.9",
          "flags" : "-march=armv8.2-a+sve+fp16+rcpc+dotprod+crypto"
        },
        {
          "versions": "22:",
          "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
        }
      ]
    }
  },
  "m1": {
    "from": ["armv8.4a"],
    "from": ["aarch64"],
    "vendor": "Apple",
    "features": [
      "fp",
@@ -2647,76 +2385,6 @@
      ]
    }
  },
  "m2": {
    "from": ["m1", "armv8.5a"],
    "vendor": "Apple",
    "features": [
      "fp",
      "asimd",
      "evtstrm",
      "aes",
      "pmull",
      "sha1",
      "sha2",
      "crc32",
      "atomics",
      "fphp",
      "asimdhp",
      "cpuid",
      "asimdrdm",
      "jscvt",
      "fcma",
      "lrcpc",
      "dcpop",
      "sha3",
      "asimddp",
      "sha512",
      "asimdfhm",
      "dit",
      "uscat",
      "ilrcpc",
      "flagm",
      "ssbs",
      "sb",
      "paca",
      "pacg",
      "dcpodp",
      "flagm2",
      "frint",
      "ecv",
      "bf16",
      "i8mm",
      "bti"
    ],
    "compilers": {
      "gcc": [
        {
          "versions": "8.0:",
          "flags" : "-march=armv8.5-a -mtune=generic"
        }
      ],
      "clang" : [
        {
          "versions": "9.0:12.0",
          "flags" : "-march=armv8.5-a"
        },
        {
          "versions": "13.0:",
          "flags" : "-mcpu=apple-m1"
        }
      ],
      "apple-clang": [
        {
          "versions": "11.0:12.5",
          "flags" : "-march=armv8.5-a"
        },
        {
          "versions": "13.0:",
          "flags" : "-mcpu=vortex"
        }
      ]
    }
  },
  "arm": {
    "from": [],
    "vendor": "generic",

9	lib/spack/external/ctest_log_parser.py	vendored
@@ -71,8 +71,6 @@
import re
import math
import multiprocessing
import sys
import threading
import time
from contextlib import contextmanager

@@ -411,12 +409,7 @@ def parse(self, stream, context=6, jobs=None):
        pool = multiprocessing.Pool(jobs)
        try:
            # this is a workaround for a Python bug in Pool with ctrl-C
            if sys.version_info >= (3, 2):
                max_timeout = threading.TIMEOUT_MAX
            else:
                max_timeout = 9999999
            results = pool.map_async(_parse_unpack, args, 1).get(max_timeout)

            results = pool.map_async(_parse_unpack, args, 1).get(9999999)
            errors, warnings, timings = zip(*results)
        finally:
            pool.terminate()

@@ -29,8 +29,8 @@ class Command(object):
|
||||
- optionals: list of optional arguments (list)
|
||||
- subcommands: list of subcommand parsers (list)
|
||||
"""
|
||||
|
||||
def __init__(self, prog, description, usage, positionals, optionals, subcommands):
|
||||
def __init__(self, prog, description, usage,
|
||||
positionals, optionals, subcommands):
|
||||
self.prog = prog
|
||||
self.description = description
|
||||
self.usage = usage
|
||||
@@ -71,15 +71,15 @@ def parse(self, parser, prog):
|
||||
"""
|
||||
self.parser = parser
|
||||
|
||||
split_prog = parser.prog.split(" ")
|
||||
split_prog = parser.prog.split(' ')
|
||||
split_prog[-1] = prog
|
||||
prog = " ".join(split_prog)
|
||||
prog = ' '.join(split_prog)
|
||||
description = parser.description
|
||||
|
||||
fmt = parser._get_formatter()
|
||||
actions = parser._actions
|
||||
groups = parser._mutually_exclusive_groups
|
||||
usage = fmt._format_usage(None, actions, groups, "").strip()
|
||||
usage = fmt._format_usage(None, actions, groups, '').strip()
|
||||
|
||||
# Go through actions and split them into optionals, positionals,
|
||||
# and subcommands
|
||||
@@ -90,8 +90,8 @@ def parse(self, parser, prog):
|
||||
if action.option_strings:
|
||||
flags = action.option_strings
|
||||
dest_flags = fmt._format_action_invocation(action)
|
||||
help = self._expand_help(action) if action.help else ""
|
||||
help = help.replace("\n", " ")
|
||||
help = self._expand_help(action) if action.help else ''
|
||||
help = help.replace('\n', ' ')
|
||||
optionals.append((flags, dest_flags, help))
|
||||
elif isinstance(action, argparse._SubParsersAction):
|
||||
for subaction in action._choices_actions:
|
||||
@@ -100,19 +100,20 @@ def parse(self, parser, prog):
|
||||
|
||||
# Look for aliases of the form 'name (alias, ...)'
|
||||
if self.aliases:
|
||||
match = re.match(r"(.*) \((.*)\)", subaction.metavar)
|
||||
match = re.match(r'(.*) \((.*)\)', subaction.metavar)
|
||||
if match:
|
||||
aliases = match.group(2).split(", ")
|
||||
aliases = match.group(2).split(', ')
|
||||
for alias in aliases:
|
||||
subparser = action._name_parser_map[alias]
|
||||
subcommands.append((subparser, alias))
|
||||
else:
|
||||
args = fmt._format_action_invocation(action)
|
||||
help = self._expand_help(action) if action.help else ""
|
||||
help = help.replace("\n", " ")
|
||||
help = self._expand_help(action) if action.help else ''
|
||||
help = help.replace('\n', ' ')
|
||||
            positionals.append((args, help))

        return Command(prog, description, usage, positionals, optionals, subcommands)
        return Command(
            prog, description, usage, positionals, optionals, subcommands)

    def format(self, cmd):
        """Returns the string representation of a single node in the
@@ -160,13 +161,14 @@ def write(self, parser):
            raise


_rst_levels = ["=", "-", "^", "~", ":", "`"]
_rst_levels = ['=', '-', '^', '~', ':', '`']


class ArgparseRstWriter(ArgparseWriter):
    """Write argparse output as rst sections."""

    def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
    def __init__(self, prog, out=None, aliases=False,
                 rst_levels=_rst_levels):
        """Create a new ArgparseRstWriter.

        Parameters:
@@ -215,12 +217,11 @@ def begin_command(self, prog):
{1}
{2}

""".format(
            prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
        )
""".format(prog.replace(' ', '-'), prog,
           self.rst_levels[self.level] * len(prog))

    def description(self, description):
        return description + "\n\n"
        return description + '\n\n'

    def usage(self, usage):
        return """\
@@ -228,39 +229,33 @@ def usage(self, usage):

{0}

""".format(
            usage
        )
""".format(usage)

    def begin_positionals(self):
        return "\n**Positional arguments**\n\n"
        return '\n**Positional arguments**\n\n'

    def positional(self, name, help):
        return """\
{0}
  {1}

""".format(
            name, help
        )
""".format(name, help)

    def end_positionals(self):
        return ""
        return ''

    def begin_optionals(self):
        return "\n**Optional arguments**\n\n"
        return '\n**Optional arguments**\n\n'

    def optional(self, opts, help):
        return """\
``{0}``
  {1}

""".format(
            opts, help
        )
""".format(opts, help)

    def end_optionals(self):
        return ""
        return ''

    def begin_subcommands(self, subcommands):
        string = """
@@ -272,10 +267,11 @@ def begin_subcommands(self, subcommands):
"""

        for cmd, _ in subcommands:
            prog = re.sub(r"^[^ ]* ", "", cmd.prog)
            string += " * :ref:`{0} <{1}>`\n".format(prog, cmd.prog.replace(" ", "-"))
            prog = re.sub(r'^[^ ]* ', '', cmd.prog)
            string += ' * :ref:`{0} <{1}>`\n'.format(
                prog, cmd.prog.replace(' ', '-'))

        return string + "\n"
        return string + '\n'


class ArgparseCompletionWriter(ArgparseWriter):
@@ -310,11 +306,9 @@ def format(self, cmd):
        # Flatten lists of lists
        optionals = [x for xx in optionals for x in xx]

        return (
            self.start_function(cmd.prog)
            + self.body(positionals, optionals, subcommands)
            + self.end_function(cmd.prog)
        )
        return (self.start_function(cmd.prog) +
                self.body(positionals, optionals, subcommands) +
                self.end_function(cmd.prog))

    def start_function(self, prog):
        """Returns the syntax needed to begin a function definition.
@@ -325,8 +319,8 @@ def start_function(self, prog):
        Returns:
            str: the function definition beginning
        """
        name = prog.replace("-", "_").replace(" ", "_")
        return "\n_{0}() {{".format(name)
        name = prog.replace('-', '_').replace(' ', '_')
        return '\n_{0}() {{'.format(name)

    def end_function(self, prog=None):
        """Returns the syntax needed to end a function definition.
@@ -337,7 +331,7 @@ def end_function(self, prog=None):
        Returns:
            str: the function definition ending
        """
        return "}\n"
        return '}\n'

    def body(self, positionals, optionals, subcommands):
        """Returns the body of the function.
@@ -350,7 +344,7 @@ def body(self, positionals, optionals, subcommands):
        Returns:
            str: the function body
        """
        return ""
        return ''

    def positionals(self, positionals):
        """Returns the syntax for reporting positional arguments.
@@ -361,7 +355,7 @@ def positionals(self, positionals):
        Returns:
            str: the syntax for positional arguments
        """
        return ""
        return ''

    def optionals(self, optionals):
        """Returns the syntax for reporting optional flags.
@@ -372,7 +366,7 @@ def optionals(self, optionals):
        Returns:
            str: the syntax for optional flags
        """
        return ""
        return ''

    def subcommands(self, subcommands):
        """Returns the syntax for reporting subcommands.
@@ -383,4 +377,4 @@ def subcommands(self, subcommands):
        Returns:
            str: the syntax for subcommand parsers
        """
        return ""
        return ''
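To see how these writers are driven end to end, here is a minimal sketch; the `demo` parser and the StringIO sink are illustrative, not part of this change:

    import argparse
    from io import StringIO

    parser = argparse.ArgumentParser(prog="demo")
    parser.add_argument("target", help="what to build")

    out = StringIO()
    # Walks the parser recursively and emits one rST section per command.
    writer = ArgparseRstWriter("demo", out=out)
    writer.write(parser)
    print(out.getvalue())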
@@ -18,22 +18,22 @@
    map = map
    zip = zip
    from itertools import zip_longest as zip_longest  # novm # noqa: F401
    from urllib.parse import urlencode as urlencode  # novm # noqa: F401
    from urllib.request import urlopen as urlopen  # novm # noqa: F401
    from urllib.parse import urlencode as urlencode    # novm # noqa: F401
    from urllib.request import urlopen as urlopen      # novm # noqa: F401

if sys.version_info >= (3, 3):
    from collections.abc import Hashable as Hashable  # novm
    from collections.abc import Iterable as Iterable  # novm
    from collections.abc import Mapping as Mapping  # novm
    from collections.abc import MutableMapping as MutableMapping  # novm
    from collections.abc import Hashable as Hashable  # novm
    from collections.abc import Iterable as Iterable  # novm
    from collections.abc import Mapping as Mapping  # novm
    from collections.abc import MutableMapping as MutableMapping  # novm
    from collections.abc import MutableSequence as MutableSequence  # novm
    from collections.abc import MutableSet as MutableSet  # novm
    from collections.abc import Sequence as Sequence  # novm
    from collections.abc import MutableSet as MutableSet  # novm
    from collections.abc import Sequence as Sequence  # novm
else:
    from collections import Hashable as Hashable  # noqa: F401
    from collections import Iterable as Iterable  # noqa: F401
    from collections import Mapping as Mapping  # noqa: F401
    from collections import MutableMapping as MutableMapping  # noqa: F401
    from collections import Hashable as Hashable  # noqa: F401
    from collections import Iterable as Iterable  # noqa: F401
    from collections import Mapping as Mapping  # noqa: F401
    from collections import MutableMapping as MutableMapping  # noqa: F401
    from collections import MutableSequence as MutableSequence  # noqa: F401
    from collections import MutableSet as MutableSet  # noqa: F401
    from collections import Sequence as Sequence  # noqa: F401
    from collections import MutableSet as MutableSet  # noqa: F401
    from collections import Sequence as Sequence  # noqa: F401
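The point of this compat shim is that callers import version-appropriate names from a single place rather than branching on the interpreter version themselves. A small illustrative sketch (the function is invented):

    from llnl.util.compat import Mapping

    def is_mapping(obj):
        # Same check on Python 2 and 3, without importing collections.abc directly.
        return isinstance(obj, Mapping)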
(File diff suppressed because it is too large.)
@@ -13,7 +13,7 @@
import sys
import traceback
from datetime import datetime, timedelta
from typing import Any, Callable, Iterable, List, Tuple
from typing import List, Tuple

import six
from six import string_types
@@ -21,7 +21,7 @@
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest

# Ignore emacs backups when listing modules
ignore_modules = [r"^\.#", "~$"]
ignore_modules = [r'^\.#', '~$']


def index_by(objects, *funcs):
@@ -91,9 +91,9 @@ def index_by(objects, *funcs):

def caller_locals():
    """This will return the locals of the *parent* of the caller.
    This allows a function to insert variables into its caller's
    scope.  Yes, this is some black magic, and yes it's useful
    for implementing things like depends_on and provides.
    This allows a function to insert variables into its caller's
    scope.  Yes, this is some black magic, and yes it's useful
    for implementing things like depends_on and provides.
    """
    # Passing zero here skips line context for speed.
    stack = inspect.stack(0)
@@ -105,7 +105,7 @@ def caller_locals():

def get_calling_module_name():
    """Make sure that the caller is a class definition, and return the
    enclosing module's name.
    enclosing module's name.
    """
    # Passing zero here skips line context for speed.
    stack = inspect.stack(0)
@@ -115,13 +115,12 @@ def get_calling_module_name():
    finally:
        del stack

    if "__module__" not in caller_locals:
        raise RuntimeError(
            "Must invoke get_calling_module_name() " "from inside a class definition!"
        )
    if '__module__' not in caller_locals:
        raise RuntimeError("Must invoke get_calling_module_name() "
                           "from inside a class definition!")

    module_name = caller_locals["__module__"]
    base_name = module_name.split(".")[-1]
    module_name = caller_locals['__module__']
    base_name = module_name.split('.')[-1]
    return base_name


@@ -129,8 +128,8 @@ def attr_required(obj, attr_name):
    """Ensure that a class has a required attribute."""
    if not hasattr(obj, attr_name):
        raise RequiredAttributeError(
            "No required attribute '%s' in class '%s'" % (attr_name, obj.__class__.__name__)
        )
            "No required attribute '%s' in class '%s'"
            % (attr_name, obj.__class__.__name__))


def attr_setdefault(obj, name, value):
@@ -202,35 +201,33 @@ def _memoized_function(*args, **kwargs):
            # TypeError is raised when indexing into a dict if the key is unhashable.
            raise six.raise_from(
                UnhashableArguments(
                    "args + kwargs '{}' was not hashable for function '{}'".format(
                        key, func.__name__
                    ),
                    "args + kwargs '{}' was not hashable for function '{}'"
                    .format(key, func.__name__),
                ),
                e,
            )
                e)

    return _memoized_function


def list_modules(directory, **kwargs):
    """Lists all of the modules, excluding ``__init__.py``, in a
    particular directory.  Listed packages have no particular
    order."""
    list_directories = kwargs.setdefault("directories", True)
    particular directory.  Listed packages have no particular
    order."""
    list_directories = kwargs.setdefault('directories', True)

    for name in os.listdir(directory):
        if name == "__init__.py":
        if name == '__init__.py':
            continue

        path = os.path.join(directory, name)
        if list_directories and os.path.isdir(path):
            init_py = os.path.join(path, "__init__.py")
            init_py = os.path.join(path, '__init__.py')
            if os.path.isfile(init_py):
                yield name

        elif name.endswith(".py"):
        elif name.endswith('.py'):
            if not any(re.search(pattern, name) for pattern in ignore_modules):
                yield re.sub(".py$", "", name)
                yield re.sub('.py$', '', name)


def decorator_with_or_without_args(decorator):
@@ -260,34 +257,41 @@ def new_dec(*args, **kwargs):

def key_ordering(cls):
    """Decorates a class with extra methods that implement rich comparison
    operations and ``__hash__``.  The decorator assumes that the class
    implements a function called ``_cmp_key()``.  The rich comparison
    operations will compare objects using this key, and the ``__hash__``
    function will return the hash of this key.
    operations and ``__hash__``.  The decorator assumes that the class
    implements a function called ``_cmp_key()``.  The rich comparison
    operations will compare objects using this key, and the ``__hash__``
    function will return the hash of this key.

    If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
    ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.
    If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
    ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.

    Raises:
        TypeError: If the class does not have a ``_cmp_key`` method
    Raises:
        TypeError: If the class does not have a ``_cmp_key`` method
    """

    def setter(name, value):
        value.__name__ = name
        setattr(cls, name, value)

    if not has_method(cls, "_cmp_key"):
    if not has_method(cls, '_cmp_key'):
        raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)

    setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
    setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
    setter("__le__", lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
    setter('__eq__',
           lambda s, o:
           (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
    setter('__lt__',
           lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
    setter('__le__',
           lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())

    setter("__ne__", lambda s, o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
    setter("__gt__", lambda s, o: o is None or s._cmp_key() > o._cmp_key())
    setter("__ge__", lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
    setter('__ne__',
           lambda s, o:
           (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
    setter('__gt__',
           lambda s, o: o is None or s._cmp_key() > o._cmp_key())
    setter('__ge__',
           lambda s, o: o is None or s._cmp_key() >= o._cmp_key())

    setter("__hash__", lambda self: hash(self._cmp_key()))
    setter('__hash__', lambda self: hash(self._cmp_key()))

    return cls
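A hedged sketch of the decorator's contract (the Version class here is invented for illustration):

    @key_ordering
    class Version(object):
        def __init__(self, major, minor):
            self.major, self.minor = major, minor

        def _cmp_key(self):
            # The decorator derives __eq__, __lt__, __hash__, etc. from this tuple.
            return (self.major, self.minor)

    assert Version(1, 2) < Version(1, 10)
    assert hash(Version(1, 2)) == hash(Version(1, 2))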
@@ -454,7 +458,8 @@ def gt(self, other):
    def le(self, other):
        if self is other:
            return True
        return (other is not None) and not lazy_lt(other._cmp_iter, self._cmp_iter)
        return (other is not None) and not lazy_lt(other._cmp_iter,
                                                   self._cmp_iter)

    def ge(self, other):
        if self is other:
@@ -484,9 +489,7 @@ def add_func_to_class(name, func):
@lazy_lexicographic_ordering
class HashableMap(MutableMapping):
    """This is a hashable, comparable dictionary.  Hash is performed on
    a tuple of the values in the dictionary."""

    __slots__ = ("dict",)
    a tuple of the values in the dictionary."""

    def __init__(self):
        self.dict = {}
@@ -524,7 +527,7 @@ def copy(self):

def in_function(function_name):
    """True if the caller was called from some function with
    the supplied Name, False otherwise."""
    the supplied Name, False otherwise."""
    stack = inspect.stack()
    try:
        for elt in stack[2:]:
@@ -537,25 +540,24 @@ def in_function(function_name):

def check_kwargs(kwargs, fun):
    """Helper for making functions with kwargs.  Checks whether the kwargs
    are empty after all of them have been popped off.  If they're
    not, raises an error describing which kwargs are invalid.
    are empty after all of them have been popped off.  If they're
    not, raises an error describing which kwargs are invalid.

    Example::
    Example::

        def foo(self, **kwargs):
            x = kwargs.pop('x', None)
            y = kwargs.pop('y', None)
            z = kwargs.pop('z', None)
            check_kwargs(kwargs, self.foo)
        def foo(self, **kwargs):
            x = kwargs.pop('x', None)
            y = kwargs.pop('y', None)
            z = kwargs.pop('z', None)
            check_kwargs(kwargs, self.foo)

        # This raises a TypeError:
        foo(w='bad kwarg')
        # This raises a TypeError:
        foo(w='bad kwarg')
    """
    if kwargs:
        raise TypeError(
            "'%s' is an invalid keyword argument for function %s()."
            % (next(iter(kwargs)), fun.__name__)
        )
            % (next(iter(kwargs)), fun.__name__))


def match_predicate(*args):
@@ -571,7 +573,6 @@ def match_predicate(*args):
    * any regex in a list or tuple of regexes matches.
    * any predicate in args matches.
    """

    def match(string):
        for arg in args:
            if isinstance(arg, string_types):
@@ -584,11 +585,9 @@ def match(string):
                if arg(string):
                    return True
            else:
                raise ValueError(
                    "args to match_predicate must be regex, " "list of regexes, or callable."
                )
                raise ValueError("args to match_predicate must be regex, "
                                 "list of regexes, or callable.")
        return False

    return match


@@ -648,7 +647,7 @@ def pretty_date(time, now=None):
    day_diff = diff.days

    if day_diff < 0:
        return ""
        return ''

    if day_diff == 0:
        if second_diff < 10:
@@ -706,40 +705,43 @@ def pretty_string_to_date(date_str, now=None):
    now = now or datetime.now()

    # datetime formats
    pattern[re.compile(r"^\d{4}$")] = lambda x: datetime.strptime(x, "%Y")
    pattern[re.compile(r"^\d{4}-\d{2}$")] = lambda x: datetime.strptime(x, "%Y-%m")
    pattern[re.compile(r"^\d{4}-\d{2}-\d{2}$")] = lambda x: datetime.strptime(x, "%Y-%m-%d")
    pattern[re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}$")] = lambda x: datetime.strptime(
        x, "%Y-%m-%d %H:%M"
    pattern[re.compile(r'^\d{4}$')] = lambda x: datetime.strptime(x, '%Y')
    pattern[re.compile(r'^\d{4}-\d{2}$')] = lambda x: datetime.strptime(
        x, '%Y-%m'
    )
    pattern[re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$")] = lambda x: datetime.strptime(
        x, "%Y-%m-%d %H:%M:%S"
    pattern[re.compile(r'^\d{4}-\d{2}-\d{2}$')] = lambda x: datetime.strptime(
        x, '%Y-%m-%d'
    )
    pattern[re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}$')] = \
        lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M')
    pattern[re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')] = \
        lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')

    pretty_regex = re.compile(r"(a|\d+)\s*(year|month|week|day|hour|minute|second)s?\s*ago")
    pretty_regex = re.compile(
        r'(a|\d+)\s*(year|month|week|day|hour|minute|second)s?\s*ago')

    def _n_xxx_ago(x):
        how_many, time_period = pretty_regex.search(x).groups()

        how_many = 1 if how_many == "a" else int(how_many)
        how_many = 1 if how_many == 'a' else int(how_many)

        # timedelta natively supports time periods up to 'weeks'.
        # To apply month or year we convert to 30 and 365 days
        if time_period == "month":
        if time_period == 'month':
            how_many *= 30
            time_period = "day"
        elif time_period == "year":
            time_period = 'day'
        elif time_period == 'year':
            how_many *= 365
            time_period = "day"
            time_period = 'day'

        kwargs = {(time_period + "s"): how_many}
        kwargs = {(time_period + 's'): how_many}
        return now - timedelta(**kwargs)

    pattern[pretty_regex] = _n_xxx_ago

    # yesterday
    callback = lambda x: now - timedelta(days=1)
    pattern[re.compile("^yesterday$")] = callback
    pattern[re.compile('^yesterday$')] = callback

    for regexp, parser in pattern.items():
        if bool(regexp.match(date_str)):
@@ -749,27 +751,8 @@ def _n_xxx_ago(x):
    raise ValueError(msg)
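Usage of this parser, by example (the dates are illustrative):

    from datetime import datetime

    now = datetime(2022, 6, 1)
    # Absolute formats parse directly...
    assert pretty_string_to_date("2021-03", now) == datetime(2021, 3, 1)
    # ...and "pretty" relative phrases are anchored at `now`.
    assert pretty_string_to_date("2 days ago", now) == datetime(2022, 5, 30)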

def pretty_seconds(seconds):
    """Seconds to string with appropriate units

    Arguments:
        seconds (float): Number of seconds

    Returns:
        str: Time string with units
    """
    if seconds >= 1:
        value, unit = seconds, "s"
    elif seconds >= 1e-3:
        value, unit = seconds * 1e3, "ms"
    elif seconds >= 1e-6:
        value, unit = seconds * 1e6, "us"
    else:
        value, unit = seconds * 1e9, "ns"
    return "%.3f%s" % (value, unit)


class RequiredAttributeError(ValueError):

    def __init__(self, message):
        super(RequiredAttributeError, self).__init__(message)


@@ -781,7 +764,6 @@ class ObjectWrapper(object):
    This class is modeled after the stackoverflow answer:
    * http://stackoverflow.com/a/1445289/771663
    """

    def __init__(self, wrapped_object):
        wrapped_cls = type(wrapped_object)
        wrapped_name = wrapped_cls.__name__
@@ -825,7 +807,7 @@ def __getattr__(self, name):
        # requested but not yet set. The final 'getattr' line here requires
        # 'instance'/'_instance' to be defined or it will enter an infinite
        # loop, so protect against that here.
        if name in ["_instance", "instance"]:
        if name in ['_instance', 'instance']:
            raise AttributeError()
        return getattr(self.instance, name)

@@ -855,7 +837,7 @@ def __init__(self, ref_function):
        self.ref_function = ref_function

    def __getattr__(self, name):
        if name == "ref_function":
        if name == 'ref_function':
            raise AttributeError()
        return getattr(self.ref_function(), name)

@@ -893,8 +875,8 @@ def load_module_from_file(module_name, module_path):
    # This recipe is adapted from https://stackoverflow.com/a/67692/771663
    if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
        import importlib.util

        spec = importlib.util.spec_from_file_location(module_name, module_path)  # novm
        spec = importlib.util.spec_from_file_location(  # novm
            module_name, module_path)
        module = importlib.util.module_from_spec(spec)  # novm
        # The module object needs to exist in sys.modules before the
        # loader executes the module code.
@@ -911,7 +893,6 @@ def load_module_from_file(module_name, module_path):
            raise
    elif sys.version_info[0] == 2:
        import imp

        module = imp.load_source(module_name, module_path)
    return module


@@ -943,10 +924,8 @@ def uniq(sequence):

def star(func):
    """Unpacks arguments for use with Multiprocessing mapping functions"""

    def _wrapper(args):
        return func(*args)

    return _wrapper


@@ -955,23 +934,22 @@ class Devnull(object):

    See https://stackoverflow.com/a/2929954.
    """

    def write(self, *_):
        pass


def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'.  For example::
    replacing intervening elements with '...'.  For example::

        elide_list([1,2,3,4,5,6], 4)
        elide_list([1,2,3,4,5,6], 4)

    gives::
    gives::

        [1, 2, 3, '...', 6]
        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return line_list[: max_num - 1] + ["..."] + line_list[-1:]
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    else:
        return line_list


@@ -994,40 +972,7 @@ def enum(**kwargs):
    Args:
        **kwargs: explicit dictionary of enums
    """
    return type("Enum", (object,), kwargs)


def stable_partition(
    input_iterable,  # type: Iterable
    predicate_fn,  # type: Callable[[Any], bool]
):
    # type: (...) -> Tuple[List[Any], List[Any]]
    """Partition the input iterable according to a custom predicate.

    Args:
        input_iterable: input iterable to be partitioned.
        predicate_fn: predicate function accepting an iterable item
            as argument.

    Return:
        Tuple of the list of elements evaluating to True, and
        list of elements evaluating to False.
    """
    true_items, false_items = [], []
    for item in input_iterable:
        if predicate_fn(item):
            true_items.append(item)
            continue
        false_items.append(item)
    return true_items, false_items
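The partition is stable, i.e. relative order is preserved within each output list:

    evens, odds = stable_partition([3, 8, 1, 6, 4], lambda n: n % 2 == 0)
    assert evens == [8, 6, 4]
    assert odds == [3, 1]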


def ensure_last(lst, *elements):
    """Performs a stable partition of lst, ensuring that ``elements``
    occur at the end of ``lst`` in specified order. Mutates ``lst``.
    Raises ``ValueError`` if any ``elements`` are not already in ``lst``."""
    for elt in elements:
        lst.append(lst.pop(lst.index(elt)))
    return type('Enum', (object,), kwargs)


class TypedMutableSequence(MutableSequence):
@@ -1043,7 +988,6 @@ class Foo(TypedMutableSequence):
    if isinstance(l, Foo):
        # do something
    """

    def __init__(self, iterable):
        self.data = list(iterable)

@@ -1073,7 +1017,7 @@ class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]

    def __bool__(self):
        """Whether any exceptions were handled."""
@@ -1092,15 +1036,17 @@ def grouped_message(self, with_tracebacks=True):
        # type: (bool) -> str
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            "{0} raised {1}: {2}{3}".format(
            '{0} raised {1}: {2}{3}'.format(
                context,
                exc.__class__.__name__,
                exc,
                "\n{0}".format("".join(tb)) if with_tracebacks else "",
                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
            )
            for context, exc, tb in self.exceptions
        ]
        return "due to the following failures:\n{0}".format("\n".join(each_exception_message))
        return 'due to the following failures:\n{0}'.format(
            '\n'.join(each_exception_message)
        )


class GroupedExceptionForwarder(object):
@@ -1126,16 +1072,3 @@ def __exit__(self, exc_type, exc_value, tb):
        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True


class classproperty(object):
    """Non-data descriptor to evaluate a class-level property. The function that performs
    the evaluation is injected at creation time and take an instance (could be None) and
    an owner (i.e. the class that originated the instance)
    """

    def __init__(self, callback):
        self.callback = callback

    def __get__(self, instance, owner):
        return self.callback(owner)
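A sketch of the exception handler in use, built only from the API visible above; the context name is invented:

    import traceback

    handler = GroupedExceptionHandler()
    try:
        raise RuntimeError("fetch failed")
    except RuntimeError as e:
        # The handler stores (context, exception, traceback-lines) tuples.
        handler.exceptions.append(
            ("fetching", e, traceback.format_exc().splitlines(True)))

    if handler:  # __bool__ reports whether anything was recorded
        print(handler.grouped_message(with_tracebacks=False))
        # -> due to the following failures:
        #    fetching raised RuntimeError: fetch failed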
@@ -13,12 +13,12 @@
from collections import OrderedDict

import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, touch, traverse_tree
from llnl.util.filesystem import mkdirp, touch, traverse_tree
from llnl.util.symlink import islink, symlink

__all__ = ["LinkTree"]
__all__ = ['LinkTree']

empty_file_name = ".spack-empty"
empty_file_name = '.spack-empty'


def remove_link(src, dest):
@@ -38,28 +38,26 @@ class MergeConflict:

        project(src_a) == project(src_b) == dst
    """

    def __init__(self, dst, src_a=None, src_b=None):
        self.dst = dst
        self.src_a = src_a
        self.src_b = src_b


class SourceMergeVisitor(BaseDirectoryVisitor):
class SourceMergeVisitor(object):
    """
    Visitor that produces actions:
    - An ordered list of directories to create in dst
    - A list of files to link in dst
    - A list of merge conflicts in dst/
    """

    def __init__(self, ignore=None):
        self.ignore = ignore if ignore is not None else lambda f: False

        # When mapping <src root> to <dst root>/<projection>, we need
        # to prepend the <projection> bit to the relative path in the
        # destination dir.
        self.projection = ""
        self.projection = ''

        # When a file blocks another file, the conflict can sometimes
        # be resolved / ignored (e.g. <prefix>/LICENSE or
@@ -90,13 +88,10 @@ def before_visit_dir(self, root, rel_path, depth):
        elif proj_rel_path in self.files:
            # Can't create a dir where a file is.
            src_a_root, src_a_relpath = self.files[proj_rel_path]
            self.fatal_conflicts.append(
                MergeConflict(
                    dst=proj_rel_path,
                    src_a=os.path.join(src_a_root, src_a_relpath),
                    src_b=os.path.join(root, rel_path),
                )
            )
            self.fatal_conflicts.append(MergeConflict(
                dst=proj_rel_path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join(root, rel_path)))
            return False
        elif proj_rel_path in self.directories:
            # No new directory, carry on.
@@ -106,6 +101,9 @@ def before_visit_dir(self, root, rel_path, depth):
            self.directories[proj_rel_path] = (root, rel_path)
            return True

    def after_visit_dir(self, root, rel_path, depth):
        pass

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        """
        Replace symlinked dirs with actual directories when possible in low depths,
@@ -138,6 +136,9 @@ def before_visit_symlinked_dir(self, root, rel_path, depth):
            self.visit_file(root, rel_path, depth)
            return False

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        pass

    def visit_file(self, root, rel_path, depth):
        proj_rel_path = os.path.join(self.projection, rel_path)

@@ -146,59 +147,46 @@ def visit_file(self, root, rel_path, depth):
        elif proj_rel_path in self.directories:
            # Can't create a file where a dir is; fatal error
            src_a_root, src_a_relpath = self.directories[proj_rel_path]
            self.fatal_conflicts.append(
                MergeConflict(
                    dst=proj_rel_path,
                    src_a=os.path.join(src_a_root, src_a_relpath),
                    src_b=os.path.join(root, rel_path),
                )
            )
            self.fatal_conflicts.append(MergeConflict(
                dst=proj_rel_path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join(root, rel_path)))
        elif proj_rel_path in self.files:
            # In some cases we can resolve file-file conflicts
            src_a_root, src_a_relpath = self.files[proj_rel_path]
            self.file_conflicts.append(
                MergeConflict(
                    dst=proj_rel_path,
                    src_a=os.path.join(src_a_root, src_a_relpath),
                    src_b=os.path.join(root, rel_path),
                )
            )
            self.file_conflicts.append(MergeConflict(
                dst=proj_rel_path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join(root, rel_path)))
        else:
            # Otherwise register this file to be linked.
            self.files[proj_rel_path] = (root, rel_path)

    def visit_symlinked_file(self, root, rel_path, depth):
        # Treat symlinked files as ordinary files (without "dereferencing")
        self.visit_file(root, rel_path, depth)

    def set_projection(self, projection):
        self.projection = os.path.normpath(projection)

        # Todo, is this how to check in general for empty projection?
        if self.projection == ".":
            self.projection = ""
        if self.projection == '.':
            self.projection = ''
            return

        # If there is a projection, we'll also create the directories
        # it consists of, and check whether that's causing conflicts.
        path = ""
        path = ''
        for part in self.projection.split(os.sep):
            path = os.path.join(path, part)
            if path not in self.files:
                self.directories[path] = ("<projection>", path)
                self.directories[path] = ('<projection>', path)
            else:
                # Can't create a dir where a file is.
                src_a_root, src_a_relpath = self.files[path]
                self.fatal_conflicts.append(
                    MergeConflict(
                        dst=path,
                        src_a=os.path.join(src_a_root, src_a_relpath),
                        src_b=os.path.join("<projection>", path),
                    )
                )
                self.fatal_conflicts.append(MergeConflict(
                    dst=path,
                    src_a=os.path.join(src_a_root, src_a_relpath),
                    src_b=os.path.join('<projection>', path)))


class DestinationMergeVisitor(BaseDirectoryVisitor):
class DestinationMergeVisitor(object):
    """DestinatinoMergeVisitor takes a SourceMergeVisitor
    and:

@@ -212,7 +200,6 @@ class DestinationMergeVisitor(BaseDirectoryVisitor):
    in the target prefix will never be merged with
    directories in the sources directories.
    """

    def __init__(self, source_merge_visitor):
        self.src = source_merge_visitor

@@ -221,11 +208,10 @@ def before_visit_dir(self, root, rel_path, depth):
        # and don't traverse deeper
        if rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))
            return False

        # If destination dir was also a src dir, remove the mkdir
@@ -238,6 +224,9 @@ def before_visit_dir(self, root, rel_path, depth):
        # don't descend into it.
        return False

    def after_visit_dir(self, root, rel_path, depth):
        pass

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        """
        Symlinked directories in the destination prefix should
@@ -247,44 +236,39 @@ def before_visit_symlinked_dir(self, root, rel_path, depth):
        # Always conflict
        if rel_path in self.src.directories:
            src_a_root, src_a_relpath = self.src.directories[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))

        if rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))

        # Never descend into symlinked target dirs.
        return False

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        pass

    def visit_file(self, root, rel_path, depth):
        # Can't merge a file if target already exists
        if rel_path in self.src.directories:
            src_a_root, src_a_relpath = self.src.directories[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))

        elif rel_path in self.src.files:
            src_a_root, src_a_relpath = self.src.files[rel_path]
            self.src.fatal_conflicts.append(
                MergeConflict(
                    rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
                )
            )

    def visit_symlinked_file(self, root, rel_path, depth):
        # Treat symlinked files as ordinary files (without "dereferencing")
        self.visit_file(root, rel_path, depth)
            self.src.fatal_conflicts.append(MergeConflict(
                rel_path,
                os.path.join(src_a_root, src_a_relpath),
                os.path.join(root, rel_path)))


class LinkTree(object):
@@ -297,31 +281,30 @@ class LinkTree(object):
    symlinked to, to prevent the source directory from ever being
    modified.
    """

    def __init__(self, source_root):
        if not os.path.exists(source_root):
            raise IOError("No such file or directory: '%s'", source_root)

        self._root = source_root

    def find_conflict(self, dest_root, ignore=None, ignore_file_conflicts=False):
    def find_conflict(self, dest_root, ignore=None,
                      ignore_file_conflicts=False):
        """Returns the first file in dest that conflicts with src"""
        ignore = ignore or (lambda x: False)
        conflicts = self.find_dir_conflicts(dest_root, ignore)

        if not ignore_file_conflicts:
            conflicts.extend(
                dst
                for src, dst in self.get_file_map(dest_root, ignore).items()
                if os.path.exists(dst)
            )
                dst for src, dst
                in self.get_file_map(dest_root, ignore).items()
                if os.path.exists(dst))

        if conflicts:
            return conflicts[0]

    def find_dir_conflicts(self, dest_root, ignore):
        conflicts = []
        kwargs = {"follow_nonexisting": False, "ignore": ignore}
        kwargs = {'follow_nonexisting': False, 'ignore': ignore}
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                if os.path.exists(dest) and not os.path.isdir(dest):
@@ -332,7 +315,7 @@ def find_dir_conflicts(self, dest_root, ignore):

    def get_file_map(self, dest_root, ignore):
        merge_map = {}
        kwargs = {"follow_nonexisting": True, "ignore": ignore}
        kwargs = {'follow_nonexisting': True, 'ignore': ignore}
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if not os.path.isdir(src):
                merge_map[src] = dest
@@ -354,7 +337,8 @@ def merge_directories(self, dest_root, ignore):
                    touch(marker)

    def unmerge_directories(self, dest_root, ignore):
        for src, dest in traverse_tree(self._root, dest_root, ignore=ignore, order="post"):
        for src, dest in traverse_tree(
                self._root, dest_root, ignore=ignore, order='post'):
            if os.path.isdir(src):
                if not os.path.exists(dest):
                    continue
@@ -370,7 +354,8 @@ def unmerge_directories(self, dest_root, ignore):
            if os.path.exists(marker):
                os.remove(marker)

    def merge(self, dest_root, ignore_conflicts=False, ignore=None, link=symlink, relative=False):
    def merge(self, dest_root, ignore_conflicts=False, ignore=None,
              link=symlink, relative=False):
        """Link all files in src into dest, creating directories
        if necessary.

@@ -392,8 +377,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None, link=symlink, re
            ignore = lambda x: False

        conflict = self.find_conflict(
            dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts
        )
            dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts)
        if conflict:
            raise SingleMergeConflictError(conflict)

@@ -432,7 +416,8 @@ class MergeConflictError(Exception):

class SingleMergeConflictError(MergeConflictError):
    def __init__(self, path):
        super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
        super(MergeConflictError, self).__init__(
            "Package merge blocked by file: %s" % path)


class MergeConflictSummary(MergeConflictError):
@@ -445,6 +430,5 @@ def __init__(self, conflicts):
        # show the first 3 merge conflicts.
        for conflict in conflicts[:3]:
            msg += "\n    `{0}` and `{1}` both project to `{2}`".format(
                conflict.src_a, conflict.src_b, conflict.dst
            )
                conflict.src_a, conflict.src_b, conflict.dst)
        super(MergeConflictSummary, self).__init__(msg)
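A brief, hypothetical sketch of the LinkTree API shown above (the paths are invented):

    tree = LinkTree("/opt/pkg/prefix")

    conflict = tree.find_conflict("/opt/view")
    if conflict is None:
        # Mirrors the source tree into /opt/view, symlinking files and
        # creating directories as needed.
        tree.merge("/opt/view")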
@@ -12,26 +12,25 @@
from typing import Dict, Tuple  # novm

import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds

import spack.util.string

if sys.platform != "win32":
if sys.platform != 'win32':
    import fcntl


__all__ = [
    "Lock",
    "LockDowngradeError",
    "LockUpgradeError",
    "LockTransaction",
    "WriteTransaction",
    "ReadTransaction",
    "LockError",
    "LockTimeoutError",
    "LockPermissionError",
    "LockROFileError",
    "CantCreateLockError",
    'Lock',
    'LockDowngradeError',
    'LockUpgradeError',
    'LockTransaction',
    'WriteTransaction',
    'ReadTransaction',
    'LockError',
    'LockTimeoutError',
    'LockPermissionError',
    'LockROFileError',
    'CantCreateLockError'
]


@@ -48,7 +47,6 @@ class OpenFile(object):
    the file descriptor from the file handle if needed -- or we could make this track
    file descriptors as well in the future.
    """

    def __init__(self, fh):
        self.fh = fh
        self.refs = 0
@@ -94,11 +92,11 @@ def get_fh(self, path):
            path (str): path to lock file we want a filehandle for
        """
        # Open writable files as 'r+' so we can upgrade to write later
        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), 'r+'

        pid = os.getpid()
        open_file = None  # OpenFile object, if there is one
        stat = None  # stat result for the lockfile, if it exists
        stat = None       # stat result for the lockfile, if it exists

        try:
            # see whether we've seen this inode/pid before
@@ -111,7 +109,7 @@ def get_fh(self, path):
                raise

            # path does not exist -- fail if we won't be able to create it
            parent = os.path.dirname(path) or "."
            parent = os.path.dirname(path) or '.'
            if not os.access(parent, os.W_OK):
                raise CantCreateLockError(path)

@@ -121,7 +119,7 @@ def get_fh(self, path):
                # we know path exists but not if it's writable. If it's read-only,
                # only open the file for reading (and fail if we're trying to get
                # an exclusive (write) lock on it)
                os_mode, fh_mode = os.O_RDONLY, "r"
                os_mode, fh_mode = os.O_RDONLY, 'r'

            fd = os.open(path, os_mode)
            fh = os.fdopen(fd, fh_mode)
@@ -164,10 +162,10 @@ def release_fh(self, path):
def _attempts_str(wait_time, nattempts):
    # Don't print anything if we succeeded on the first try
    if nattempts <= 1:
        return ""
        return ''

    attempts = spack.util.string.plural(nattempts, "attempt")
    return " after {} and {}".format(pretty_seconds(wait_time), attempts)
    attempts = spack.util.string.plural(nattempts, 'attempt')
    return ' after {0:0.2f}s and {1}'.format(wait_time, attempts)


class LockType(object):
@@ -190,7 +188,8 @@ def to_module(tid):

    @staticmethod
    def is_valid(op):
        return op == LockType.READ or op == LockType.WRITE
        return op == LockType.READ \
            or op == LockType.WRITE


class Lock(object):
@@ -208,7 +207,8 @@ class Lock(object):
    overlapping byte ranges in the same file).
    """

    def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
    def __init__(self, path, start=0, length=0, default_timeout=None,
                 debug=False, desc=''):
        """Construct a new lock on the file at ``path``.

        By default, the lock applies to the whole file.  Optionally,
@@ -243,7 +243,7 @@ def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, d
        self.debug = debug

        # optional debug description
        self.desc = " ({0})".format(desc) if desc else ""
        self.desc = ' ({0})'.format(desc) if desc else ''

        # If the user doesn't set a default timeout, or if they choose
        # None, 0, etc. then lock attempts will not time out (unless the
@@ -280,17 +280,17 @@ def _poll_interval_generator(_wait_times=None):

    def __repr__(self):
        """Formal representation of the lock."""
        rep = "{0}(".format(self.__class__.__name__)
        rep = '{0}('.format(self.__class__.__name__)
        for attr, value in self.__dict__.items():
            rep += "{0}={1}, ".format(attr, value.__repr__())
        return "{0})".format(rep.strip(", "))
            rep += '{0}={1}, '.format(attr, value.__repr__())
        return '{0})'.format(rep.strip(', '))

    def __str__(self):
        """Readable string (with key fields) of the lock."""
        location = "{0}[{1}:{2}]".format(self.path, self._start, self._length)
        timeout = "timeout={0}".format(self.default_timeout)
        activity = "#reads={0}, #writes={1}".format(self._reads, self._writes)
        return "({0}, {1}, {2})".format(location, timeout, activity)
        location = '{0}[{1}:{2}]'.format(self.path, self._start, self._length)
        timeout = 'timeout={0}'.format(self.default_timeout)
        activity = '#reads={0}, #writes={1}'.format(self._reads, self._writes)
        return '({0}, {1}, {2})'.format(location, timeout, activity)
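For illustration, constructing a byte-range lock with the signature shown above; the path is invented, and byte ranges let many independent locks share a single lock file:

    # Lock bytes [0, 1) of the lock file, with a 10 second default timeout.
    lock = Lock("/tmp/prefix.lock", start=0, length=1,
                default_timeout=10, desc="demo")
    lock.acquire_write()
    try:
        pass  # ... critical section ...
    finally:
        lock.release_write()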

    def _lock(self, op, timeout=None):
        """This takes a lock using POSIX locks (``fcntl.lockf``).
@@ -305,7 +305,7 @@ def _lock(self, op, timeout=None):
        assert LockType.is_valid(op)
        op_str = LockType.to_str(op)

        self._log_acquiring("{0} LOCK".format(op_str))
        self._log_acquiring('{0} LOCK'.format(op_str))
        timeout = timeout or self.default_timeout

        # Create file and parent directories if they don't exist.
@@ -313,16 +313,14 @@ def _lock(self, op, timeout=None):
            self._ensure_parent_directory()
            self._file = file_tracker.get_fh(self.path)

        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == 'r':
            # Attempt to upgrade to write lock w/a read-only file.
            # If the file were writable, we'd have opened it 'r+'
            raise LockROFileError(self.path)

        self._log_debug(
            "{} locking [{}:{}]: timeout {}".format(
                op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
            )
        )
        self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
                        .format(op_str.lower(), self._start, self._length,
                                timeout))

        poll_intervals = iter(Lock._poll_interval_generator())
        start_time = time.time()
@@ -341,8 +339,8 @@ def _lock(self, op, timeout=None):
                total_wait_time = time.time() - start_time
                return total_wait_time, num_attempts

        total_wait_time = time.time() - start_time
        raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)
        raise LockTimeoutError("Timed out waiting for a {0} lock."
                               .format(op_str.lower()))

    def _poll_lock(self, op):
        """Attempt to acquire the lock in a non-blocking manner. Return whether
@@ -351,19 +349,16 @@ def _poll_lock(self, op):
        module_op = LockType.to_module(op)
        try:
            # Try to get the lock (will raise if not available.)
            fcntl.lockf(
                self._file, module_op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
            )
            fcntl.lockf(self._file, module_op | fcntl.LOCK_NB,
                        self._length, self._start, os.SEEK_SET)

            # help for debugging distributed locking
            if self.debug:
                # All locks read the owner PID and host
                self._read_log_debug_data()
                self._log_debug(
                    "{0} locked {1} [{2}:{3}] (owner={4})".format(
                        LockType.to_str(op), self.path, self._start, self._length, self.pid
                    )
                )
                self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
                                .format(LockType.to_str(op), self.path,
                                        self._start, self._length, self.pid))

                # Exclusive locks write their PID/host
                if module_op == fcntl.LOCK_EX:
@@ -383,17 +378,14 @@ def _ensure_parent_directory(self):

        # relative paths to lockfiles in the current directory have no parent
        if not parent:
            return "."
            return '.'

        try:
            os.makedirs(parent)
        except OSError as e:
            # os.makedirs can fail in a number of ways when the directory already exists.
            # With EISDIR, we know it exists, and others like EEXIST, EACCES, and EROFS
            # are fine if we ensure that the directory exists.
            # Python 3 allows an exist_ok parameter and ignores any OSError as long as
            # the directory exists.
            if not (e.errno == errno.EISDIR or os.path.isdir(parent)):
            # makedirs can fail when diretory already exists.
            if not (e.errno == errno.EEXIST and os.path.isdir(parent) or
                    e.errno == errno.EISDIR):
                raise
        return parent

@@ -404,9 +396,9 @@ def _read_log_debug_data(self):

        line = self._file.read()
        if line:
            pid, host = line.strip().split(",")
            _, _, self.pid = pid.rpartition("=")
            _, _, self.host = host.rpartition("=")
            pid, host = line.strip().split(',')
            _, _, self.pid = pid.rpartition('=')
            _, _, self.host = host.rpartition('=')
            self.pid = int(self.pid)

    def _write_log_debug_data(self):
@@ -431,7 +423,8 @@ def _unlock(self):
        be masquerading as write locks, but this removes either.

        """
        fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
        fcntl.lockf(self._file, fcntl.LOCK_UN,
                    self._length, self._start, os.SEEK_SET)

        file_tracker.release_fh(self.path)
        self._file = None
@@ -456,7 +449,7 @@ def acquire_read(self, timeout=None):
            wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
            self._reads += 1
            # Log if acquired, which includes counts when verbose
            self._log_acquired("READ LOCK", wait_time, nattempts)
            self._log_acquired('READ LOCK', wait_time, nattempts)
            return True
        else:
            # Increment the read count for nested lock tracking
@@ -481,7 +474,7 @@ def acquire_write(self, timeout=None):
            wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
            self._writes += 1
            # Log if acquired, which includes counts when verbose
            self._log_acquired("WRITE LOCK", wait_time, nattempts)
            self._log_acquired('WRITE LOCK', wait_time, nattempts)

            # return True only if we weren't nested in a read lock.
            # TODO: we may need to return two values: whether we got
@@ -568,7 +561,7 @@ def release_read(self, release_fn=None):
        """
        assert self._reads > 0

        locktype = "READ LOCK"
        locktype = 'READ LOCK'
        if self._reads == 1 and self._writes == 0:
            self._log_releasing(locktype)

@@ -576,7 +569,7 @@ def release_read(self, release_fn=None):
            release_fn = release_fn or true_fn
            result = release_fn()

            self._unlock()  # can raise LockError.
            self._unlock()      # can raise LockError.
            self._reads = 0
            self._log_released(locktype)
            return result
@@ -604,14 +597,14 @@ def release_write(self, release_fn=None):
        assert self._writes > 0
        release_fn = release_fn or true_fn

        locktype = "WRITE LOCK"
        locktype = 'WRITE LOCK'
        if self._writes == 1 and self._reads == 0:
            self._log_releasing(locktype)

            # we need to call release_fn before releasing the lock
            result = release_fn()

            self._unlock()  # can raise LockError.
            self._unlock()      # can raise LockError.
            self._writes = 0
            self._log_released(locktype)
            return result
@@ -632,55 +625,56 @@ def cleanup(self):
            raise LockError("Attempting to cleanup active lock.")

    def _get_counts_desc(self):
        return (
            "(reads {0}, writes {1})".format(self._reads, self._writes) if tty.is_verbose() else ""
        )
        return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
            if tty.is_verbose() else ''

    def _log_acquired(self, locktype, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Acquired at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, "{0}{1}".format(desc, attempts_part)))
        desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, '{0}{1}'
                                         .format(desc, attempts_part)))

    def _log_acquiring(self, locktype):
        self._log_debug(self._status_msg(locktype, "Acquiring"), level=3)
        self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)

    def _log_debug(self, *args, **kwargs):
        """Output lock debug messages."""
        kwargs["level"] = kwargs.get("level", 2)
        kwargs['level'] = kwargs.get('level', 2)
        tty.debug(*args, **kwargs)

    def _log_downgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Downgraded at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg("READ LOCK", "{0}{1}".format(desc, attempts_part)))
        desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
                                         .format(desc, attempts_part)))

    def _log_downgrading(self):
        self._log_debug(self._status_msg("WRITE LOCK", "Downgrading"), level=3)
        self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)

    def _log_released(self, locktype):
        now = datetime.now()
        desc = "Released at %s" % now.strftime("%H:%M:%S.%f")
        desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, desc))

    def _log_releasing(self, locktype):
        self._log_debug(self._status_msg(locktype, "Releasing"), level=3)
        self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)

    def _log_upgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Upgraded at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg("WRITE LOCK", "{0}{1}".format(desc, attempts_part)))
        desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
                                         format(desc, attempts_part)))

    def _log_upgrading(self):
        self._log_debug(self._status_msg("READ LOCK", "Upgrading"), level=3)
        self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)

    def _status_msg(self, locktype, status):
        status_desc = "[{0}] {1}".format(status, self._get_counts_desc())
        return "{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}".format(
            locktype, self, status_desc
        )
        status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
        return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
            locktype, self, status_desc)


class LockTransaction(object):
@@ -721,7 +715,7 @@ def __init__(self, lock, acquire=None, release=None, timeout=None):
    def __enter__(self):
        if self._enter() and self._acquire_fn:
            self._as = self._acquire_fn()
            if hasattr(self._as, "__enter__"):
            if hasattr(self._as, '__enter__'):
                return self._as.__enter__()
            else:
                return self._as
@@ -733,7 +727,7 @@ def release_fn():
            if self._release_fn is not None:
                return self._release_fn(type, value, traceback)

        if self._as and hasattr(self._as, "__exit__"):
        if self._as and hasattr(self._as, '__exit__'):
            if self._as.__exit__(type, value, traceback):
                suppress = True

@@ -745,7 +739,6 @@ def release_fn():

class ReadTransaction(LockTransaction):
    """LockTransaction context manager that does a read and releases it."""

    def _enter(self):
        return self._lock.acquire_read(self._timeout)

@@ -755,7 +748,6 @@ def _exit(self, release_fn):

class WriteTransaction(LockTransaction):
    """LockTransaction context manager that does a write and releases it."""

    def _enter(self):
        return self._lock.acquire_write(self._timeout)

@@ -769,7 +761,6 @@ class LockError(Exception):

class LockDowngradeError(LockError):
    """Raised when unable to downgrade from a write to a read lock."""

    def __init__(self, path):
        msg = "Cannot downgrade lock from write to read on file: %s" % path
        super(LockDowngradeError, self).__init__(msg)
@@ -782,22 +773,9 @@ class LockLimitError(LockError):

class LockTimeoutError(LockError):
    """Raised when an attempt to acquire a lock times out."""

    def __init__(self, lock_type, path, time, attempts):
        fmt = "Timed out waiting for a {} lock after {}.\n    Made {} {} on file: {}"
        super(LockTimeoutError, self).__init__(
            fmt.format(
                lock_type,
                pretty_seconds(time),
                attempts,
                "attempt" if attempts == 1 else "attempts",
                path,
            )
        )


class LockUpgradeError(LockError):
    """Raised when unable to upgrade from a read to a write lock."""

    def __init__(self, path):
        msg = "Cannot upgrade lock from read to write on file: %s" % path
        super(LockUpgradeError, self).__init__(msg)
@@ -809,7 +787,6 @@ class LockPermissionError(LockError):

class LockROFileError(LockPermissionError):
    """Tried to take an exclusive lock on a read-only file."""

    def __init__(self, path):
        msg = "Can't take write lock on read-only file: %s" % path
        super(LockROFileError, self).__init__(msg)
@@ -817,7 +794,6 @@ def __init__(self, path):

class CantCreateLockError(LockPermissionError):
    """Attempt to create a lock in an unwritable location."""

    def __init__(self, path):
        msg = "cannot create lock '%s': " % path
        msg += "file does not exist and location is not writable"
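A sketch of the transaction wrappers in use (the lock path is invented):

    lock = Lock("/tmp/db.lock", default_timeout=5)

    # Acquires a read lock on entry and releases it on exit.
    with ReadTransaction(lock):
        pass  # ... read shared state ...

    # Same pattern for writers.
    with WriteTransaction(lock):
        pass  # ... mutate shared state ...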
@@ -10,7 +10,7 @@
"""
from multiprocessing import Semaphore, Value

__all__ = ["Barrier"]
__all__ = ['Barrier']


class Barrier:
@@ -24,7 +24,7 @@ class Barrier:
    def __init__(self, n, timeout=None):
        self.n = n
        self.to = timeout
        self.count = Value("i", 0)
        self.count = Value('i', 0)
        self.mutex = Semaphore(1)
        self.turnstile1 = Semaphore(0)
        self.turnstile2 = Semaphore(1)
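Assuming the class exposes the usual wait() rendezvous point (not shown in this hunk), a hypothetical sketch of the barrier across processes:

    from multiprocessing import Process

    barrier = Barrier(3, timeout=5)

    def worker(b):
        # ... per-process setup ...
        b.wait()  # all three processes rendezvous here
        # ... proceed in lockstep ...

    procs = [Process(target=worker, args=(barrier,)) for _ in range(3)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()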
@@ -11,7 +11,7 @@

from llnl.util import lang

is_windows = _platform == "win32"
is_windows = _platform == 'win32'

if is_windows:
    from win32file import CreateHardLink
@@ -47,7 +47,7 @@ def _win32_junction(path, link):

    # os.symlink will fail if link exists, emulate the behavior here
    if exists(link):
        raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
        raise OSError(errno.EEXIST, 'File exists: %s -> %s' % (link, path))

    if not os.path.isabs(path):
        parent = os.path.join(link, os.pardir)
@@ -61,14 +61,13 @@ def _win32_junction(path, link):
def _win32_can_symlink():
    tempdir = tempfile.mkdtemp()

    dpath = join(tempdir, "dpath")
    fpath = join(tempdir, "fpath.txt")
    dpath = join(tempdir, 'dpath')
    fpath = join(tempdir, 'fpath.txt')

    dlink = join(tempdir, "dlink")
    flink = join(tempdir, "flink.txt")
    dlink = join(tempdir, 'dlink')
    flink = join(tempdir, 'flink.txt')

    import llnl.util.filesystem as fs

    fs.touchp(fpath)

    try:
@@ -107,6 +106,7 @@ def _win32_is_junction(path):
        FILE_ATTRIBUTE_REPARSE_POINT = 0x400

        res = GetFileAttributes(path)
        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
        return res != INVALID_FILE_ATTRIBUTES and \
            bool(res & FILE_ATTRIBUTE_REPARSE_POINT)

    return False
@@ -54,7 +54,7 @@ def is_stacktrace():

def set_debug(level=0):
    global _debug
    assert level >= 0, "Debug level must be a positive value"
    assert level >= 0, 'Debug level must be a positive value'
    _debug = level


@@ -110,7 +110,10 @@ def output_filter(filter_fn):
class SuppressOutput:
    """Class for disabling output in a scope using 'with' keyword"""

    def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
    def __init__(self,
                 msg_enabled=True,
                 warn_enabled=True,
                 error_enabled=True):

        self._msg_enabled_initial = _msg_enabled
        self._warn_enabled_initial = _warn_enabled
@@ -161,10 +164,11 @@ def get_timestamp(force=False):
    """Get a string timestamp"""
    if _debug or _timestamp or force:
        # Note inclusion of the PID is useful for parallel builds.
        pid = ", {0}".format(os.getpid()) if show_pid() else ""
        return "[{0}{1}] ".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
        pid = ', {0}'.format(os.getpid()) if show_pid() else ''
        return '[{0}{1}] '.format(
            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
    else:
        return ""
        return ''


def msg(message, *args, **kwargs):
@@ -174,14 +178,26 @@ def msg(message, *args, **kwargs):
    if isinstance(message, Exception):
        message = "%s: %s" % (message.__class__.__name__, str(message))

    newline = kwargs.get("newline", True)
    newline = kwargs.get('newline', True)
    st_text = ""
    if _stacktrace:
        st_text = process_stacktrace(2)
    if newline:
        cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
        cprint(
            "@*b{%s==>} %s%s" % (
                st_text,
                get_timestamp(),
                cescape(_output_filter(message))
            )
        )
    else:
        cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
        cwrite(
            "@*b{%s==>} %s%s" % (
                st_text,
                get_timestamp(),
                cescape(_output_filter(message))
            )
        )
    for arg in args:
        print(indent + _output_filter(six.text_type(arg)))

@@ -190,19 +206,23 @@ def info(message, *args, **kwargs):
    if isinstance(message, Exception):
        message = "%s: %s" % (message.__class__.__name__, str(message))

    format = kwargs.get("format", "*b")
    stream = kwargs.get("stream", sys.stdout)
    wrap = kwargs.get("wrap", False)
    break_long_words = kwargs.get("break_long_words", False)
    st_countback = kwargs.get("countback", 3)
    format = kwargs.get('format', '*b')
    stream = kwargs.get('stream', sys.stdout)
    wrap = kwargs.get('wrap', False)
    break_long_words = kwargs.get('break_long_words', False)
    st_countback = kwargs.get('countback', 3)

    st_text = ""
    if _stacktrace:
        st_text = process_stacktrace(st_countback)
    cprint(
        "@%s{%s==>} %s%s"
        % (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
        stream=stream,
        "@%s{%s==>} %s%s" % (
            format,
            st_text,
            get_timestamp(),
            cescape(_output_filter(six.text_type(message)))
        ),
        stream=stream
    )
    for arg in args:
        if wrap:
@@ -210,25 +230,27 @@ def info(message, *args, **kwargs):
                _output_filter(six.text_type(arg)),
                initial_indent=indent,
                subsequent_indent=indent,
                break_long_words=break_long_words,
                break_long_words=break_long_words
            )
            for line in lines:
                stream.write(line + "\n")
                stream.write(line + '\n')
        else:
            stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
            stream.write(
                indent + _output_filter(six.text_type(arg)) + '\n'
            )


def verbose(message, *args, **kwargs):
    if _verbose:
        kwargs.setdefault("format", "c")
        kwargs.setdefault('format', 'c')
        info(message, *args, **kwargs)


def debug(message, *args, **kwargs):
    level = kwargs.get("level", 1)
    level = kwargs.get('level', 1)
    if is_debug(level):
        kwargs.setdefault("format", "g")
        kwargs.setdefault("stream", sys.stderr)
        kwargs.setdefault('format', 'g')
        kwargs.setdefault('stream', sys.stderr)
        info(message, *args, **kwargs)


@@ -236,8 +258,8 @@ def error(message, *args, **kwargs):
    if not error_enabled():
        return

    kwargs.setdefault("format", "*r")
    kwargs.setdefault("stream", sys.stderr)
    kwargs.setdefault('format', '*r')
    kwargs.setdefault('stream', sys.stderr)
    info("Error: " + six.text_type(message), *args, **kwargs)


@@ -245,27 +267,27 @@ def warn(message, *args, **kwargs):
    if not warn_enabled():
        return

    kwargs.setdefault("format", "*Y")
    kwargs.setdefault("stream", sys.stderr)
    kwargs.setdefault('format', '*Y')
    kwargs.setdefault('stream', sys.stderr)
    info("Warning: " + six.text_type(message), *args, **kwargs)


def die(message, *args, **kwargs):
    kwargs.setdefault("countback", 4)
    kwargs.setdefault('countback', 4)
    error(message, *args, **kwargs)
    sys.exit(1)


def get_number(prompt, **kwargs):
    default = kwargs.get("default", None)
    abort = kwargs.get("abort", None)
    default = kwargs.get('default', None)
    abort = kwargs.get('abort', None)

    if default is not None and abort is not None:
        prompt += " (default is %s, %s to abort) " % (default, abort)
        prompt += ' (default is %s, %s to abort) ' % (default, abort)
    elif default is not None:
        prompt += " (default is %s) " % default
        prompt += ' (default is %s) ' % default
    elif abort is not None:
        prompt += " (%s to abort) " % abort
        prompt += ' (%s to abort) ' % abort

    number = None
    while number is None:
@@ -288,16 +310,17 @@ def get_number(prompt, **kwargs):


def get_yes_or_no(prompt, **kwargs):
    default_value = kwargs.get("default", None)
    default_value = kwargs.get('default', None)

    if default_value is None:
        prompt += " [y/n] "
        prompt += ' [y/n] '
    elif default_value is True:
        prompt += " [Y/n] "
        prompt += ' [Y/n] '
    elif default_value is False:
        prompt += " [y/N] "
        prompt += ' [y/N] '
    else:
        raise ValueError("default for get_yes_no() must be True, False, or None.")
        raise ValueError(
            "default for get_yes_no() must be True, False, or None.")

    result = None
    while result is None:
@@ -308,9 +331,9 @@ def get_yes_or_no(prompt, **kwargs):
        if result is None:
            print("Please enter yes or no.")
        else:
            if ans == "y" or ans == "yes":
            if ans == 'y' or ans == 'yes':
                result = True
            elif ans == "n" or ans == "no":
            elif ans == 'n' or ans == 'no':
                result = False
    return result

@@ -322,12 +345,12 @@ def hline(label=None, **kwargs):
        char (str): Char to draw the line with. Default '-'
        max_width (int): Maximum width of the line. Default is 64 chars.
    """
    char = kwargs.pop("char", "-")
    max_width = kwargs.pop("max_width", 64)
    char = kwargs.pop('char', '-')
    max_width = kwargs.pop('max_width', 64)
    if kwargs:
        raise TypeError(
            "'%s' is an invalid keyword argument for this function." % next(kwargs.iterkeys())
        )
            "'%s' is an invalid keyword argument for this function."
            % next(kwargs.iterkeys()))

    rows, cols = terminal_size()
    if not cols:
@@ -351,14 +374,13 @@ def hline(label=None, **kwargs):
def terminal_size():
    """Gets the dimensions of the console: (rows, cols)."""
    if _platform != "win32":

        def ioctl_gwinsz(fd):
            try:
                rc = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
                rc = struct.unpack('hh', fcntl.ioctl(
                    fd, termios.TIOCGWINSZ, '1234'))
            except BaseException:
                return
            return rc

        rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
        if not rc:
            try:
@@ -368,14 +390,12 @@ def ioctl_gwinsz(fd):
            except BaseException:
                pass
        if not rc:
            rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
            rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))

        return int(rc[0]), int(rc[1])
    else:
        if sys.version_info[0] < 3:
            raise RuntimeError(
                "Terminal size not obtainable on Windows with a\
Python version older than 3"
            )
            raise RuntimeError("Terminal size not obtainable on Windows with a\
Python version older than 3")
        rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
        rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        return int(rc[0]), int(rc[1])
@@ -18,27 +18,29 @@


class ColumnConfig:

    def __init__(self, cols):
        self.cols = cols
        self.line_length = 0
        self.valid = True
        self.widths = [0] * cols  # does not include ansi colors
        self.widths = [0] * cols   # does not include ansi colors

    def __repr__(self):
        attrs = [(a, getattr(self, a)) for a in dir(self) if not a.startswith("__")]
        attrs = [(a, getattr(self, a))
                 for a in dir(self) if not a.startswith("__")]
        return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)


def config_variable_cols(elts, console_width, padding, cols=0):
    """Variable-width column fitting algorithm.

    This function determines the most columns that can fit in the
    screen width. Unlike uniform fitting, where all columns take
    the width of the longest element in the list, each column takes
    the width of its own longest element. This packs elements more
    efficiently on screen.
    This function determines the most columns that can fit in the
    screen width. Unlike uniform fitting, where all columns take
    the width of the longest element in the list, each column takes
    the width of its own longest element. This packs elements more
    efficiently on screen.

    If cols is nonzero, force
    If cols is nonzero, force
    """
    if cols < 0:
        raise ValueError("cols must be non-negative.")
@@ -62,8 +64,8 @@ def config_variable_cols(elts, console_width, padding, cols=0):

        if conf.widths[col] < (length + p):
            conf.line_length += length + p - conf.widths[col]
            conf.widths[col] = length + p
            conf.valid = conf.line_length < console_width
            conf.widths[col] = length + p
            conf.valid = (conf.line_length < console_width)

    try:
        config = next(conf for conf in reversed(configs) if conf.valid)
@@ -79,9 +81,9 @@ def config_variable_cols(elts, console_width, padding, cols=0):
def config_uniform_cols(elts, console_width, padding, cols=0):
    """Uniform-width column fitting algorithm.

    Determines the longest element in the list, and determines how
    many columns of that width will fit on screen. Returns a
    corresponding column config.
    Determines the longest element in the list, and determines how
    many columns of that width will fit on screen. Returns a
    corresponding column config.
    """
    if cols < 0:
        raise ValueError("cols must be non-negative.")
@@ -120,18 +122,18 @@ def colify(elts, **options):
            and fit less data on the screen
    """
    # Get keyword arguments or set defaults
    cols = options.pop("cols", 0)
    output = options.pop("output", sys.stdout)
    indent = options.pop("indent", 0)
    padding = options.pop("padding", 2)
    tty = options.pop("tty", None)
    method = options.pop("method", "variable")
    cols = options.pop("cols", 0)
    output = options.pop("output", sys.stdout)
    indent = options.pop("indent", 0)
    padding = options.pop("padding", 2)
    tty = options.pop('tty', None)
    method = options.pop("method", "variable")
    console_cols = options.pop("width", None)

    if options:
        raise TypeError(
            "'%s' is an invalid keyword argument for this function." % next(options.iterkeys())
        )
            "'%s' is an invalid keyword argument for this function."
            % next(options.iterkeys()))

    # elts needs to be an array of strings so we can count the elements
    elts = [text_type(elt) for elt in elts]
@@ -139,10 +141,10 @@ def colify(elts, **options):
        return (0, ())

    # environment size is of the form "<rows>x<cols>"
    env_size = os.environ.get("COLIFY_SIZE")
    env_size = os.environ.get('COLIFY_SIZE')
    if env_size:
        try:
            r, c = env_size.split("x")
            r, c = env_size.split('x')
            console_rows, console_cols = int(r), int(c)
            tty = True
        except BaseException:
@@ -178,7 +180,7 @@ def colify(elts, **options):
            elt = col * rows + row
            width = config.widths[col] + cextra(elts[elt])
            if col < cols - 1:
                fmt = "%%-%ds" % width
                fmt = '%%-%ds' % width
                output.write(fmt % elts[elt])
            else:
                # Don't pad the rightmost column (spaces can wrap on
@@ -196,15 +198,15 @@ def colify(elts, **options):
|
||||
def colify_table(table, **options):
|
||||
"""Version of ``colify()`` for data expressed in rows, (list of lists).
|
||||
|
||||
Same as regular colify but:
|
||||
Same as regular colify but:
|
||||
|
||||
1. This takes a list of lists, where each sub-list must be the
|
||||
same length, and each is interpreted as a row in a table.
|
||||
Regular colify displays a sequential list of values in columns.
|
||||
1. This takes a list of lists, where each sub-list must be the
|
||||
same length, and each is interpreted as a row in a table.
|
||||
Regular colify displays a sequential list of values in columns.
|
||||
|
||||
2. Regular colify will always print with 1 column when the output
|
||||
is not a tty. This will always print with same dimensions of
|
||||
the table argument.
|
||||
2. Regular colify will always print with 1 column when the output
|
||||
is not a tty. This will always print with same dimensions of
|
||||
the table argument.
|
||||
|
||||
"""
|
||||
if table is None:
|
||||
@@ -219,20 +221,20 @@ def transpose():
|
||||
for row in table:
|
||||
yield row[i]
|
||||
|
||||
if "cols" in options:
|
||||
if 'cols' in options:
|
||||
raise ValueError("Cannot override columsn in colify_table.")
|
||||
options["cols"] = columns
|
||||
options['cols'] = columns
|
||||
|
||||
# don't reduce to 1 column for non-tty
|
||||
options["tty"] = True
|
||||
options['tty'] = True
|
||||
|
||||
colify(transpose(), **options)
|
||||
|
||||
|
||||
def colified(elts, **options):
|
||||
"""Invokes the ``colify()`` function but returns the result as a string
|
||||
instead of writing it to an output string."""
|
||||
instead of writing it to an output string."""
|
||||
sio = StringIO()
|
||||
options["output"] = sio
|
||||
options['output'] = sio
|
||||
colify(elts, **options)
|
||||
return sio.getvalue()
|
||||
|
@@ -76,33 +76,29 @@ def __init__(self, message):


# Text styles for ansi codes
styles = {"*": "1", "_": "4", None: "0"}  # bold  # underline  # plain
styles = {'*': '1',   # bold
          '_': '4',   # underline
          None: '0'}  # plain

# Dim and bright ansi colors
colors = {
    "k": 30,
    "K": 90,  # black
    "r": 31,
    "R": 91,  # red
    "g": 32,
    "G": 92,  # green
    "y": 33,
    "Y": 93,  # yellow
    "b": 34,
    "B": 94,  # blue
    "m": 35,
    "M": 95,  # magenta
    "c": 36,
    "C": 96,  # cyan
    "w": 37,
    "W": 97,
}  # white
colors = {'k': 30, 'K': 90,  # black
          'r': 31, 'R': 91,  # red
          'g': 32, 'G': 92,  # green
          'y': 33, 'Y': 93,  # yellow
          'b': 34, 'B': 94,  # blue
          'm': 35, 'M': 95,  # magenta
          'c': 36, 'C': 96,  # cyan
          'w': 37, 'W': 97}  # white

# Regex to be used for color formatting
color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'

# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}
color_when_values = {
    'always': True,
    'auto': None,
    'never': False
}

# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
@@ -118,7 +114,7 @@ def _color_when_value(when):
    if when in color_when_values:
        return color_when_values[when]
    elif when not in color_when_values.values():
        raise ValueError("Invalid color setting: %s" % when)
        raise ValueError('Invalid color setting: %s' % when)
    return when


@@ -150,19 +146,16 @@ def color_when(value):


class match_to_ansi(object):
    def __init__(self, color=True, enclose=False):

    def __init__(self, color=True):
        self.color = _color_when_value(color)
        self.enclose = enclose

    def escape(self, s):
        """Returns a TTY escape sequence for a color"""
        if self.color:
            if self.enclose:
                return r"\[\033[%sm\]" % s
            else:
                return "\033[%sm" % s
            return "\033[%sm" % s
        else:
            return ""
            return ''

    def __call__(self, match):
        """Convert a match object generated by ``color_re`` into an ansi
@@ -171,22 +164,22 @@ def __call__(self, match):
        style, color, text = match.groups()
        m = match.group(0)

        if m == "@@":
            return "@"
        elif m == "@.":
        if m == '@@':
            return '@'
        elif m == '@.':
            return self.escape(0)
        elif m == "@":
            raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
        elif m == '@':
            raise ColorParseError("Incomplete color format: '%s' in %s"
                                  % (m, match.string))

        string = styles[style]
        if color:
            if color not in colors:
                raise ColorParseError(
                    "Invalid color specifier: '%s' in '%s'" % (color, match.string)
                )
            string += ";" + str(colors[color])
                raise ColorParseError("Invalid color specifier: '%s' in '%s'"
                                      % (color, match.string))
            string += ';' + str(colors[color])

        colored_text = ""
        colored_text = ''
        if text:
            colored_text = text + self.escape(0)

@@ -205,31 +198,29 @@ def colorize(string, **kwargs):
    Keyword Arguments:
        color (bool): If False, output will be plain text without control
            codes, for output to non-console devices.
        enclose (bool): If True, enclose ansi color sequences with
            square brackets to prevent misestimation of terminal width.
    """
    color = _color_when_value(kwargs.get("color", get_color_when()))
    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string)
    string = string.replace("}}", "}")
    color = _color_when_value(kwargs.get('color', get_color_when()))
    string = re.sub(color_re, match_to_ansi(color), string)
    string = string.replace('}}', '}')
    return string


def clen(string):
    """Return the length of a string, excluding ansi color sequences."""
    return len(re.sub(r"\033[^m]*m", "", string))
    return len(re.sub(r'\033[^m]*m', '', string))


def cextra(string):
    """Length of extra color characters in a string"""
    return len("".join(re.findall(r"\033[^m]*m", string)))
    return len(''.join(re.findall(r'\033[^m]*m', string)))


def cwrite(string, stream=None, color=None):
    """Replace all color expressions in string with ANSI control
    codes and write the result to the stream. If color is
    False, this will write plain text with no color. If True,
    then it will always write colored output. If not supplied,
    then it will be set based on stream.isatty().
    codes and write the result to the stream. If color is
    False, this will write plain text with no color. If True,
    then it will always write colored output. If not supplied,
    then it will be set based on stream.isatty().
    """
    stream = sys.stdout if stream is None else stream
    if color is None:
@@ -260,19 +251,20 @@ def cescape(string):
        (str): the string with color codes escaped
    """
    string = six.text_type(string)
    string = string.replace("@", "@@")
    string = string.replace("}", "}}")
    string = string.replace('@', '@@')
    string = string.replace('}', '}}')
    return string


class ColorStream(object):

    def __init__(self, stream, color=None):
        self._stream = stream
        self._color = color

    def write(self, string, **kwargs):
        raw = kwargs.get("raw", False)
        raw_write = getattr(self._stream, "write")
        raw = kwargs.get('raw', False)
        raw_write = getattr(self._stream, 'write')

        color = self._color
        if self._color is None:
@@ -283,6 +275,6 @@ def write(self, string, **kwargs):
            raw_write(colorize(string, color=color))

    def writelines(self, sequence, **kwargs):
        raw = kwargs.get("raw", False)
        raw = kwargs.get('raw', False)
        for string in sequence:
            self.write(string, self.color, raw=raw)
@@ -31,22 +31,21 @@
termios = None  # type: Optional[ModuleType]
try:
    import termios as term_mod

    termios = term_mod
except ImportError:
    pass


# Use this to strip escape sequences
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07')

# control characters for enabling/disabling echo
#
# We use control characters to ensure that echo enable/disable are inline
# with the other output. We always follow these with a newline to ensure
# one per line the following newline is ignored in output.
xon, xoff = "\x11\n", "\x13\n"
control = re.compile("(\x11\n|\x13\n)")
xon, xoff = '\x11\n', '\x13\n'
control = re.compile('(\x11\n|\x13\n)')


@contextmanager
@@ -60,13 +59,17 @@ def ignore_signal(signum):


def _is_background_tty(stream):
    """True if the stream is a tty and calling process is in the background."""
    return stream.isatty() and os.getpgrp() != os.tcgetpgrp(stream.fileno())
    """True if the stream is a tty and calling process is in the background.
    """
    return (
        stream.isatty() and
        os.getpgrp() != os.tcgetpgrp(stream.fileno())
    )


def _strip(line):
    """Strip color and control characters from a line."""
    return _escape.sub("", line)
    return _escape.sub('', line)


class keyboard_input(object):
@@ -144,7 +147,6 @@ class keyboard_input(object):
    a TTY, ``keyboard_input`` has no effect.

    """

    def __init__(self, stream):
        """Create a context manager that will enable keyboard input on stream.

@@ -202,7 +204,7 @@ def check_fg_bg(self):
        bg = self._is_background()

        # restore sanity if flags are amiss -- see diagram in class docs
        if not bg and any(flags):  # fg, but input not enabled
        if not bg and any(flags):      # fg, but input not enabled
            self._enable_keyboard_input()
        elif bg and not all(flags):  # bg, but input enabled
            self._restore_default_terminal_settings()
@@ -226,7 +228,8 @@ def __enter__(self):

        # Install a signal handler to disable/enable keyboard input
        # when the process moves between foreground and background.
        self.old_handlers[signal.SIGTSTP] = signal.signal(signal.SIGTSTP, self._tstp_handler)
        self.old_handlers[signal.SIGTSTP] = signal.signal(
            signal.SIGTSTP, self._tstp_handler)

        # add an atexit handler to ensure the terminal is restored
        atexit.register(self._restore_default_terminal_settings)
@@ -255,7 +258,6 @@ class Unbuffered(object):

    This is implemented by forcing a flush after each write.
    """

    def __init__(self, stream):
        self.stream = stream

@@ -300,7 +302,6 @@ class FileWrapper(object):
    yet), or neither. When unwrapped, it returns an open file (or file-like)
    object.
    """

    def __init__(self, file_like):
        # This records whether the file-like object returned by "unwrap" is
        # purely in-memory. In that case a subprocess will need to explicitly
@@ -324,9 +325,9 @@ def unwrap(self):
        if self.open:
            if self.file_like:
                if sys.version_info < (3,):
                    self.file = open(self.file_like, "w")
                    self.file = open(self.file_like, 'w')
                else:
                    self.file = open(self.file_like, "w", encoding="utf-8")  # novm
                    self.file = open(self.file_like, 'w', encoding='utf-8')  # novm
            else:
                self.file = StringIO()
            return self.file
@@ -342,9 +343,8 @@ def close(self):

class MultiProcessFd(object):
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""

    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""
    def __init__(self, fd):
        self._connection = None
        self._fd = None
@@ -434,7 +434,7 @@ def log_output(*args, **kwargs):
    This method is actually a factory serving a per platform
    (unix vs windows) log_output class
    """
    if sys.platform == "win32":
    if sys.platform == 'win32':
        return winlog(*args, **kwargs)
    else:
        return nixlog(*args, **kwargs)
@@ -454,9 +454,8 @@ class nixlog(object):
    work within test frameworks like nose and pytest.
    """

    def __init__(
        self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
    ):
    def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
                 env=None, filter_fn=None):
        """Create a new output log context manager.

        Args:
@@ -525,7 +524,8 @@ def __enter__(self):
            raise RuntimeError("Can't re-enter the same log_output!")

        if self.file_like is None:
            raise RuntimeError("file argument must be set by either __init__ or __call__")
            raise RuntimeError(
                "file argument must be set by either __init__ or __call__")

        # set up a stream for the daemon to write to
        self.log_file = FileWrapper(self.file_like)
@@ -555,7 +555,9 @@ def __enter__(self):
        input_multiprocess_fd = None
        try:
            if sys.stdin.isatty():
                input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
                input_multiprocess_fd = MultiProcessFd(
                    os.dup(sys.stdin.fileno())
                )
        except BaseException:
            # just don't forward input if this fails
            pass
@@ -564,14 +566,9 @@ def __enter__(self):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_multiprocess_fd,
                    read_multiprocess_fd,
                    write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
                    self.filter_fn,
                ),
                    input_multiprocess_fd, read_multiprocess_fd, write_fd,
                    self.echo, self.log_file, child_pipe, self.filter_fn
                )
            )
            self.process.daemon = True  # must set before start()
            self.process.start()
@@ -612,7 +609,7 @@ def __enter__(self):
        self._saved_stderr = sys.stderr

        # create a file object for the pipe; redirect to it.
        pipe_fd_out = os.fdopen(write_fd, "w")
        pipe_fd_out = os.fdopen(write_fd, 'w')
        sys.stdout = pipe_fd_out
        sys.stderr = pipe_fd_out

@@ -677,7 +674,8 @@ def __exit__(self, exc_type, exc_val, exc_tb):
    def force_echo(self):
        """Context manager to force local echo, even if echo is off."""
        if not self._active:
            raise RuntimeError("Can't call force_echo() outside log_output region!")
            raise RuntimeError(
                "Can't call force_echo() outside log_output region!")

        # This uses the xon/xoff to highlight regions to be echoed in the
        # output. We use these control characters rather than, say, a
@@ -693,26 +691,25 @@ def force_echo(self):


class StreamWrapper:
    """Wrapper class to handle redirection of io streams"""

    """ Wrapper class to handle redirection of io streams """
    def __init__(self, sys_attr):
        self.sys_attr = sys_attr
        self.saved_stream = None
        if sys.platform.startswith("win32"):
        if sys.platform.startswith('win32'):
            if sys.version_info < (3, 5):
                libc = ctypes.CDLL(ctypes.util.find_library("c"))
                libc = ctypes.CDLL(ctypes.util.find_library('c'))
            else:
                if hasattr(sys, "gettotalrefcount"):  # debug build
                    libc = ctypes.CDLL("ucrtbased")
                if hasattr(sys, 'gettotalrefcount'):  # debug build
                    libc = ctypes.CDLL('ucrtbased')
                else:
                    libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
                    libc = ctypes.CDLL('api-ms-win-crt-stdio-l1-1-0')

            kernel32 = ctypes.WinDLL("kernel32")
            kernel32 = ctypes.WinDLL('kernel32')

            # https://docs.microsoft.com/en-us/windows/console/getstdhandle
            if self.sys_attr == "stdout":
            if self.sys_attr == 'stdout':
                STD_HANDLE = -11
            elif self.sys_attr == "stderr":
            elif self.sys_attr == 'stderr':
                STD_HANDLE = -12
            else:
                raise KeyError(self.sys_attr)
@@ -731,7 +728,7 @@ def __init__(self, sys_attr):
    def redirect_stream(self, to_fd):
        """Redirect stdout to the given file descriptor."""
        # Flush the C-level buffer stream
        if sys.platform.startswith("win32"):
        if sys.platform.startswith('win32'):
            self.libc.fflush(None)
        else:
            self.libc.fflush(self.c_stream)
@@ -742,13 +739,13 @@ def redirect_stream(self, to_fd):
        # Make orig_stream_fd point to the same file as to_fd
        os.dup2(to_fd, self.orig_stream_fd)
        # Set sys_stream to a new stream that points to the redirected fd
        new_buffer = open(self.orig_stream_fd, "wb")
        new_buffer = open(self.orig_stream_fd, 'wb')
        new_stream = io.TextIOWrapper(new_buffer)
        setattr(sys, self.sys_attr, new_stream)
        self.sys_stream = getattr(sys, self.sys_attr)

    def flush(self):
        if sys.platform.startswith("win32"):
        if sys.platform.startswith('win32'):
            self.libc.fflush(None)
        else:
            self.libc.fflush(self.c_stream)
@@ -771,16 +768,14 @@ class winlog(object):

    Does not support the use of 'v' toggling as nixlog does.
    """

    def __init__(
        self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
    ):
    def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
                 env=None, filter_fn=None):
        self.env = env
        self.debug = debug
        self.echo = echo
        self.logfile = file_like
        self.stdout = StreamWrapper("stdout")
        self.stderr = StreamWrapper("stderr")
        self.stdout = StreamWrapper('stdout')
        self.stderr = StreamWrapper('stderr')
        self._active = False
        self._ioflag = False
        self.old_stdout = sys.stdout
@@ -791,7 +786,8 @@ def __enter__(self):
            raise RuntimeError("Can't re-enter the same log_output!")

        if self.logfile is None:
            raise RuntimeError("file argument must be set by __init__ ")
            raise RuntimeError(
                "file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if type(self.logfile) == StringIO:
@@ -800,8 +796,8 @@ def __enter__(self):
            sys.stdout = self.logfile
            sys.stderr = self.logfile
        else:
            self.writer = open(self.logfile, mode="wb+")
            self.reader = open(self.logfile, mode="rb+")
            self.writer = open(self.logfile, mode='wb+')
            self.reader = open(self.logfile, mode='rb+')

        # Dup stdout so we can still write to it after redirection
        self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
@@ -815,7 +811,7 @@ def background_reader(reader, echo_writer, _kill):
            # if echo: write line to user
            try:
                while True:
                    is_killed = _kill.wait(0.1)
                    is_killed = _kill.wait(.1)
                    # Flush buffered build output to file
                    # stdout/err fds refer to log file
                    self.stderr.flush()
@@ -823,7 +819,7 @@ def background_reader(reader, echo_writer, _kill):

                    line = reader.readline()
                    if self.echo and line:
                        echo_writer.write("{0}".format(line.decode()))
                        echo_writer.write('{0}'.format(line.decode()))
                        echo_writer.flush()

                    if is_killed:
@@ -833,9 +829,8 @@ def background_reader(reader, echo_writer, _kill):

        self._active = True
        with replace_environment(self.env):
            self._thread = Thread(
                target=background_reader, args=(self.reader, self.echo_writer, self._kill)
            )
            self._thread = Thread(target=background_reader,
                                  args=(self.reader, self.echo_writer, self._kill))
            self._thread.start()
        return self

@@ -859,19 +854,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
    def force_echo(self):
        """Context manager to force local echo, even if echo is off."""
        if not self._active:
            raise RuntimeError("Can't call force_echo() outside log_output region!")
            raise RuntimeError(
                "Can't call force_echo() outside log_output region!")
        yield


def _writer_daemon(
    stdin_multiprocess_fd,
    read_multiprocess_fd,
    write_fd,
    echo,
    log_file_wrapper,
    control_pipe,
    filter_fn,
):
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
                   log_file_wrapper, control_pipe, filter_fn):
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both
@@ -924,16 +913,16 @@ def _writer_daemon(
    # write_fd to terminate the reading loop, so we close the file descriptor
    # here. Forking is the process spawning method everywhere except Mac OS
    # for Python >= 3.8 and on Windows
    if sys.version_info < (3, 8) or sys.platform != "darwin":
    if sys.version_info < (3, 8) or sys.platform != 'darwin':
        os.close(write_fd)

    # Use line buffering (3rd param = 1) since Python 3 has a bug
    # that prevents unbuffered text I/O.
    if sys.version_info < (3,):
        in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1)
        in_pipe = os.fdopen(read_multiprocess_fd.fd, 'r', 1)
    else:
        # Python 3.x before 3.7 does not open with UTF-8 encoding by default
        in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
        in_pipe = os.fdopen(read_multiprocess_fd.fd, 'r', 1, encoding='utf-8')

    if stdin_multiprocess_fd:
        stdin = os.fdopen(stdin_multiprocess_fd.fd)
@@ -942,7 +931,7 @@ def _writer_daemon(

    # list of streams to select from
    istreams = [in_pipe, stdin] if stdin else [in_pipe]
    force_echo = False  # parent can force echo for certain output
    force_echo = False      # parent can force echo for certain output

    log_file = log_file_wrapper.unwrap()

@@ -965,7 +954,7 @@ def _writer_daemon(
            # check and the read, so we ignore SIGTTIN here.
            with ignore_signal(signal.SIGTTIN):
                try:
                    if stdin.read(1) == "v":
                    if stdin.read(1) == 'v':
                        echo = not echo
                except IOError as e:
                    # If SIGTTIN is ignored, the system gives EIO
@@ -983,14 +972,14 @@ def _writer_daemon(
                    line = _retry(in_pipe.readline)()
                except UnicodeDecodeError:
                    # installs like --test=root gpgme produce non-UTF8 logs
                    line = "<line lost: output was not encoded as UTF-8>\n"
                    line = '<line lost: output was not encoded as UTF-8>\n'

                if not line:
                    return
                line_count += 1

                # find control characters and strip them.
                clean_line, num_controls = control.subn("", line)
                clean_line, num_controls = control.subn('', line)

                # Echo to stdout if requested or forced.
                if echo or force_echo:
@@ -1054,7 +1043,6 @@ def _retry(function):
    relevant for this file.

    """

    def wrapped(*args, **kwargs):
        while True:
            try:
@@ -1067,7 +1055,6 @@ def wrapped(*args, **kwargs):
                if e.args[0] == errno.EINTR:
                    continue
                raise

    return wrapped


@@ -30,7 +30,6 @@
termios = None
try:
    import termios as term_mod

    termios = term_mod
except ImportError:
    pass
@@ -43,8 +42,8 @@ class ProcessController(object):
    minion) similar to the way a shell would, by sending signals and I/O.

    """

    def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
    def __init__(self, pid, controller_fd,
                 timeout=1, sleep_time=1e-1, debug=False):
        """Create a controller to manipulate the process with id ``pid``

        Args:
@@ -85,19 +84,18 @@ def get_canon_echo_attrs(self):
    def horizontal_line(self, name):
"""Labled horizontal line for debugging."""
|
||||
        if self.debug:
            sys.stderr.write("------------------------------------------- %s\n" % name)
            sys.stderr.write(
                "------------------------------------------- %s\n" % name
            )

    def status(self):
        """Print debug message with status info for the minion."""
        if self.debug:
            canon, echo = self.get_canon_echo_attrs()
            sys.stderr.write(
                "canon: %s, echo: %s\n"
                % (
                    "on" if canon else "off",
                    "on" if echo else "off",
                )
            )
            sys.stderr.write("canon: %s, echo: %s\n" % (
                "on" if canon else "off",
                "on" if echo else "off",
            ))
            sys.stderr.write("input: %s\n" % self.input_on())
            sys.stderr.write("bg: %s\n" % self.background())
            sys.stderr.write("\n")
@@ -139,7 +137,7 @@ def write(self, byte_string):

    def wait(self, condition):
        start = time.time()
        while ((time.time() - start) < self.timeout) and not condition():
        while (((time.time() - start) < self.timeout) and not condition()):
            time.sleep(1e-2)
        assert condition()

@@ -221,15 +219,14 @@ def minion_function(**kwargs)
       |_________________________________________________________|

    """

    def __init__(self, controller_function, minion_function):
        self.proc = None
        self.controller_function = controller_function
        self.minion_function = minion_function

        # these can be optionally set to change defaults
        self.controller_timeout = 3
        self.sleep_time = 0.1
        self.controller_timeout = 1
        self.sleep_time = 0

    def start(self, **kwargs):
        """Start the controller and minion processes.
@@ -245,12 +242,8 @@ def start(self, **kwargs):
        """
        self.proc = multiprocessing.Process(
            target=PseudoShell._set_up_and_run_controller_function,
            args=(
                self.controller_function,
                self.minion_function,
                self.controller_timeout,
                self.sleep_time,
            ),
            args=(self.controller_function, self.minion_function,
                  self.controller_timeout, self.sleep_time),
            kwargs=kwargs,
        )
        self.proc.start()
@@ -262,8 +255,7 @@ def join(self):

    @staticmethod
    def _set_up_and_run_minion_function(
        tty_name, stdout_fd, stderr_fd, ready, minion_function, **kwargs
    ):
            tty_name, stdout_fd, stderr_fd, ready, minion_function, **kwargs):
        """Minion process wrapper for PseudoShell.

        Handles the mechanics of setting up a PTY, then calls
@@ -281,7 +273,8 @@ def _set_up_and_run_minion_function(
        os.close(stdin_fd)

        if kwargs.get("debug"):
            sys.stderr.write("minion: stdin.isatty(): %s\n" % sys.stdin.isatty())
            sys.stderr.write(
                "minion: stdin.isatty(): %s\n" % sys.stdin.isatty())

        # tell the parent that we're really running
        if kwargs.get("debug"):
@@ -295,15 +288,15 @@ def _set_up_and_run_minion_function(

    @staticmethod
    def _set_up_and_run_controller_function(
        controller_function, minion_function, controller_timeout, sleep_time, **kwargs
    ):
            controller_function, minion_function, controller_timeout,
            sleep_time, **kwargs):
        """Set up a pty, spawn a minion process, execute controller_function.

        Handles the mechanics of setting up a PTY, then calls
        ``controller_function``.

        """
        os.setsid()  # new session; this process is the controller
        os.setsid()   # new session; this process is the controller

        controller_fd, minion_fd = os.openpty()
        pty_name = os.ttyname(minion_fd)
@@ -312,10 +305,11 @@ def _set_up_and_run_controller_function(
        pty_fd = os.open(pty_name, os.O_RDWR)
        os.close(pty_fd)

        ready = multiprocessing.Value("i", False)
        ready = multiprocessing.Value('i', False)
        minion_process = multiprocessing.Process(
            target=PseudoShell._set_up_and_run_minion_function,
            args=(pty_name, sys.stdout.fileno(), sys.stderr.fileno(), ready, minion_function),
            args=(pty_name, sys.stdout.fileno(), sys.stderr.fileno(),
                  ready, minion_function),
            kwargs=kwargs,
        )
        minion_process.start()
@@ -335,7 +329,8 @@ def _set_up_and_run_controller_function(
            minion_pgid = os.getpgid(minion_process.pid)
            sys.stderr.write("minion pid: %d\n" % minion_process.pid)
            sys.stderr.write("minion pgid: %d\n" % minion_pgid)
            sys.stderr.write("minion sid: %d\n" % os.getsid(minion_process.pid))
            sys.stderr.write(
                "minion sid: %d\n" % os.getsid(minion_process.pid))
            sys.stderr.write("\n")
            sys.stderr.flush()
        # set up controller to ignore SIGTSTP, like a shell
@@ -344,8 +339,7 @@ def _set_up_and_run_controller_function(
        # call the controller function once the minion is ready
        try:
            controller = ProcessController(
                minion_process.pid, controller_fd, debug=kwargs.get("debug")
            )
                minion_process.pid, controller_fd, debug=kwargs.get("debug"))
            controller.timeout = controller_timeout
            controller.sleep_time = sleep_time
            error = controller_function(minion_process, controller, **kwargs)
Some files were not shown because too many files have changed in this diff.